1 /* r128_state.c -- State support for r128 -*- linux-c -*- 2 * Created: Thu Jan 27 02:53:43 2000 by gareth (at) valinux.com 3 */ 4 /* 5 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California. 6 * All Rights Reserved. 7 * 8 * Permission is hereby granted, free of charge, to any person obtaining a 9 * copy of this software and associated documentation files (the "Software"), 10 * to deal in the Software without restriction, including without limitation 11 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 12 * and/or sell copies of the Software, and to permit persons to whom the 13 * Software is furnished to do so, subject to the following conditions: 14 * 15 * The above copyright notice and this permission notice (including the next 16 * paragraph) shall be included in all copies or substantial portions of the 17 * Software. 18 * 19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 20 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 21 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 22 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR 23 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 24 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 * DEALINGS IN THE SOFTWARE. 26 * 27 * Authors: 28 * Gareth Hughes <gareth (at) valinux.com> 29 */ 30 31 #include "drmP.h" 32 #include "drm.h" 33 #include "r128_drm.h" 34 #include "r128_drv.h" 35 36 /* ================================================================ 37 * CCE hardware state programming functions 38 */ 39 40 static void r128_emit_clip_rects(drm_r128_private_t * dev_priv, 41 struct drm_clip_rect * boxes, int count) 42 { 43 u32 aux_sc_cntl = 0x00000000; 44 RING_LOCALS; 45 DRM_DEBUG("\n"); 46 47 BEGIN_RING((count < 3 ? count : 3) * 5 + 2); 48 49 if (count >= 1) { 50 OUT_RING(CCE_PACKET0(R128_AUX1_SC_LEFT, 3)); 51 OUT_RING(boxes[0].x1); 52 OUT_RING(boxes[0].x2 - 1); 53 OUT_RING(boxes[0].y1); 54 OUT_RING(boxes[0].y2 - 1); 55 56 aux_sc_cntl |= (R128_AUX1_SC_EN | R128_AUX1_SC_MODE_OR); 57 } 58 if (count >= 2) { 59 OUT_RING(CCE_PACKET0(R128_AUX2_SC_LEFT, 3)); 60 OUT_RING(boxes[1].x1); 61 OUT_RING(boxes[1].x2 - 1); 62 OUT_RING(boxes[1].y1); 63 OUT_RING(boxes[1].y2 - 1); 64 65 aux_sc_cntl |= (R128_AUX2_SC_EN | R128_AUX2_SC_MODE_OR); 66 } 67 if (count >= 3) { 68 OUT_RING(CCE_PACKET0(R128_AUX3_SC_LEFT, 3)); 69 OUT_RING(boxes[2].x1); 70 OUT_RING(boxes[2].x2 - 1); 71 OUT_RING(boxes[2].y1); 72 OUT_RING(boxes[2].y2 - 1); 73 74 aux_sc_cntl |= (R128_AUX3_SC_EN | R128_AUX3_SC_MODE_OR); 75 } 76 77 OUT_RING(CCE_PACKET0(R128_AUX_SC_CNTL, 0)); 78 OUT_RING(aux_sc_cntl); 79 80 ADVANCE_RING(); 81 } 82 83 static __inline__ void r128_emit_core(drm_r128_private_t * dev_priv) 84 { 85 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 86 drm_r128_context_regs_t *ctx = &sarea_priv->context_state; 87 RING_LOCALS; 88 DRM_DEBUG("\n"); 89 90 BEGIN_RING(2); 91 92 OUT_RING(CCE_PACKET0(R128_SCALE_3D_CNTL, 0)); 93 OUT_RING(ctx->scale_3d_cntl); 94 95 ADVANCE_RING(); 96 } 97 98 static __inline__ void r128_emit_context(drm_r128_private_t * dev_priv) 99 { 100 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 101 drm_r128_context_regs_t *ctx = &sarea_priv->context_state; 102 RING_LOCALS; 103 DRM_DEBUG("\n"); 104 105 BEGIN_RING(13); 106 107 OUT_RING(CCE_PACKET0(R128_DST_PITCH_OFFSET_C, 11)); 108 OUT_RING(ctx->dst_pitch_offset_c); 109 OUT_RING(ctx->dp_gui_master_cntl_c); 110 OUT_RING(ctx->sc_top_left_c); 111 OUT_RING(ctx->sc_bottom_right_c); 112 OUT_RING(ctx->z_offset_c); 113 OUT_RING(ctx->z_pitch_c); 114 OUT_RING(ctx->z_sten_cntl_c); 115 OUT_RING(ctx->tex_cntl_c); 116 OUT_RING(ctx->misc_3d_state_cntl_reg); 117 OUT_RING(ctx->texture_clr_cmp_clr_c); 118 OUT_RING(ctx->texture_clr_cmp_msk_c); 119 OUT_RING(ctx->fog_color_c); 120 121 ADVANCE_RING(); 122 } 123 124 static __inline__ void r128_emit_setup(drm_r128_private_t * dev_priv) 125 { 126 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 127 drm_r128_context_regs_t *ctx = &sarea_priv->context_state; 128 RING_LOCALS; 129 DRM_DEBUG("\n"); 130 131 BEGIN_RING(3); 132 133 OUT_RING(CCE_PACKET1(R128_SETUP_CNTL, R128_PM4_VC_FPU_SETUP)); 134 OUT_RING(ctx->setup_cntl); 135 OUT_RING(ctx->pm4_vc_fpu_setup); 136 137 ADVANCE_RING(); 138 } 139 140 static __inline__ void r128_emit_masks(drm_r128_private_t * dev_priv) 141 { 142 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 143 drm_r128_context_regs_t *ctx = &sarea_priv->context_state; 144 RING_LOCALS; 145 DRM_DEBUG("\n"); 146 147 BEGIN_RING(5); 148 149 OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0)); 150 OUT_RING(ctx->dp_write_mask); 151 152 OUT_RING(CCE_PACKET0(R128_STEN_REF_MASK_C, 1)); 153 OUT_RING(ctx->sten_ref_mask_c); 154 OUT_RING(ctx->plane_3d_mask_c); 155 156 ADVANCE_RING(); 157 } 158 159 static __inline__ void r128_emit_window(drm_r128_private_t * dev_priv) 160 { 161 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 162 drm_r128_context_regs_t *ctx = &sarea_priv->context_state; 163 RING_LOCALS; 164 DRM_DEBUG("\n"); 165 166 BEGIN_RING(2); 167 168 OUT_RING(CCE_PACKET0(R128_WINDOW_XY_OFFSET, 0)); 169 OUT_RING(ctx->window_xy_offset); 170 171 ADVANCE_RING(); 172 } 173 174 static __inline__ void r128_emit_tex0(drm_r128_private_t * dev_priv) 175 { 176 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 177 drm_r128_context_regs_t *ctx = &sarea_priv->context_state; 178 drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[0]; 179 int i; 180 RING_LOCALS; 181 DRM_DEBUG("\n"); 182 183 BEGIN_RING(7 + R128_MAX_TEXTURE_LEVELS); 184 185 OUT_RING(CCE_PACKET0(R128_PRIM_TEX_CNTL_C, 186 2 + R128_MAX_TEXTURE_LEVELS)); 187 OUT_RING(tex->tex_cntl); 188 OUT_RING(tex->tex_combine_cntl); 189 OUT_RING(ctx->tex_size_pitch_c); 190 for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++) { 191 OUT_RING(tex->tex_offset[i]); 192 } 193 194 OUT_RING(CCE_PACKET0(R128_CONSTANT_COLOR_C, 1)); 195 OUT_RING(ctx->constant_color_c); 196 OUT_RING(tex->tex_border_color); 197 198 ADVANCE_RING(); 199 } 200 201 static __inline__ void r128_emit_tex1(drm_r128_private_t * dev_priv) 202 { 203 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 204 drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[1]; 205 int i; 206 RING_LOCALS; 207 DRM_DEBUG("\n"); 208 209 BEGIN_RING(5 + R128_MAX_TEXTURE_LEVELS); 210 211 OUT_RING(CCE_PACKET0(R128_SEC_TEX_CNTL_C, 1 + R128_MAX_TEXTURE_LEVELS)); 212 OUT_RING(tex->tex_cntl); 213 OUT_RING(tex->tex_combine_cntl); 214 for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++) { 215 OUT_RING(tex->tex_offset[i]); 216 } 217 218 OUT_RING(CCE_PACKET0(R128_SEC_TEXTURE_BORDER_COLOR_C, 0)); 219 OUT_RING(tex->tex_border_color); 220 221 ADVANCE_RING(); 222 } 223 224 static void r128_emit_state(drm_r128_private_t * dev_priv) 225 { 226 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 227 unsigned int dirty = sarea_priv->dirty; 228 229 DRM_DEBUG("dirty=0x%08x\n", dirty); 230 231 if (dirty & R128_UPLOAD_CORE) { 232 r128_emit_core(dev_priv); 233 sarea_priv->dirty &= ~R128_UPLOAD_CORE; 234 } 235 236 if (dirty & R128_UPLOAD_CONTEXT) { 237 r128_emit_context(dev_priv); 238 sarea_priv->dirty &= ~R128_UPLOAD_CONTEXT; 239 } 240 241 if (dirty & R128_UPLOAD_SETUP) { 242 r128_emit_setup(dev_priv); 243 sarea_priv->dirty &= ~R128_UPLOAD_SETUP; 244 } 245 246 if (dirty & R128_UPLOAD_MASKS) { 247 r128_emit_masks(dev_priv); 248 sarea_priv->dirty &= ~R128_UPLOAD_MASKS; 249 } 250 251 if (dirty & R128_UPLOAD_WINDOW) { 252 r128_emit_window(dev_priv); 253 sarea_priv->dirty &= ~R128_UPLOAD_WINDOW; 254 } 255 256 if (dirty & R128_UPLOAD_TEX0) { 257 r128_emit_tex0(dev_priv); 258 sarea_priv->dirty &= ~R128_UPLOAD_TEX0; 259 } 260 261 if (dirty & R128_UPLOAD_TEX1) { 262 r128_emit_tex1(dev_priv); 263 sarea_priv->dirty &= ~R128_UPLOAD_TEX1; 264 } 265 266 /* Turn off the texture cache flushing */ 267 sarea_priv->context_state.tex_cntl_c &= ~R128_TEX_CACHE_FLUSH; 268 269 sarea_priv->dirty &= ~R128_REQUIRE_QUIESCENCE; 270 } 271 272 #if R128_PERFORMANCE_BOXES 273 /* ================================================================ 274 * Performance monitoring functions 275 */ 276 277 static void r128_clear_box(drm_r128_private_t * dev_priv, 278 int x, int y, int w, int h, int r, int g, int b) 279 { 280 u32 pitch, offset; 281 u32 fb_bpp, color; 282 RING_LOCALS; 283 284 switch (dev_priv->fb_bpp) { 285 case 16: 286 fb_bpp = R128_GMC_DST_16BPP; 287 color = (((r & 0xf8) << 8) | 288 ((g & 0xfc) << 3) | ((b & 0xf8) >> 3)); 289 break; 290 case 24: 291 fb_bpp = R128_GMC_DST_24BPP; 292 color = ((r << 16) | (g << 8) | b); 293 break; 294 case 32: 295 fb_bpp = R128_GMC_DST_32BPP; 296 color = (((0xff) << 24) | (r << 16) | (g << 8) | b); 297 break; 298 default: 299 return; 300 } 301 302 offset = dev_priv->back_offset; 303 pitch = dev_priv->back_pitch >> 3; 304 305 BEGIN_RING(6); 306 307 OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4)); 308 OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL | 309 R128_GMC_BRUSH_SOLID_COLOR | 310 fb_bpp | 311 R128_GMC_SRC_DATATYPE_COLOR | 312 R128_ROP3_P | 313 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_AUX_CLIP_DIS); 314 315 OUT_RING((pitch << 21) | (offset >> 5)); 316 OUT_RING(color); 317 318 OUT_RING((x << 16) | y); 319 OUT_RING((w << 16) | h); 320 321 ADVANCE_RING(); 322 } 323 324 static void r128_cce_performance_boxes(drm_r128_private_t * dev_priv) 325 { 326 if (atomic_read(&dev_priv->idle_count) == 0) { 327 r128_clear_box(dev_priv, 64, 4, 8, 8, 0, 255, 0); 328 } else { 329 atomic_set(&dev_priv->idle_count, 0); 330 } 331 } 332 333 #endif 334 335 /* ================================================================ 336 * CCE command dispatch functions 337 */ 338 339 static void r128_print_dirty(const char *msg, unsigned int flags) 340 { 341 DRM_INFO("%s: (0x%x) %s%s%s%s%s%s%s%s%s\n", 342 msg, 343 flags, 344 (flags & R128_UPLOAD_CORE) ? "core, " : "", 345 (flags & R128_UPLOAD_CONTEXT) ? "context, " : "", 346 (flags & R128_UPLOAD_SETUP) ? "setup, " : "", 347 (flags & R128_UPLOAD_TEX0) ? "tex0, " : "", 348 (flags & R128_UPLOAD_TEX1) ? "tex1, " : "", 349 (flags & R128_UPLOAD_MASKS) ? "masks, " : "", 350 (flags & R128_UPLOAD_WINDOW) ? "window, " : "", 351 (flags & R128_UPLOAD_CLIPRECTS) ? "cliprects, " : "", 352 (flags & R128_REQUIRE_QUIESCENCE) ? "quiescence, " : ""); 353 } 354 355 static void r128_cce_dispatch_clear(struct drm_device * dev, 356 drm_r128_clear_t * clear) 357 { 358 drm_r128_private_t *dev_priv = dev->dev_private; 359 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 360 int nbox = sarea_priv->nbox; 361 struct drm_clip_rect *pbox = sarea_priv->boxes; 362 unsigned int flags = clear->flags; 363 int i; 364 RING_LOCALS; 365 DRM_DEBUG("\n"); 366 367 if (dev_priv->page_flipping && dev_priv->current_page == 1) { 368 unsigned int tmp = flags; 369 370 flags &= ~(R128_FRONT | R128_BACK); 371 if (tmp & R128_FRONT) 372 flags |= R128_BACK; 373 if (tmp & R128_BACK) 374 flags |= R128_FRONT; 375 } 376 377 for (i = 0; i < nbox; i++) { 378 int x = pbox[i].x1; 379 int y = pbox[i].y1; 380 int w = pbox[i].x2 - x; 381 int h = pbox[i].y2 - y; 382 383 DRM_DEBUG("dispatch clear %d,%d-%d,%d flags 0x%x\n", 384 pbox[i].x1, pbox[i].y1, pbox[i].x2, 385 pbox[i].y2, flags); 386 387 if (flags & (R128_FRONT | R128_BACK)) { 388 BEGIN_RING(2); 389 390 OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0)); 391 OUT_RING(clear->color_mask); 392 393 ADVANCE_RING(); 394 } 395 396 if (flags & R128_FRONT) { 397 BEGIN_RING(6); 398 399 OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4)); 400 OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL | 401 R128_GMC_BRUSH_SOLID_COLOR | 402 (dev_priv->color_fmt << 8) | 403 R128_GMC_SRC_DATATYPE_COLOR | 404 R128_ROP3_P | 405 R128_GMC_CLR_CMP_CNTL_DIS | 406 R128_GMC_AUX_CLIP_DIS); 407 408 OUT_RING(dev_priv->front_pitch_offset_c); 409 OUT_RING(clear->clear_color); 410 411 OUT_RING((x << 16) | y); 412 OUT_RING((w << 16) | h); 413 414 ADVANCE_RING(); 415 } 416 417 if (flags & R128_BACK) { 418 BEGIN_RING(6); 419 420 OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4)); 421 OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL | 422 R128_GMC_BRUSH_SOLID_COLOR | 423 (dev_priv->color_fmt << 8) | 424 R128_GMC_SRC_DATATYPE_COLOR | 425 R128_ROP3_P | 426 R128_GMC_CLR_CMP_CNTL_DIS | 427 R128_GMC_AUX_CLIP_DIS); 428 429 OUT_RING(dev_priv->back_pitch_offset_c); 430 OUT_RING(clear->clear_color); 431 432 OUT_RING((x << 16) | y); 433 OUT_RING((w << 16) | h); 434 435 ADVANCE_RING(); 436 } 437 438 if (flags & R128_DEPTH) { 439 BEGIN_RING(6); 440 441 OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4)); 442 OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL | 443 R128_GMC_BRUSH_SOLID_COLOR | 444 (dev_priv->depth_fmt << 8) | 445 R128_GMC_SRC_DATATYPE_COLOR | 446 R128_ROP3_P | 447 R128_GMC_CLR_CMP_CNTL_DIS | 448 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS); 449 450 OUT_RING(dev_priv->depth_pitch_offset_c); 451 OUT_RING(clear->clear_depth); 452 453 OUT_RING((x << 16) | y); 454 OUT_RING((w << 16) | h); 455 456 ADVANCE_RING(); 457 } 458 } 459 } 460 461 static void r128_cce_dispatch_swap(struct drm_device * dev) 462 { 463 drm_r128_private_t *dev_priv = dev->dev_private; 464 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 465 int nbox = sarea_priv->nbox; 466 struct drm_clip_rect *pbox = sarea_priv->boxes; 467 int i; 468 RING_LOCALS; 469 DRM_DEBUG("\n"); 470 471 #if R128_PERFORMANCE_BOXES 472 /* Do some trivial performance monitoring... 473 */ 474 r128_cce_performance_boxes(dev_priv); 475 #endif 476 477 for (i = 0; i < nbox; i++) { 478 int x = pbox[i].x1; 479 int y = pbox[i].y1; 480 int w = pbox[i].x2 - x; 481 int h = pbox[i].y2 - y; 482 483 BEGIN_RING(7); 484 485 OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5)); 486 OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL | 487 R128_GMC_DST_PITCH_OFFSET_CNTL | 488 R128_GMC_BRUSH_NONE | 489 (dev_priv->color_fmt << 8) | 490 R128_GMC_SRC_DATATYPE_COLOR | 491 R128_ROP3_S | 492 R128_DP_SRC_SOURCE_MEMORY | 493 R128_GMC_CLR_CMP_CNTL_DIS | 494 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS); 495 496 /* Make this work even if front & back are flipped: 497 */ 498 if (dev_priv->current_page == 0) { 499 OUT_RING(dev_priv->back_pitch_offset_c); 500 OUT_RING(dev_priv->front_pitch_offset_c); 501 } else { 502 OUT_RING(dev_priv->front_pitch_offset_c); 503 OUT_RING(dev_priv->back_pitch_offset_c); 504 } 505 506 OUT_RING((x << 16) | y); 507 OUT_RING((x << 16) | y); 508 OUT_RING((w << 16) | h); 509 510 ADVANCE_RING(); 511 } 512 513 /* Increment the frame counter. The client-side 3D driver must 514 * throttle the framerate by waiting for this value before 515 * performing the swapbuffer ioctl. 516 */ 517 dev_priv->sarea_priv->last_frame++; 518 519 BEGIN_RING(2); 520 521 OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0)); 522 OUT_RING(dev_priv->sarea_priv->last_frame); 523 524 ADVANCE_RING(); 525 } 526 527 static void r128_cce_dispatch_flip(struct drm_device * dev) 528 { 529 drm_r128_private_t *dev_priv = dev->dev_private; 530 RING_LOCALS; 531 DRM_DEBUG("page=%d pfCurrentPage=%d\n", 532 dev_priv->current_page, dev_priv->sarea_priv->pfCurrentPage); 533 534 #if R128_PERFORMANCE_BOXES 535 /* Do some trivial performance monitoring... 536 */ 537 r128_cce_performance_boxes(dev_priv); 538 #endif 539 540 BEGIN_RING(4); 541 542 R128_WAIT_UNTIL_PAGE_FLIPPED(); 543 OUT_RING(CCE_PACKET0(R128_CRTC_OFFSET, 0)); 544 545 if (dev_priv->current_page == 0) { 546 OUT_RING(dev_priv->back_offset); 547 } else { 548 OUT_RING(dev_priv->front_offset); 549 } 550 551 ADVANCE_RING(); 552 553 /* Increment the frame counter. The client-side 3D driver must 554 * throttle the framerate by waiting for this value before 555 * performing the swapbuffer ioctl. 556 */ 557 dev_priv->sarea_priv->last_frame++; 558 dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page = 559 1 - dev_priv->current_page; 560 561 BEGIN_RING(2); 562 563 OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0)); 564 OUT_RING(dev_priv->sarea_priv->last_frame); 565 566 ADVANCE_RING(); 567 } 568 569 static void r128_cce_dispatch_vertex(struct drm_device * dev, struct drm_buf * buf) 570 { 571 drm_r128_private_t *dev_priv = dev->dev_private; 572 drm_r128_buf_priv_t *buf_priv = buf->dev_private; 573 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 574 int format = sarea_priv->vc_format; 575 int offset = buf->bus_address; 576 int size = buf->used; 577 int prim = buf_priv->prim; 578 int i = 0; 579 RING_LOCALS; 580 DRM_DEBUG("buf=%d nbox=%d\n", buf->idx, sarea_priv->nbox); 581 582 if (0) 583 r128_print_dirty("dispatch_vertex", sarea_priv->dirty); 584 585 if (buf->used) { 586 buf_priv->dispatched = 1; 587 588 if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS) { 589 r128_emit_state(dev_priv); 590 } 591 592 do { 593 /* Emit the next set of up to three cliprects */ 594 if (i < sarea_priv->nbox) { 595 r128_emit_clip_rects(dev_priv, 596 &sarea_priv->boxes[i], 597 sarea_priv->nbox - i); 598 } 599 600 /* Emit the vertex buffer rendering commands */ 601 BEGIN_RING(5); 602 603 OUT_RING(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM, 3)); 604 OUT_RING(offset); 605 OUT_RING(size); 606 OUT_RING(format); 607 OUT_RING(prim | R128_CCE_VC_CNTL_PRIM_WALK_LIST | 608 (size << R128_CCE_VC_CNTL_NUM_SHIFT)); 609 610 ADVANCE_RING(); 611 612 i += 3; 613 } while (i < sarea_priv->nbox); 614 } 615 616 if (buf_priv->discard) { 617 buf_priv->age = dev_priv->sarea_priv->last_dispatch; 618 619 /* Emit the vertex buffer age */ 620 BEGIN_RING(2); 621 622 OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0)); 623 OUT_RING(buf_priv->age); 624 625 ADVANCE_RING(); 626 627 buf->pending = 1; 628 buf->used = 0; 629 /* FIXME: Check dispatched field */ 630 buf_priv->dispatched = 0; 631 } 632 633 dev_priv->sarea_priv->last_dispatch++; 634 635 sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS; 636 sarea_priv->nbox = 0; 637 } 638 639 static void r128_cce_dispatch_indirect(struct drm_device * dev, 640 struct drm_buf * buf, int start, int end) 641 { 642 drm_r128_private_t *dev_priv = dev->dev_private; 643 drm_r128_buf_priv_t *buf_priv = buf->dev_private; 644 RING_LOCALS; 645 DRM_DEBUG("indirect: buf=%d s=0x%x e=0x%x\n", buf->idx, start, end); 646 647 if (start != end) { 648 int offset = buf->bus_address + start; 649 int dwords = (end - start + 3) / sizeof(u32); 650 651 /* Indirect buffer data must be an even number of 652 * dwords, so if we've been given an odd number we must 653 * pad the data with a Type-2 CCE packet. 654 */ 655 if (dwords & 1) { 656 u32 *data = (u32 *) 657 ((char *)dev->agp_buffer_map->handle 658 + buf->offset + start); 659 data[dwords++] = cpu_to_le32(R128_CCE_PACKET2); 660 } 661 662 buf_priv->dispatched = 1; 663 664 /* Fire off the indirect buffer */ 665 BEGIN_RING(3); 666 667 OUT_RING(CCE_PACKET0(R128_PM4_IW_INDOFF, 1)); 668 OUT_RING(offset); 669 OUT_RING(dwords); 670 671 ADVANCE_RING(); 672 } 673 674 if (buf_priv->discard) { 675 buf_priv->age = dev_priv->sarea_priv->last_dispatch; 676 677 /* Emit the indirect buffer age */ 678 BEGIN_RING(2); 679 680 OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0)); 681 OUT_RING(buf_priv->age); 682 683 ADVANCE_RING(); 684 685 buf->pending = 1; 686 buf->used = 0; 687 /* FIXME: Check dispatched field */ 688 buf_priv->dispatched = 0; 689 } 690 691 dev_priv->sarea_priv->last_dispatch++; 692 } 693 694 static void r128_cce_dispatch_indices(struct drm_device * dev, 695 struct drm_buf * buf, 696 int start, int end, int count) 697 { 698 drm_r128_private_t *dev_priv = dev->dev_private; 699 drm_r128_buf_priv_t *buf_priv = buf->dev_private; 700 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 701 int format = sarea_priv->vc_format; 702 int offset = dev->agp_buffer_map->offset - dev_priv->cce_buffers_offset; 703 int prim = buf_priv->prim; 704 u32 *data; 705 int dwords; 706 int i = 0; 707 RING_LOCALS; 708 DRM_DEBUG("indices: s=%d e=%d c=%d\n", start, end, count); 709 710 if (0) 711 r128_print_dirty("dispatch_indices", sarea_priv->dirty); 712 713 if (start != end) { 714 buf_priv->dispatched = 1; 715 716 if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS) { 717 r128_emit_state(dev_priv); 718 } 719 720 dwords = (end - start + 3) / sizeof(u32); 721 722 data = (u32 *) ((char *)dev->agp_buffer_map->handle 723 + buf->offset + start); 724 725 data[0] = cpu_to_le32(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM, 726 dwords - 2)); 727 728 data[1] = cpu_to_le32(offset); 729 data[2] = cpu_to_le32(R128_MAX_VB_VERTS); 730 data[3] = cpu_to_le32(format); 731 data[4] = cpu_to_le32((prim | R128_CCE_VC_CNTL_PRIM_WALK_IND | 732 (count << 16))); 733 734 if (count & 0x1) { 735 #ifdef __LITTLE_ENDIAN 736 data[dwords - 1] &= 0x0000ffff; 737 #else 738 data[dwords - 1] &= 0xffff0000; 739 #endif 740 } 741 742 do { 743 /* Emit the next set of up to three cliprects */ 744 if (i < sarea_priv->nbox) { 745 r128_emit_clip_rects(dev_priv, 746 &sarea_priv->boxes[i], 747 sarea_priv->nbox - i); 748 } 749 750 r128_cce_dispatch_indirect(dev, buf, start, end); 751 752 i += 3; 753 } while (i < sarea_priv->nbox); 754 } 755 756 if (buf_priv->discard) { 757 buf_priv->age = dev_priv->sarea_priv->last_dispatch; 758 759 /* Emit the vertex buffer age */ 760 BEGIN_RING(2); 761 762 OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0)); 763 OUT_RING(buf_priv->age); 764 765 ADVANCE_RING(); 766 767 buf->pending = 1; 768 /* FIXME: Check dispatched field */ 769 buf_priv->dispatched = 0; 770 } 771 772 dev_priv->sarea_priv->last_dispatch++; 773 774 sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS; 775 sarea_priv->nbox = 0; 776 } 777 778 static int r128_cce_dispatch_blit(struct drm_device * dev, 779 struct drm_file *file_priv, 780 drm_r128_blit_t * blit) 781 { 782 drm_r128_private_t *dev_priv = dev->dev_private; 783 struct drm_device_dma *dma = dev->dma; 784 struct drm_buf *buf; 785 drm_r128_buf_priv_t *buf_priv; 786 u32 *data; 787 int dword_shift, dwords; 788 RING_LOCALS; 789 DRM_DEBUG("\n"); 790 791 /* The compiler won't optimize away a division by a variable, 792 * even if the only legal values are powers of two. Thus, we'll 793 * use a shift instead. 794 */ 795 switch (blit->format) { 796 case R128_DATATYPE_ARGB8888: 797 dword_shift = 0; 798 break; 799 case R128_DATATYPE_ARGB1555: 800 case R128_DATATYPE_RGB565: 801 case R128_DATATYPE_ARGB4444: 802 case R128_DATATYPE_YVYU422: 803 case R128_DATATYPE_VYUY422: 804 dword_shift = 1; 805 break; 806 case R128_DATATYPE_CI8: 807 case R128_DATATYPE_RGB8: 808 dword_shift = 2; 809 break; 810 default: 811 DRM_ERROR("invalid blit format %d\n", blit->format); 812 return -EINVAL; 813 } 814 815 /* Flush the pixel cache, and mark the contents as Read Invalid. 816 * This ensures no pixel data gets mixed up with the texture 817 * data from the host data blit, otherwise part of the texture 818 * image may be corrupted. 819 */ 820 BEGIN_RING(2); 821 822 OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0)); 823 OUT_RING(R128_PC_RI_GUI | R128_PC_FLUSH_GUI); 824 825 ADVANCE_RING(); 826 827 /* Dispatch the indirect buffer. 828 */ 829 buf = dma->buflist[blit->idx]; 830 buf_priv = buf->dev_private; 831 832 if (buf->file_priv != file_priv) { 833 DRM_ERROR("process %d using buffer owned by %p\n", 834 DRM_CURRENTPID, buf->file_priv); 835 return -EINVAL; 836 } 837 if (buf->pending) { 838 DRM_ERROR("sending pending buffer %d\n", blit->idx); 839 return -EINVAL; 840 } 841 842 buf_priv->discard = 1; 843 844 dwords = (blit->width * blit->height) >> dword_shift; 845 846 data = (u32 *) ((char *)dev->agp_buffer_map->handle + buf->offset); 847 848 data[0] = cpu_to_le32(CCE_PACKET3(R128_CNTL_HOSTDATA_BLT, dwords + 6)); 849 data[1] = cpu_to_le32((R128_GMC_DST_PITCH_OFFSET_CNTL | 850 R128_GMC_BRUSH_NONE | 851 (blit->format << 8) | 852 R128_GMC_SRC_DATATYPE_COLOR | 853 R128_ROP3_S | 854 R128_DP_SRC_SOURCE_HOST_DATA | 855 R128_GMC_CLR_CMP_CNTL_DIS | 856 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS)); 857 858 data[2] = cpu_to_le32((blit->pitch << 21) | (blit->offset >> 5)); 859 data[3] = cpu_to_le32(0xffffffff); 860 data[4] = cpu_to_le32(0xffffffff); 861 data[5] = cpu_to_le32((blit->y << 16) | blit->x); 862 data[6] = cpu_to_le32((blit->height << 16) | blit->width); 863 data[7] = cpu_to_le32(dwords); 864 865 buf->used = (dwords + 8) * sizeof(u32); 866 867 r128_cce_dispatch_indirect(dev, buf, 0, buf->used); 868 869 /* Flush the pixel cache after the blit completes. This ensures 870 * the texture data is written out to memory before rendering 871 * continues. 872 */ 873 BEGIN_RING(2); 874 875 OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0)); 876 OUT_RING(R128_PC_FLUSH_GUI); 877 878 ADVANCE_RING(); 879 880 return 0; 881 } 882 883 /* ================================================================ 884 * Tiled depth buffer management 885 * 886 * FIXME: These should all set the destination write mask for when we 887 * have hardware stencil support. 888 */ 889 890 static int r128_cce_dispatch_write_span(struct drm_device * dev, 891 drm_r128_depth_t * depth) 892 { 893 drm_r128_private_t *dev_priv = dev->dev_private; 894 int count, x, y; 895 u32 *buffer; 896 u8 *mask; 897 int i, buffer_size, mask_size; 898 RING_LOCALS; 899 DRM_DEBUG("\n"); 900 901 count = depth->n; 902 if (count > 4096 || count <= 0) 903 return -EMSGSIZE; 904 905 if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x))) { 906 return -EFAULT; 907 } 908 if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y))) { 909 return -EFAULT; 910 } 911 912 buffer_size = depth->n * sizeof(u32); 913 buffer = drm_alloc(buffer_size, DRM_MEM_BUFS); 914 if (buffer == NULL) 915 return -ENOMEM; 916 if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) { 917 drm_free(buffer, buffer_size, DRM_MEM_BUFS); 918 return -EFAULT; 919 } 920 921 mask_size = depth->n * sizeof(u8); 922 if (depth->mask) { 923 mask = drm_alloc(mask_size, DRM_MEM_BUFS); 924 if (mask == NULL) { 925 drm_free(buffer, buffer_size, DRM_MEM_BUFS); 926 return -ENOMEM; 927 } 928 if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) { 929 drm_free(buffer, buffer_size, DRM_MEM_BUFS); 930 drm_free(mask, mask_size, DRM_MEM_BUFS); 931 return -EFAULT; 932 } 933 934 for (i = 0; i < count; i++, x++) { 935 if (mask[i]) { 936 BEGIN_RING(6); 937 938 OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4)); 939 OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL | 940 R128_GMC_BRUSH_SOLID_COLOR | 941 (dev_priv->depth_fmt << 8) | 942 R128_GMC_SRC_DATATYPE_COLOR | 943 R128_ROP3_P | 944 R128_GMC_CLR_CMP_CNTL_DIS | 945 R128_GMC_WR_MSK_DIS); 946 947 OUT_RING(dev_priv->depth_pitch_offset_c); 948 OUT_RING(buffer[i]); 949 950 OUT_RING((x << 16) | y); 951 OUT_RING((1 << 16) | 1); 952 953 ADVANCE_RING(); 954 } 955 } 956 957 drm_free(mask, mask_size, DRM_MEM_BUFS); 958 } else { 959 for (i = 0; i < count; i++, x++) { 960 BEGIN_RING(6); 961 962 OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4)); 963 OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL | 964 R128_GMC_BRUSH_SOLID_COLOR | 965 (dev_priv->depth_fmt << 8) | 966 R128_GMC_SRC_DATATYPE_COLOR | 967 R128_ROP3_P | 968 R128_GMC_CLR_CMP_CNTL_DIS | 969 R128_GMC_WR_MSK_DIS); 970 971 OUT_RING(dev_priv->depth_pitch_offset_c); 972 OUT_RING(buffer[i]); 973 974 OUT_RING((x << 16) | y); 975 OUT_RING((1 << 16) | 1); 976 977 ADVANCE_RING(); 978 } 979 } 980 981 drm_free(buffer, buffer_size, DRM_MEM_BUFS); 982 983 return 0; 984 } 985 986 static int r128_cce_dispatch_write_pixels(struct drm_device * dev, 987 drm_r128_depth_t * depth) 988 { 989 drm_r128_private_t *dev_priv = dev->dev_private; 990 int count, *x, *y; 991 u32 *buffer; 992 u8 *mask; 993 int i, xbuf_size, ybuf_size, buffer_size, mask_size; 994 RING_LOCALS; 995 DRM_DEBUG("\n"); 996 997 count = depth->n; 998 if (count > 4096 || count <= 0) 999 return -EMSGSIZE; 1000 1001 xbuf_size = count * sizeof(*x); 1002 ybuf_size = count * sizeof(*y); 1003 x = drm_alloc(xbuf_size, DRM_MEM_BUFS); 1004 if (x == NULL) { 1005 return -ENOMEM; 1006 } 1007 y = drm_alloc(ybuf_size, DRM_MEM_BUFS); 1008 if (y == NULL) { 1009 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1010 return -ENOMEM; 1011 } 1012 if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) { 1013 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1014 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1015 return -EFAULT; 1016 } 1017 if (DRM_COPY_FROM_USER(y, depth->y, xbuf_size)) { 1018 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1019 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1020 return -EFAULT; 1021 } 1022 1023 buffer_size = depth->n * sizeof(u32); 1024 buffer = drm_alloc(buffer_size, DRM_MEM_BUFS); 1025 if (buffer == NULL) { 1026 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1027 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1028 return -ENOMEM; 1029 } 1030 if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) { 1031 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1032 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1033 drm_free(buffer, buffer_size, DRM_MEM_BUFS); 1034 return -EFAULT; 1035 } 1036 1037 if (depth->mask) { 1038 mask_size = depth->n * sizeof(u8); 1039 mask = drm_alloc(mask_size, DRM_MEM_BUFS); 1040 if (mask == NULL) { 1041 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1042 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1043 drm_free(buffer, buffer_size, DRM_MEM_BUFS); 1044 return -ENOMEM; 1045 } 1046 if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) { 1047 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1048 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1049 drm_free(buffer, buffer_size, DRM_MEM_BUFS); 1050 drm_free(mask, mask_size, DRM_MEM_BUFS); 1051 return -EFAULT; 1052 } 1053 1054 for (i = 0; i < count; i++) { 1055 if (mask[i]) { 1056 BEGIN_RING(6); 1057 1058 OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4)); 1059 OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL | 1060 R128_GMC_BRUSH_SOLID_COLOR | 1061 (dev_priv->depth_fmt << 8) | 1062 R128_GMC_SRC_DATATYPE_COLOR | 1063 R128_ROP3_P | 1064 R128_GMC_CLR_CMP_CNTL_DIS | 1065 R128_GMC_WR_MSK_DIS); 1066 1067 OUT_RING(dev_priv->depth_pitch_offset_c); 1068 OUT_RING(buffer[i]); 1069 1070 OUT_RING((x[i] << 16) | y[i]); 1071 OUT_RING((1 << 16) | 1); 1072 1073 ADVANCE_RING(); 1074 } 1075 } 1076 1077 drm_free(mask, mask_size, DRM_MEM_BUFS); 1078 } else { 1079 for (i = 0; i < count; i++) { 1080 BEGIN_RING(6); 1081 1082 OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4)); 1083 OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL | 1084 R128_GMC_BRUSH_SOLID_COLOR | 1085 (dev_priv->depth_fmt << 8) | 1086 R128_GMC_SRC_DATATYPE_COLOR | 1087 R128_ROP3_P | 1088 R128_GMC_CLR_CMP_CNTL_DIS | 1089 R128_GMC_WR_MSK_DIS); 1090 1091 OUT_RING(dev_priv->depth_pitch_offset_c); 1092 OUT_RING(buffer[i]); 1093 1094 OUT_RING((x[i] << 16) | y[i]); 1095 OUT_RING((1 << 16) | 1); 1096 1097 ADVANCE_RING(); 1098 } 1099 } 1100 1101 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1102 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1103 drm_free(buffer, buffer_size, DRM_MEM_BUFS); 1104 1105 return 0; 1106 } 1107 1108 static int r128_cce_dispatch_read_span(struct drm_device * dev, 1109 drm_r128_depth_t * depth) 1110 { 1111 drm_r128_private_t *dev_priv = dev->dev_private; 1112 int count, x, y; 1113 RING_LOCALS; 1114 DRM_DEBUG("\n"); 1115 1116 count = depth->n; 1117 if (count > 4096 || count <= 0) 1118 return -EMSGSIZE; 1119 1120 if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x))) { 1121 return -EFAULT; 1122 } 1123 if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y))) { 1124 return -EFAULT; 1125 } 1126 1127 BEGIN_RING(7); 1128 1129 OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5)); 1130 OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL | 1131 R128_GMC_DST_PITCH_OFFSET_CNTL | 1132 R128_GMC_BRUSH_NONE | 1133 (dev_priv->depth_fmt << 8) | 1134 R128_GMC_SRC_DATATYPE_COLOR | 1135 R128_ROP3_S | 1136 R128_DP_SRC_SOURCE_MEMORY | 1137 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS); 1138 1139 OUT_RING(dev_priv->depth_pitch_offset_c); 1140 OUT_RING(dev_priv->span_pitch_offset_c); 1141 1142 OUT_RING((x << 16) | y); 1143 OUT_RING((0 << 16) | 0); 1144 OUT_RING((count << 16) | 1); 1145 1146 ADVANCE_RING(); 1147 1148 return 0; 1149 } 1150 1151 static int r128_cce_dispatch_read_pixels(struct drm_device * dev, 1152 drm_r128_depth_t * depth) 1153 { 1154 drm_r128_private_t *dev_priv = dev->dev_private; 1155 int count, *x, *y; 1156 int i, xbuf_size, ybuf_size; 1157 RING_LOCALS; 1158 DRM_DEBUG("\n"); 1159 1160 count = depth->n; 1161 if (count > 4096 || count <= 0) 1162 return -EMSGSIZE; 1163 1164 if (count > dev_priv->depth_pitch) { 1165 count = dev_priv->depth_pitch; 1166 } 1167 1168 xbuf_size = count * sizeof(*x); 1169 ybuf_size = count * sizeof(*y); 1170 x = drm_alloc(xbuf_size, DRM_MEM_BUFS); 1171 if (x == NULL) { 1172 return -ENOMEM; 1173 } 1174 y = drm_alloc(ybuf_size, DRM_MEM_BUFS); 1175 if (y == NULL) { 1176 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1177 return -ENOMEM; 1178 } 1179 if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) { 1180 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1181 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1182 return -EFAULT; 1183 } 1184 if (DRM_COPY_FROM_USER(y, depth->y, ybuf_size)) { 1185 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1186 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1187 return -EFAULT; 1188 } 1189 1190 for (i = 0; i < count; i++) { 1191 BEGIN_RING(7); 1192 1193 OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5)); 1194 OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL | 1195 R128_GMC_DST_PITCH_OFFSET_CNTL | 1196 R128_GMC_BRUSH_NONE | 1197 (dev_priv->depth_fmt << 8) | 1198 R128_GMC_SRC_DATATYPE_COLOR | 1199 R128_ROP3_S | 1200 R128_DP_SRC_SOURCE_MEMORY | 1201 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS); 1202 1203 OUT_RING(dev_priv->depth_pitch_offset_c); 1204 OUT_RING(dev_priv->span_pitch_offset_c); 1205 1206 OUT_RING((x[i] << 16) | y[i]); 1207 OUT_RING((i << 16) | 0); 1208 OUT_RING((1 << 16) | 1); 1209 1210 ADVANCE_RING(); 1211 } 1212 1213 drm_free(x, xbuf_size, DRM_MEM_BUFS); 1214 drm_free(y, ybuf_size, DRM_MEM_BUFS); 1215 1216 return 0; 1217 } 1218 1219 /* ================================================================ 1220 * Polygon stipple 1221 */ 1222 1223 static void r128_cce_dispatch_stipple(struct drm_device * dev, u32 * stipple) 1224 { 1225 drm_r128_private_t *dev_priv = dev->dev_private; 1226 int i; 1227 RING_LOCALS; 1228 DRM_DEBUG("\n"); 1229 1230 BEGIN_RING(33); 1231 1232 OUT_RING(CCE_PACKET0(R128_BRUSH_DATA0, 31)); 1233 for (i = 0; i < 32; i++) { 1234 OUT_RING(stipple[i]); 1235 } 1236 1237 ADVANCE_RING(); 1238 } 1239 1240 /* ================================================================ 1241 * IOCTL functions 1242 */ 1243 1244 static int r128_cce_clear(struct drm_device *dev, void *data, struct drm_file *file_priv) 1245 { 1246 drm_r128_private_t *dev_priv = dev->dev_private; 1247 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 1248 drm_r128_clear_t *clear = data; 1249 DRM_DEBUG("\n"); 1250 1251 LOCK_TEST_WITH_RETURN(dev, file_priv); 1252 1253 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1254 1255 if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS) 1256 sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS; 1257 1258 r128_cce_dispatch_clear(dev, clear); 1259 COMMIT_RING(); 1260 1261 /* Make sure we restore the 3D state next time. 1262 */ 1263 dev_priv->sarea_priv->dirty |= R128_UPLOAD_CONTEXT | R128_UPLOAD_MASKS; 1264 1265 return 0; 1266 } 1267 1268 static int r128_do_init_pageflip(struct drm_device * dev) 1269 { 1270 drm_r128_private_t *dev_priv = dev->dev_private; 1271 DRM_DEBUG("\n"); 1272 1273 dev_priv->crtc_offset = R128_READ(R128_CRTC_OFFSET); 1274 dev_priv->crtc_offset_cntl = R128_READ(R128_CRTC_OFFSET_CNTL); 1275 1276 R128_WRITE(R128_CRTC_OFFSET, dev_priv->front_offset); 1277 R128_WRITE(R128_CRTC_OFFSET_CNTL, 1278 dev_priv->crtc_offset_cntl | R128_CRTC_OFFSET_FLIP_CNTL); 1279 1280 dev_priv->page_flipping = 1; 1281 dev_priv->current_page = 0; 1282 dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page; 1283 1284 return 0; 1285 } 1286 1287 static int r128_do_cleanup_pageflip(struct drm_device * dev) 1288 { 1289 drm_r128_private_t *dev_priv = dev->dev_private; 1290 DRM_DEBUG("\n"); 1291 1292 R128_WRITE(R128_CRTC_OFFSET, dev_priv->crtc_offset); 1293 R128_WRITE(R128_CRTC_OFFSET_CNTL, dev_priv->crtc_offset_cntl); 1294 1295 if (dev_priv->current_page != 0) { 1296 r128_cce_dispatch_flip(dev); 1297 COMMIT_RING(); 1298 } 1299 1300 dev_priv->page_flipping = 0; 1301 return 0; 1302 } 1303 1304 /* Swapping and flipping are different operations, need different ioctls. 1305 * They can & should be intermixed to support multiple 3d windows. 1306 */ 1307 1308 static int r128_cce_flip(struct drm_device *dev, void *data, struct drm_file *file_priv) 1309 { 1310 drm_r128_private_t *dev_priv = dev->dev_private; 1311 DRM_DEBUG("\n"); 1312 1313 LOCK_TEST_WITH_RETURN(dev, file_priv); 1314 1315 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1316 1317 if (!dev_priv->page_flipping) 1318 r128_do_init_pageflip(dev); 1319 1320 r128_cce_dispatch_flip(dev); 1321 1322 COMMIT_RING(); 1323 return 0; 1324 } 1325 1326 static int r128_cce_swap(struct drm_device *dev, void *data, struct drm_file *file_priv) 1327 { 1328 drm_r128_private_t *dev_priv = dev->dev_private; 1329 drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv; 1330 DRM_DEBUG("\n"); 1331 1332 LOCK_TEST_WITH_RETURN(dev, file_priv); 1333 1334 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1335 1336 if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS) 1337 sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS; 1338 1339 r128_cce_dispatch_swap(dev); 1340 dev_priv->sarea_priv->dirty |= (R128_UPLOAD_CONTEXT | 1341 R128_UPLOAD_MASKS); 1342 1343 COMMIT_RING(); 1344 return 0; 1345 } 1346 1347 static int r128_cce_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv) 1348 { 1349 drm_r128_private_t *dev_priv = dev->dev_private; 1350 struct drm_device_dma *dma = dev->dma; 1351 struct drm_buf *buf; 1352 drm_r128_buf_priv_t *buf_priv; 1353 drm_r128_vertex_t *vertex = data; 1354 1355 LOCK_TEST_WITH_RETURN(dev, file_priv); 1356 1357 if (!dev_priv) { 1358 DRM_ERROR("called with no initialization\n"); 1359 return -EINVAL; 1360 } 1361 1362 DRM_DEBUG("pid=%d index=%d count=%d discard=%d\n", 1363 DRM_CURRENTPID, vertex->idx, vertex->count, vertex->discard); 1364 1365 if (vertex->idx < 0 || vertex->idx >= dma->buf_count) { 1366 DRM_ERROR("buffer index %d (of %d max)\n", 1367 vertex->idx, dma->buf_count - 1); 1368 return -EINVAL; 1369 } 1370 if (vertex->prim < 0 || 1371 vertex->prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) { 1372 DRM_ERROR("buffer prim %d\n", vertex->prim); 1373 return -EINVAL; 1374 } 1375 1376 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1377 VB_AGE_TEST_WITH_RETURN(dev_priv); 1378 1379 buf = dma->buflist[vertex->idx]; 1380 buf_priv = buf->dev_private; 1381 1382 if (buf->file_priv != file_priv) { 1383 DRM_ERROR("process %d using buffer owned by %p\n", 1384 DRM_CURRENTPID, buf->file_priv); 1385 return -EINVAL; 1386 } 1387 if (buf->pending) { 1388 DRM_ERROR("sending pending buffer %d\n", vertex->idx); 1389 return -EINVAL; 1390 } 1391 1392 buf->used = vertex->count; 1393 buf_priv->prim = vertex->prim; 1394 buf_priv->discard = vertex->discard; 1395 1396 r128_cce_dispatch_vertex(dev, buf); 1397 1398 COMMIT_RING(); 1399 return 0; 1400 } 1401 1402 static int r128_cce_indices(struct drm_device *dev, void *data, struct drm_file *file_priv) 1403 { 1404 drm_r128_private_t *dev_priv = dev->dev_private; 1405 struct drm_device_dma *dma = dev->dma; 1406 struct drm_buf *buf; 1407 drm_r128_buf_priv_t *buf_priv; 1408 drm_r128_indices_t *elts = data; 1409 int count; 1410 1411 LOCK_TEST_WITH_RETURN(dev, file_priv); 1412 1413 if (!dev_priv) { 1414 DRM_ERROR("called with no initialization\n"); 1415 return -EINVAL; 1416 } 1417 1418 DRM_DEBUG("pid=%d buf=%d s=%d e=%d d=%d\n", DRM_CURRENTPID, 1419 elts->idx, elts->start, elts->end, elts->discard); 1420 1421 if (elts->idx < 0 || elts->idx >= dma->buf_count) { 1422 DRM_ERROR("buffer index %d (of %d max)\n", 1423 elts->idx, dma->buf_count - 1); 1424 return -EINVAL; 1425 } 1426 if (elts->prim < 0 || 1427 elts->prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) { 1428 DRM_ERROR("buffer prim %d\n", elts->prim); 1429 return -EINVAL; 1430 } 1431 1432 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1433 VB_AGE_TEST_WITH_RETURN(dev_priv); 1434 1435 buf = dma->buflist[elts->idx]; 1436 buf_priv = buf->dev_private; 1437 1438 if (buf->file_priv != file_priv) { 1439 DRM_ERROR("process %d using buffer owned by %p\n", 1440 DRM_CURRENTPID, buf->file_priv); 1441 return -EINVAL; 1442 } 1443 if (buf->pending) { 1444 DRM_ERROR("sending pending buffer %d\n", elts->idx); 1445 return -EINVAL; 1446 } 1447 1448 count = (elts->end - elts->start) / sizeof(u16); 1449 elts->start -= R128_INDEX_PRIM_OFFSET; 1450 1451 if (elts->start & 0x7) { 1452 DRM_ERROR("misaligned buffer 0x%x\n", elts->start); 1453 return -EINVAL; 1454 } 1455 if (elts->start < buf->used) { 1456 DRM_ERROR("no header 0x%x - 0x%x\n", elts->start, buf->used); 1457 return -EINVAL; 1458 } 1459 1460 buf->used = elts->end; 1461 buf_priv->prim = elts->prim; 1462 buf_priv->discard = elts->discard; 1463 1464 r128_cce_dispatch_indices(dev, buf, elts->start, elts->end, count); 1465 1466 COMMIT_RING(); 1467 return 0; 1468 } 1469 1470 static int r128_cce_blit(struct drm_device *dev, void *data, struct drm_file *file_priv) 1471 { 1472 struct drm_device_dma *dma = dev->dma; 1473 drm_r128_private_t *dev_priv = dev->dev_private; 1474 drm_r128_blit_t *blit = data; 1475 int ret; 1476 1477 LOCK_TEST_WITH_RETURN(dev, file_priv); 1478 1479 DRM_DEBUG("pid=%d index=%d\n", DRM_CURRENTPID, blit->idx); 1480 1481 if (blit->idx < 0 || blit->idx >= dma->buf_count) { 1482 DRM_ERROR("buffer index %d (of %d max)\n", 1483 blit->idx, dma->buf_count - 1); 1484 return -EINVAL; 1485 } 1486 1487 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1488 VB_AGE_TEST_WITH_RETURN(dev_priv); 1489 1490 ret = r128_cce_dispatch_blit(dev, file_priv, blit); 1491 1492 COMMIT_RING(); 1493 return ret; 1494 } 1495 1496 static int r128_cce_depth(struct drm_device *dev, void *data, struct drm_file *file_priv) 1497 { 1498 drm_r128_private_t *dev_priv = dev->dev_private; 1499 drm_r128_depth_t *depth = data; 1500 int ret; 1501 1502 LOCK_TEST_WITH_RETURN(dev, file_priv); 1503 1504 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1505 1506 ret = -EINVAL; 1507 switch (depth->func) { 1508 case R128_WRITE_SPAN: 1509 ret = r128_cce_dispatch_write_span(dev, depth); 1510 break; 1511 case R128_WRITE_PIXELS: 1512 ret = r128_cce_dispatch_write_pixels(dev, depth); 1513 break; 1514 case R128_READ_SPAN: 1515 ret = r128_cce_dispatch_read_span(dev, depth); 1516 break; 1517 case R128_READ_PIXELS: 1518 ret = r128_cce_dispatch_read_pixels(dev, depth); 1519 break; 1520 } 1521 1522 COMMIT_RING(); 1523 return ret; 1524 } 1525 1526 static int r128_cce_stipple(struct drm_device *dev, void *data, struct drm_file *file_priv) 1527 { 1528 drm_r128_private_t *dev_priv = dev->dev_private; 1529 drm_r128_stipple_t *stipple = data; 1530 u32 mask[32]; 1531 1532 LOCK_TEST_WITH_RETURN(dev, file_priv); 1533 1534 if (DRM_COPY_FROM_USER(&mask, stipple->mask, 32 * sizeof(u32))) 1535 return -EFAULT; 1536 1537 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1538 1539 r128_cce_dispatch_stipple(dev, mask); 1540 1541 COMMIT_RING(); 1542 return 0; 1543 } 1544 1545 static int r128_cce_indirect(struct drm_device *dev, void *data, struct drm_file *file_priv) 1546 { 1547 drm_r128_private_t *dev_priv = dev->dev_private; 1548 struct drm_device_dma *dma = dev->dma; 1549 struct drm_buf *buf; 1550 drm_r128_buf_priv_t *buf_priv; 1551 drm_r128_indirect_t *indirect = data; 1552 #if 0 1553 RING_LOCALS; 1554 #endif 1555 1556 LOCK_TEST_WITH_RETURN(dev, file_priv); 1557 1558 if (!dev_priv) { 1559 DRM_ERROR("called with no initialization\n"); 1560 return -EINVAL; 1561 } 1562 1563 DRM_DEBUG("idx=%d s=%d e=%d d=%d\n", 1564 indirect->idx, indirect->start, indirect->end, 1565 indirect->discard); 1566 1567 if (indirect->idx < 0 || indirect->idx >= dma->buf_count) { 1568 DRM_ERROR("buffer index %d (of %d max)\n", 1569 indirect->idx, dma->buf_count - 1); 1570 return -EINVAL; 1571 } 1572 1573 buf = dma->buflist[indirect->idx]; 1574 buf_priv = buf->dev_private; 1575 1576 if (buf->file_priv != file_priv) { 1577 DRM_ERROR("process %d using buffer owned by %p\n", 1578 DRM_CURRENTPID, buf->file_priv); 1579 return -EINVAL; 1580 } 1581 if (buf->pending) { 1582 DRM_ERROR("sending pending buffer %d\n", indirect->idx); 1583 return -EINVAL; 1584 } 1585 1586 if (indirect->start < buf->used) { 1587 DRM_ERROR("reusing indirect: start=0x%x actual=0x%x\n", 1588 indirect->start, buf->used); 1589 return -EINVAL; 1590 } 1591 1592 RING_SPACE_TEST_WITH_RETURN(dev_priv); 1593 VB_AGE_TEST_WITH_RETURN(dev_priv); 1594 1595 buf->used = indirect->end; 1596 buf_priv->discard = indirect->discard; 1597 1598 #if 0 1599 /* Wait for the 3D stream to idle before the indirect buffer 1600 * containing 2D acceleration commands is processed. 1601 */ 1602 BEGIN_RING(2); 1603 RADEON_WAIT_UNTIL_3D_IDLE(); 1604 ADVANCE_RING(); 1605 #endif 1606 1607 /* Dispatch the indirect buffer full of commands from the 1608 * X server. This is insecure and is thus only available to 1609 * privileged clients. 1610 */ 1611 r128_cce_dispatch_indirect(dev, buf, indirect->start, indirect->end); 1612 1613 COMMIT_RING(); 1614 return 0; 1615 } 1616 1617 static int r128_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv) 1618 { 1619 drm_r128_private_t *dev_priv = dev->dev_private; 1620 drm_r128_getparam_t *param = data; 1621 int value; 1622 1623 if (!dev_priv) { 1624 DRM_ERROR("called with no initialization\n"); 1625 return -EINVAL; 1626 } 1627 1628 DRM_DEBUG("pid=%d\n", DRM_CURRENTPID); 1629 1630 switch (param->param) { 1631 case R128_PARAM_IRQ_NR: 1632 value = dev->irq; 1633 break; 1634 default: 1635 return -EINVAL; 1636 } 1637 1638 if (DRM_COPY_TO_USER(param->value, &value, sizeof(int))) { 1639 DRM_ERROR("copy_to_user\n"); 1640 return -EFAULT; 1641 } 1642 1643 return 0; 1644 } 1645 1646 void r128_driver_preclose(struct drm_device * dev, struct drm_file *file_priv) 1647 { 1648 if (dev->dev_private) { 1649 drm_r128_private_t *dev_priv = dev->dev_private; 1650 if (dev_priv->page_flipping) { 1651 r128_do_cleanup_pageflip(dev); 1652 } 1653 } 1654 } 1655 1656 void r128_driver_lastclose(struct drm_device * dev) 1657 { 1658 r128_do_cleanup_cce(dev); 1659 } 1660 1661 struct drm_ioctl_desc r128_ioctls[] = { 1662 DRM_IOCTL_DEF(DRM_R128_INIT, r128_cce_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY), 1663 DRM_IOCTL_DEF(DRM_R128_CCE_START, r128_cce_start, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY), 1664 DRM_IOCTL_DEF(DRM_R128_CCE_STOP, r128_cce_stop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY), 1665 DRM_IOCTL_DEF(DRM_R128_CCE_RESET, r128_cce_reset, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY), 1666 DRM_IOCTL_DEF(DRM_R128_CCE_IDLE, r128_cce_idle, DRM_AUTH), 1667 DRM_IOCTL_DEF(DRM_R128_RESET, r128_engine_reset, DRM_AUTH), 1668 DRM_IOCTL_DEF(DRM_R128_FULLSCREEN, r128_fullscreen, DRM_AUTH), 1669 DRM_IOCTL_DEF(DRM_R128_SWAP, r128_cce_swap, DRM_AUTH), 1670 DRM_IOCTL_DEF(DRM_R128_FLIP, r128_cce_flip, DRM_AUTH), 1671 DRM_IOCTL_DEF(DRM_R128_CLEAR, r128_cce_clear, DRM_AUTH), 1672 DRM_IOCTL_DEF(DRM_R128_VERTEX, r128_cce_vertex, DRM_AUTH), 1673 DRM_IOCTL_DEF(DRM_R128_INDICES, r128_cce_indices, DRM_AUTH), 1674 DRM_IOCTL_DEF(DRM_R128_BLIT, r128_cce_blit, DRM_AUTH), 1675 DRM_IOCTL_DEF(DRM_R128_DEPTH, r128_cce_depth, DRM_AUTH), 1676 DRM_IOCTL_DEF(DRM_R128_STIPPLE, r128_cce_stipple, DRM_AUTH), 1677 DRM_IOCTL_DEF(DRM_R128_INDIRECT, r128_cce_indirect, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY), 1678 DRM_IOCTL_DEF(DRM_R128_GETPARAM, r128_getparam, DRM_AUTH), 1679 }; 1680 1681 int r128_max_ioctl = DRM_ARRAY_SIZE(r128_ioctls); 1682