/* $NetBSD: amdgpu_dce_mem_input.c,v 1.2 2021/12/18 23:45:02 riastradh Exp $ */

/*
 * Copyright 2016 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: amdgpu_dce_mem_input.c,v 1.2 2021/12/18 23:45:02 riastradh Exp $");

#include "dce_mem_input.h"
#include "reg_helper.h"
#include "basics/conversion.h"

#define CTX \
	dce_mi->base.ctx
#define REG(reg)\
	dce_mi->regs->reg

#undef FN
#define FN(reg_name, field_name) \
	dce_mi->shifts->field_name, dce_mi->masks->field_name

struct pte_setting {
	unsigned int bpp;
	unsigned int page_width;
	unsigned int page_height;
	unsigned char min_pte_before_flip_horiz_scan;
	unsigned char min_pte_before_flip_vert_scan;
	unsigned char pte_req_per_chunk;
	unsigned char param_6;
	unsigned char param_7;
	unsigned char param_8;
};

enum mi_bits_per_pixel {
	mi_bpp_8 = 0,
	mi_bpp_16,
	mi_bpp_32,
	mi_bpp_64,
	mi_bpp_count,
};

enum mi_tiling_format {
	mi_tiling_linear = 0,
	mi_tiling_1D,
	mi_tiling_2D,
	mi_tiling_count,
};

static const struct pte_setting pte_settings[mi_tiling_count][mi_bpp_count] = {
	[mi_tiling_linear] = {
		{  8, 4096, 1, 8, 0, 1, 0, 0, 0},
		{ 16, 2048, 1, 8, 0, 1, 0, 0, 0},
		{ 32, 1024, 1, 8, 0, 1, 0, 0, 0},
		{ 64,  512, 1, 8, 0, 1, 0, 0, 0}, /* new for 64bpp from HW */
	},
	[mi_tiling_1D] = {
		{  8, 512, 8, 1, 0, 1, 0, 0, 0},  /* 0 for invalid */
		{ 16, 256, 8, 2, 0, 1, 0, 0, 0},
		{ 32, 128, 8, 4, 0, 1, 0, 0, 0},
		{ 64,  64, 8, 4, 0, 1, 0, 0, 0}, /* fake */
	},
	[mi_tiling_2D] = {
		{  8, 64, 64,  8,  8, 1, 4, 0, 0},
		{ 16, 64, 32,  8, 16, 1, 8, 0, 0},
		{ 32, 32, 32, 16, 16, 1, 8, 0, 0},
		{ 64,  8, 32, 16, 16, 1, 8, 0, 0}, /* fake */
	},
};

static enum mi_bits_per_pixel get_mi_bpp(
		enum surface_pixel_format format)
{
	if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616)
		return mi_bpp_64;
	else if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB8888)
		return mi_bpp_32;
	else if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB1555)
		return mi_bpp_16;
	else
		return mi_bpp_8;
}

static enum mi_tiling_format get_mi_tiling(
		union dc_tiling_info *tiling_info)
{
	switch (tiling_info->gfx8.array_mode) {
	case DC_ARRAY_1D_TILED_THIN1:
	case DC_ARRAY_1D_TILED_THICK:
	case DC_ARRAY_PRT_TILED_THIN1:
		return mi_tiling_1D;
	case DC_ARRAY_2D_TILED_THIN1:
	case DC_ARRAY_2D_TILED_THICK:
	case DC_ARRAY_2D_TILED_X_THICK:
	case DC_ARRAY_PRT_2D_TILED_THIN1:
	case DC_ARRAY_PRT_2D_TILED_THICK:
		return mi_tiling_2D;
	case DC_ARRAY_LINEAR_GENERAL:
	case DC_ARRAY_LINEAR_ALLIGNED:
		return mi_tiling_linear;
	default:
		return mi_tiling_2D;
	}
}

static bool is_vert_scan(enum dc_rotation_angle rotation)
{
	switch (rotation) {
	case ROTATION_ANGLE_90:
	case ROTATION_ANGLE_270:
		return true;
	default:
		return false;
	}
}

static void dce_mi_program_pte_vm(
	struct mem_input *mi,
	enum surface_pixel_format format,
	union dc_tiling_info *tiling_info,
	enum dc_rotation_angle rotation)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	enum mi_bits_per_pixel mi_bpp = get_mi_bpp(format);
	enum mi_tiling_format mi_tiling = get_mi_tiling(tiling_info);
	const struct pte_setting *pte = &pte_settings[mi_tiling][mi_bpp];

	unsigned int page_width = log_2(pte->page_width);
	unsigned int page_height = log_2(pte->page_height);
	unsigned int min_pte_before_flip = is_vert_scan(rotation) ?
			pte->min_pte_before_flip_vert_scan :
			pte->min_pte_before_flip_horiz_scan;

	REG_UPDATE(GRPH_PIPE_OUTSTANDING_REQUEST_LIMIT,
			GRPH_PIPE_OUTSTANDING_REQUEST_LIMIT, 0x7f);

	REG_UPDATE_3(DVMM_PTE_CONTROL,
			DVMM_PAGE_WIDTH, page_width,
			DVMM_PAGE_HEIGHT, page_height,
			DVMM_MIN_PTE_BEFORE_FLIP, min_pte_before_flip);

	REG_UPDATE_2(DVMM_PTE_ARB_CONTROL,
			DVMM_PTE_REQ_PER_CHUNK, pte->pte_req_per_chunk,
			DVMM_MAX_PTE_REQ_OUTSTANDING, 0x7f);
}

static void program_urgency_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t urgency_low_wm,
	uint32_t urgency_high_wm)
{
	REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
		URGENCY_WATERMARK_MASK, wm_select);

	REG_SET_2(DPG_PIPE_URGENCY_CONTROL, 0,
		URGENCY_LOW_WATERMARK, urgency_low_wm,
		URGENCY_HIGH_WATERMARK, urgency_high_wm);
}

static void dce120_program_urgency_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t urgency_low_wm,
	uint32_t urgency_high_wm)
{
	REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
		URGENCY_WATERMARK_MASK, wm_select);

	REG_SET_2(DPG_PIPE_URGENCY_CONTROL, 0,
		URGENCY_LOW_WATERMARK, urgency_low_wm,
		URGENCY_HIGH_WATERMARK, urgency_high_wm);

	REG_SET_2(DPG_PIPE_URGENT_LEVEL_CONTROL, 0,
		URGENT_LEVEL_LOW_WATERMARK, urgency_low_wm,
		URGENT_LEVEL_HIGH_WATERMARK, urgency_high_wm);
}

static void program_nbp_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t nbp_wm)
{
	if (REG(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL)) {
		REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
				NB_PSTATE_CHANGE_WATERMARK_MASK, wm_select);

		REG_UPDATE_3(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
				NB_PSTATE_CHANGE_ENABLE, 1,
				NB_PSTATE_CHANGE_URGENT_DURING_REQUEST, 1,
				NB_PSTATE_CHANGE_NOT_SELF_REFRESH_DURING_REQUEST, 1);

		REG_UPDATE(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
				NB_PSTATE_CHANGE_WATERMARK, nbp_wm);
	}

	if (REG(DPG_PIPE_LOW_POWER_CONTROL)) {
		REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
				PSTATE_CHANGE_WATERMARK_MASK, wm_select);

		REG_UPDATE_3(DPG_PIPE_LOW_POWER_CONTROL,
				PSTATE_CHANGE_ENABLE, 1,
				PSTATE_CHANGE_URGENT_DURING_REQUEST, 1,
				PSTATE_CHANGE_NOT_SELF_REFRESH_DURING_REQUEST, 1);

		REG_UPDATE(DPG_PIPE_LOW_POWER_CONTROL,
				PSTATE_CHANGE_WATERMARK, nbp_wm);
	}
}

static void dce120_program_stutter_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t stutter_mark,
	uint32_t stutter_entry)
{
	REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
			STUTTER_EXIT_SELF_REFRESH_WATERMARK_MASK, wm_select);

	if (REG(DPG_PIPE_STUTTER_CONTROL2))
		REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL2,
				STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark,
				STUTTER_ENTER_SELF_REFRESH_WATERMARK, stutter_entry);
	else
		REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
				STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark,
				STUTTER_ENTER_SELF_REFRESH_WATERMARK, stutter_entry);
}

static void program_stutter_watermark(
	struct dce_mem_input *dce_mi,
	uint32_t wm_select,
	uint32_t stutter_mark)
{
	REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
			STUTTER_EXIT_SELF_REFRESH_WATERMARK_MASK, wm_select);

	if (REG(DPG_PIPE_STUTTER_CONTROL2))
		REG_UPDATE(DPG_PIPE_STUTTER_CONTROL2,
				STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark);
	else
		REG_UPDATE(DPG_PIPE_STUTTER_CONTROL,
				STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark);
}

static void dce_mi_program_display_marks(
	struct mem_input *mi,
	struct dce_watermarks nbp,
	struct dce_watermarks stutter_exit,
	struct dce_watermarks stutter_enter,
	struct dce_watermarks urgent,
	uint32_t total_dest_line_time_ns)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;

	program_urgency_watermark(dce_mi, 2, /* set a */
			urgent.a_mark, total_dest_line_time_ns);
	program_urgency_watermark(dce_mi, 1, /* set d */
			urgent.d_mark, total_dest_line_time_ns);

	REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
		STUTTER_ENABLE, stutter_en,
		STUTTER_IGNORE_FBC, 1);
	program_nbp_watermark(dce_mi, 2, nbp.a_mark); /* set a */
	program_nbp_watermark(dce_mi, 1, nbp.d_mark); /* set d */

	program_stutter_watermark(dce_mi, 2, stutter_exit.a_mark); /* set a */
	program_stutter_watermark(dce_mi, 1, stutter_exit.d_mark); /* set d */
}

static void dce112_mi_program_display_marks(struct mem_input *mi,
	struct dce_watermarks nbp,
	struct dce_watermarks stutter_exit,
	struct dce_watermarks stutter_entry,
	struct dce_watermarks urgent,
	uint32_t total_dest_line_time_ns)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;

	program_urgency_watermark(dce_mi, 0, /* set a */
			urgent.a_mark, total_dest_line_time_ns);
	program_urgency_watermark(dce_mi, 1, /* set b */
			urgent.b_mark, total_dest_line_time_ns);
	program_urgency_watermark(dce_mi, 2, /* set c */
			urgent.c_mark, total_dest_line_time_ns);
	program_urgency_watermark(dce_mi, 3, /* set d */
			urgent.d_mark, total_dest_line_time_ns);

	REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
		STUTTER_ENABLE, stutter_en,
		STUTTER_IGNORE_FBC, 1);
	program_nbp_watermark(dce_mi, 0, nbp.a_mark); /* set a */
	program_nbp_watermark(dce_mi, 1, nbp.b_mark); /* set b */
	program_nbp_watermark(dce_mi, 2, nbp.c_mark); /* set c */
	program_nbp_watermark(dce_mi, 3, nbp.d_mark); /* set d */

	program_stutter_watermark(dce_mi, 0, stutter_exit.a_mark); /* set a */
	program_stutter_watermark(dce_mi, 1, stutter_exit.b_mark); /* set b */
	program_stutter_watermark(dce_mi, 2, stutter_exit.c_mark); /* set c */
	program_stutter_watermark(dce_mi, 3, stutter_exit.d_mark); /* set d */
}

static void dce120_mi_program_display_marks(struct mem_input *mi,
	struct dce_watermarks nbp,
	struct dce_watermarks stutter_exit,
	struct dce_watermarks stutter_entry,
	struct dce_watermarks urgent,
	uint32_t total_dest_line_time_ns)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;

	dce120_program_urgency_watermark(dce_mi, 0, /* set a */
			urgent.a_mark, total_dest_line_time_ns);
	dce120_program_urgency_watermark(dce_mi, 1, /* set b */
			urgent.b_mark, total_dest_line_time_ns);
	dce120_program_urgency_watermark(dce_mi, 2, /* set c */
			urgent.c_mark, total_dest_line_time_ns);
	dce120_program_urgency_watermark(dce_mi, 3, /* set d */
			urgent.d_mark, total_dest_line_time_ns);

	REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
		STUTTER_ENABLE, stutter_en,
		STUTTER_IGNORE_FBC, 1);
	program_nbp_watermark(dce_mi, 0, nbp.a_mark); /* set a */
	program_nbp_watermark(dce_mi, 1, nbp.b_mark); /* set b */
	program_nbp_watermark(dce_mi, 2, nbp.c_mark); /* set c */
	program_nbp_watermark(dce_mi, 3, nbp.d_mark); /* set d */

	dce120_program_stutter_watermark(dce_mi, 0, stutter_exit.a_mark, stutter_entry.a_mark); /* set a */
	dce120_program_stutter_watermark(dce_mi, 1, stutter_exit.b_mark, stutter_entry.b_mark); /* set b */
	dce120_program_stutter_watermark(dce_mi, 2, stutter_exit.c_mark, stutter_entry.c_mark); /* set c */
	dce120_program_stutter_watermark(dce_mi, 3, stutter_exit.d_mark, stutter_entry.d_mark); /* set d */
}

static void program_tiling(
	struct dce_mem_input *dce_mi, const union dc_tiling_info *info)
{
	if (dce_mi->masks->GRPH_SW_MODE) { /* GFX9 */
		REG_UPDATE_6(GRPH_CONTROL,
				GRPH_SW_MODE, info->gfx9.swizzle,
				GRPH_NUM_BANKS, log_2(info->gfx9.num_banks),
				GRPH_NUM_SHADER_ENGINES, log_2(info->gfx9.num_shader_engines),
				GRPH_NUM_PIPES, log_2(info->gfx9.num_pipes),
				GRPH_COLOR_EXPANSION_MODE, 1,
				GRPH_SE_ENABLE, info->gfx9.shaderEnable);
		/* TODO: DCP0_GRPH_CONTROL__GRPH_SE_ENABLE where to get info
				GRPH_SE_ENABLE, 1,
				GRPH_Z, 0);
		 */
	}

	if (dce_mi->masks->GRPH_ARRAY_MODE) { /* GFX8 */
		REG_UPDATE_9(GRPH_CONTROL,
				GRPH_NUM_BANKS, info->gfx8.num_banks,
				GRPH_BANK_WIDTH, info->gfx8.bank_width,
				GRPH_BANK_HEIGHT, info->gfx8.bank_height,
				GRPH_MACRO_TILE_ASPECT, info->gfx8.tile_aspect,
				GRPH_TILE_SPLIT, info->gfx8.tile_split,
				GRPH_MICRO_TILE_MODE, info->gfx8.tile_mode,
				GRPH_PIPE_CONFIG, info->gfx8.pipe_config,
				GRPH_ARRAY_MODE, info->gfx8.array_mode,
				GRPH_COLOR_EXPANSION_MODE, 1);
		/* 01 - DCP_GRPH_COLOR_EXPANSION_MODE_ZEXP: zero expansion for YCbCr */
		/*
				GRPH_Z, 0);
		 */
	}
}

static void program_size_and_rotation(
	struct dce_mem_input *dce_mi,
	enum dc_rotation_angle rotation,
	const struct plane_size *plane_size)
{
	const struct rect *in_rect = &plane_size->surface_size;
	struct rect hw_rect = plane_size->surface_size;
	const uint32_t rotation_angles[ROTATION_ANGLE_COUNT] = {
			[ROTATION_ANGLE_0] = 0,
			[ROTATION_ANGLE_90] = 1,
			[ROTATION_ANGLE_180] = 2,
			[ROTATION_ANGLE_270] = 3,
	};

	if (rotation == ROTATION_ANGLE_90 || rotation == ROTATION_ANGLE_270) {
		hw_rect.x = in_rect->y;
		hw_rect.y = in_rect->x;

		hw_rect.height = in_rect->width;
		hw_rect.width = in_rect->height;
	}

	REG_SET(GRPH_X_START, 0,
			GRPH_X_START, hw_rect.x);

	REG_SET(GRPH_Y_START, 0,
			GRPH_Y_START, hw_rect.y);

	REG_SET(GRPH_X_END, 0,
			GRPH_X_END, hw_rect.width);

	REG_SET(GRPH_Y_END, 0,
			GRPH_Y_END, hw_rect.height);

	REG_SET(GRPH_PITCH, 0,
			GRPH_PITCH, plane_size->surface_pitch);

	REG_SET(HW_ROTATION, 0,
			GRPH_ROTATION_ANGLE, rotation_angles[rotation]);
}

static void program_grph_pixel_format(
	struct dce_mem_input *dce_mi,
	enum surface_pixel_format format)
{
	uint32_t red_xbar = 0, blue_xbar = 0; /* no swap */
	uint32_t grph_depth = 0, grph_format = 0;
	uint32_t sign = 0, floating = 0;

	if (format == SURFACE_PIXEL_FORMAT_GRPH_ABGR8888 ||
		/*todo: doesn't look like we handle BGRA here,
		 * should probably swap endian*/
		format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010 ||
		format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS ||
		format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F) {
		/* ABGR formats */
		red_xbar = 2;
		blue_xbar = 2;
	}

	REG_SET_2(GRPH_SWAP_CNTL, 0,
			GRPH_RED_CROSSBAR, red_xbar,
			GRPH_BLUE_CROSSBAR, blue_xbar);

	switch (format) {
	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
		grph_depth = 0;
		grph_format = 0;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
		grph_depth = 1;
		grph_format = 0;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		grph_depth = 1;
		grph_format = 1;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
		grph_depth = 2;
		grph_format = 0;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
		grph_depth = 2;
		grph_format = 1;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
		sign = 1;
		floating = 1;
		/* fall through */
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F: /* shouldn't this get float too? */
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
		grph_depth = 3;
		grph_format = 0;
		break;
	default:
		DC_ERR("unsupported grph pixel format");
		break;
	}

	REG_UPDATE_2(GRPH_CONTROL,
			GRPH_DEPTH, grph_depth,
			GRPH_FORMAT, grph_format);

	REG_UPDATE_4(PRESCALE_GRPH_CONTROL,
			GRPH_PRESCALE_SELECT, floating,
			GRPH_PRESCALE_R_SIGN, sign,
			GRPH_PRESCALE_G_SIGN, sign,
			GRPH_PRESCALE_B_SIGN, sign);
}

static void dce_mi_program_surface_config(
	struct mem_input *mi,
	enum surface_pixel_format format,
	union dc_tiling_info *tiling_info,
	struct plane_size *plane_size,
	enum dc_rotation_angle rotation,
	struct dc_plane_dcc_param *dcc,
	bool horizontal_mirror)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);

	REG_UPDATE(GRPH_ENABLE, GRPH_ENABLE, 1);

	program_tiling(dce_mi, tiling_info);
	program_size_and_rotation(dce_mi, rotation, plane_size);

	if (format >= SURFACE_PIXEL_FORMAT_GRPH_BEGIN &&
		format < SURFACE_PIXEL_FORMAT_VIDEO_BEGIN)
		program_grph_pixel_format(dce_mi, format);
}

static uint32_t get_dmif_switch_time_us(
	uint32_t h_total,
	uint32_t v_total,
	uint32_t pix_clk_khz)
{
	uint32_t frame_time;
	uint32_t pixels_per_second;
	uint32_t pixels_per_frame;
	uint32_t refresh_rate;
	const uint32_t us_in_sec = 1000000;
	const uint32_t min_single_frame_time_us = 30000;
	/*return double of frame time*/
	const uint32_t single_frame_time_multiplier = 2;

	if (!h_total || !v_total || !pix_clk_khz)
		return single_frame_time_multiplier * min_single_frame_time_us;

	/*TODO: should we use pixel format normalized pixel clock here?*/
	pixels_per_second = pix_clk_khz * 1000;
	pixels_per_frame = h_total * v_total;

	if (!pixels_per_second || !pixels_per_frame) {
		/* avoid division by zero */
		ASSERT(pixels_per_frame);
		ASSERT(pixels_per_second);
		return single_frame_time_multiplier * min_single_frame_time_us;
	}

	refresh_rate = pixels_per_second / pixels_per_frame;

	if (!refresh_rate) {
		/* avoid division by zero*/
		ASSERT(refresh_rate);
		return single_frame_time_multiplier * min_single_frame_time_us;
	}

	frame_time = us_in_sec / refresh_rate;

	if (frame_time < min_single_frame_time_us)
		frame_time = min_single_frame_time_us;

	frame_time *= single_frame_time_multiplier;

	return frame_time;
}

static void dce_mi_allocate_dmif(
	struct mem_input *mi,
	uint32_t h_total,
	uint32_t v_total,
	uint32_t pix_clk_khz,
	uint32_t total_stream_num)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	const uint32_t retry_delay = 10;
	uint32_t retry_count = get_dmif_switch_time_us(
			h_total,
			v_total,
			pix_clk_khz) / retry_delay;

	uint32_t pix_dur;
	uint32_t buffers_allocated;
	uint32_t dmif_buffer_control;

	dmif_buffer_control = REG_GET(DMIF_BUFFER_CONTROL,
			DMIF_BUFFERS_ALLOCATED, &buffers_allocated);

	if (buffers_allocated == 2)
		return;

	REG_SET(DMIF_BUFFER_CONTROL, dmif_buffer_control,
			DMIF_BUFFERS_ALLOCATED, 2);

	REG_WAIT(DMIF_BUFFER_CONTROL,
			DMIF_BUFFERS_ALLOCATION_COMPLETED, 1,
			retry_delay, retry_count);

	if (pix_clk_khz != 0) {
		pix_dur = 1000000000ULL / pix_clk_khz;

		REG_UPDATE(DPG_PIPE_ARBITRATION_CONTROL1,
			PIXEL_DURATION, pix_dur);
	}

	if (dce_mi->wa.single_head_rdreq_dmif_limit) {
		uint32_t enable = (total_stream_num > 1) ? 0 :
				dce_mi->wa.single_head_rdreq_dmif_limit;

		REG_UPDATE(MC_HUB_RDREQ_DMIF_LIMIT,
				ENABLE, enable);
	}
}

static void dce_mi_free_dmif(
	struct mem_input *mi,
	uint32_t total_stream_num)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
	uint32_t buffers_allocated;
	uint32_t dmif_buffer_control;

	dmif_buffer_control = REG_GET(DMIF_BUFFER_CONTROL,
			DMIF_BUFFERS_ALLOCATED, &buffers_allocated);

	if (buffers_allocated == 0)
		return;

	REG_SET(DMIF_BUFFER_CONTROL, dmif_buffer_control,
			DMIF_BUFFERS_ALLOCATED, 0);

	REG_WAIT(DMIF_BUFFER_CONTROL,
			DMIF_BUFFERS_ALLOCATION_COMPLETED, 1,
			10, 3500);

	if (dce_mi->wa.single_head_rdreq_dmif_limit) {
		uint32_t enable = (total_stream_num > 1) ? 0 :
				dce_mi->wa.single_head_rdreq_dmif_limit;

		REG_UPDATE(MC_HUB_RDREQ_DMIF_LIMIT,
				ENABLE, enable);
	}
}

static void program_sec_addr(
	struct dce_mem_input *dce_mi,
	PHYSICAL_ADDRESS_LOC address)
{
	/*high register MUST be programmed first*/
	REG_SET(GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0,
		GRPH_SECONDARY_SURFACE_ADDRESS_HIGH,
		address.high_part);

	REG_SET_2(GRPH_SECONDARY_SURFACE_ADDRESS, 0,
		GRPH_SECONDARY_SURFACE_ADDRESS, address.low_part >> 8,
		GRPH_SECONDARY_DFQ_ENABLE, 0);
}

static void program_pri_addr(
	struct dce_mem_input *dce_mi,
	PHYSICAL_ADDRESS_LOC address)
{
	/*high register MUST be programmed first*/
	REG_SET(GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
		GRPH_PRIMARY_SURFACE_ADDRESS_HIGH,
		address.high_part);

	REG_SET(GRPH_PRIMARY_SURFACE_ADDRESS, 0,
		GRPH_PRIMARY_SURFACE_ADDRESS,
		address.low_part >> 8);
}

static bool dce_mi_is_flip_pending(struct mem_input *mem_input)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mem_input);
	uint32_t update_pending;

	REG_GET(GRPH_UPDATE, GRPH_SURFACE_UPDATE_PENDING, &update_pending);
	if (update_pending)
		return true;

	mem_input->current_address = mem_input->request_address;
	return false;
}

static bool dce_mi_program_surface_flip_and_addr(
	struct mem_input *mem_input,
	const struct dc_plane_address *address,
	bool flip_immediate)
{
	struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mem_input);

	REG_UPDATE(GRPH_UPDATE, GRPH_UPDATE_LOCK, 1);

	REG_UPDATE(
		GRPH_FLIP_CONTROL,
		GRPH_SURFACE_UPDATE_H_RETRACE_EN, flip_immediate ? 1 : 0);

	switch (address->type) {
	case PLN_ADDR_TYPE_GRAPHICS:
		if (address->grph.addr.quad_part == 0)
			break;
		program_pri_addr(dce_mi, address->grph.addr);
		break;
	case PLN_ADDR_TYPE_GRPH_STEREO:
		if (address->grph_stereo.left_addr.quad_part == 0 ||
			address->grph_stereo.right_addr.quad_part == 0)
			break;
		program_pri_addr(dce_mi, address->grph_stereo.left_addr);
		program_sec_addr(dce_mi, address->grph_stereo.right_addr);
		break;
	default:
		/* not supported */
		BREAK_TO_DEBUGGER();
		break;
	}

	mem_input->request_address = *address;

	if (flip_immediate)
		mem_input->current_address = *address;

	REG_UPDATE(GRPH_UPDATE, GRPH_UPDATE_LOCK, 0);

	return true;
}

static const struct mem_input_funcs dce_mi_funcs = {
	.mem_input_program_display_marks = dce_mi_program_display_marks,
	.allocate_mem_input = dce_mi_allocate_dmif,
	.free_mem_input = dce_mi_free_dmif,
	.mem_input_program_surface_flip_and_addr =
			dce_mi_program_surface_flip_and_addr,
	.mem_input_program_pte_vm = dce_mi_program_pte_vm,
	.mem_input_program_surface_config =
			dce_mi_program_surface_config,
	.mem_input_is_flip_pending = dce_mi_is_flip_pending
};

static const struct mem_input_funcs dce112_mi_funcs = {
	.mem_input_program_display_marks = dce112_mi_program_display_marks,
	.allocate_mem_input = dce_mi_allocate_dmif,
	.free_mem_input = dce_mi_free_dmif,
	.mem_input_program_surface_flip_and_addr =
			dce_mi_program_surface_flip_and_addr,
	.mem_input_program_pte_vm = dce_mi_program_pte_vm,
	.mem_input_program_surface_config =
			dce_mi_program_surface_config,
	.mem_input_is_flip_pending = dce_mi_is_flip_pending
};

static const struct mem_input_funcs dce120_mi_funcs = {
	.mem_input_program_display_marks = dce120_mi_program_display_marks,
	.allocate_mem_input = dce_mi_allocate_dmif,
	.free_mem_input = dce_mi_free_dmif,
	.mem_input_program_surface_flip_and_addr =
			dce_mi_program_surface_flip_and_addr,
	.mem_input_program_pte_vm = dce_mi_program_pte_vm,
	.mem_input_program_surface_config =
			dce_mi_program_surface_config,
	.mem_input_is_flip_pending = dce_mi_is_flip_pending
};

void dce_mem_input_construct(
	struct dce_mem_input *dce_mi,
	struct dc_context *ctx,
	int inst,
	const struct dce_mem_input_registers *regs,
	const struct dce_mem_input_shift *mi_shift,
	const struct dce_mem_input_mask *mi_mask)
{
	dce_mi->base.ctx = ctx;

	dce_mi->base.inst = inst;
	dce_mi->base.funcs = &dce_mi_funcs;

	dce_mi->regs = regs;
	dce_mi->shifts = mi_shift;
	dce_mi->masks = mi_mask;
}

void dce112_mem_input_construct(
	struct dce_mem_input *dce_mi,
	struct dc_context *ctx,
	int inst,
	const struct dce_mem_input_registers *regs,
	const struct dce_mem_input_shift *mi_shift,
	const struct dce_mem_input_mask *mi_mask)
{
	dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
	dce_mi->base.funcs = &dce112_mi_funcs;
}

void dce120_mem_input_construct(
	struct dce_mem_input *dce_mi,
	struct dc_context *ctx,
	int inst,
	const struct dce_mem_input_registers *regs,
	const struct dce_mem_input_shift *mi_shift,
	const struct dce_mem_input_mask *mi_mask)
{
	dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
	dce_mi->base.funcs = &dce120_mi_funcs;
}