Lines matching refs: rdev (radeon r600.c)

113 int r600_debugfs_mc_info_init(struct radeon_device *rdev);
116 int r600_mc_wait_for_idle(struct radeon_device *rdev);
117 static void r600_gpu_init(struct radeon_device *rdev);
118 void r600_fini(struct radeon_device *rdev);
119 void r600_irq_disable(struct radeon_device *rdev);
120 static void r600_pcie_gen2_enable(struct radeon_device *rdev);
121 extern int evergreen_rlc_resume(struct radeon_device *rdev);
122 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
127 u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg)
132 spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
135 spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
139 void r600_rcu_wreg(struct radeon_device *rdev, u32 reg, u32 v)
143 spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
146 spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
149 u32 r600_uvd_ctx_rreg(struct radeon_device *rdev, u32 reg)
154 spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
157 spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
161 void r600_uvd_ctx_wreg(struct radeon_device *rdev, u32 reg, u32 v)
165 spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
168 spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
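
The four accessors above (r600_rcu_rreg/wreg and r600_uvd_ctx_rreg/wreg) share one idiom: take a spinlock with interrupts off, program an index register, then touch the paired data register. A minimal sketch of that pattern, assuming hypothetical mmio helpers and offsets (IDX_REG/DATA_REG are illustrative stand-ins, not the driver's real ports):

    #include <linux/spinlock.h>
    #include <linux/types.h>

    #define IDX_REG  0x00    /* hypothetical index select port */
    #define DATA_REG 0x04    /* hypothetical data port */

    extern u32 mmio_read(u32 reg);
    extern void mmio_write(u32 reg, u32 v);

    static DEFINE_SPINLOCK(idx_lock);

    static u32 indexed_rreg(u32 reg)
    {
        unsigned long flags;
        u32 v;

        spin_lock_irqsave(&idx_lock, flags);  /* index+data must stay paired */
        mmio_write(IDX_REG, reg);             /* select the indirect register */
        v = mmio_read(DATA_REG);              /* read it through the data port */
        spin_unlock_irqrestore(&idx_lock, flags);
        return v;
    }

The write side is symmetric: select the index, write DATA_REG, unlock.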
174 * @rdev: radeon_device pointer
181 int r600_get_allowed_info_register(struct radeon_device *rdev,
200 * @rdev: radeon_device pointer
205 u32 r600_get_xclk(struct radeon_device *rdev)
207 return rdev->clock.spll.reference_freq;
210 int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
224 if (rdev->family >= CHIP_RS780)
234 if (rdev->clock.spll.reference_freq == 10000)
239 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000,
245 if (rdev->family >= CHIP_RV670 && rdev->family < CHIP_RS780)
250 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
258 if (rdev->family >= CHIP_RS780)
286 if (rdev->family >= CHIP_RS780)
289 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
306 struct radeon_device *rdev = dev->dev_private;
357 int rv6xx_get_temp(struct radeon_device *rdev)
369 void r600_pm_get_dynpm_state(struct radeon_device *rdev)
373 rdev->pm.dynpm_can_upclock = true;
374 rdev->pm.dynpm_can_downclock = true;
377 if ((rdev->flags & RADEON_IS_IGP) || (rdev->family == CHIP_R600)) {
380 if (rdev->pm.num_power_states > 2)
383 switch (rdev->pm.dynpm_planned_action) {
385 rdev->pm.requested_power_state_index = min_power_state_index;
386 rdev->pm.requested_clock_mode_index = 0;
387 rdev->pm.dynpm_can_downclock = false;
390 if (rdev->pm.current_power_state_index == min_power_state_index) {
391 rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
392 rdev->pm.dynpm_can_downclock = false;
394 if (rdev->pm.active_crtc_count > 1) {
395 for (i = 0; i < rdev->pm.num_power_states; i++) {
396 if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
398 else if (i >= rdev->pm.current_power_state_index) {
399 rdev->pm.requested_power_state_index =
400 rdev->pm.current_power_state_index;
403 rdev->pm.requested_power_state_index = i;
408 if (rdev->pm.current_power_state_index == 0)
409 rdev->pm.requested_power_state_index =
410 rdev->pm.num_power_states - 1;
412 rdev->pm.requested_power_state_index =
413 rdev->pm.current_power_state_index - 1;
416 rdev->pm.requested_clock_mode_index = 0;
418 if ((rdev->pm.active_crtc_count > 0) &&
419 (rdev->pm.power_state[rdev->pm.requested_power_state_index].
420 clock_info[rdev->pm.requested_clock_mode_index].flags &
422 rdev->pm.requested_power_state_index++;
426 if (rdev->pm.current_power_state_index == (rdev->pm.num_power_states - 1)) {
427 rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
428 rdev->pm.dynpm_can_upclock = false;
430 if (rdev->pm.active_crtc_count > 1) {
431 for (i = (rdev->pm.num_power_states - 1); i >= 0; i--) {
432 if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
434 else if (i <= rdev->pm.current_power_state_index) {
435 rdev->pm.requested_power_state_index =
436 rdev->pm.current_power_state_index;
439 rdev->pm.requested_power_state_index = i;
444 rdev->pm.requested_power_state_index =
445 rdev->pm.current_power_state_index + 1;
447 rdev->pm.requested_clock_mode_index = 0;
450 rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
451 rdev->pm.requested_clock_mode_index = 0;
452 rdev->pm.dynpm_can_upclock = false;
463 if (rdev->pm.active_crtc_count > 1) {
464 rdev->pm.requested_power_state_index = -1;
466 for (i = 1; i < rdev->pm.num_power_states; i++) {
467 if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
469 else if ((rdev->pm.power_state[i].type == POWER_STATE_TYPE_PERFORMANCE) ||
470 (rdev->pm.power_state[i].type == POWER_STATE_TYPE_BATTERY)) {
471 rdev->pm.requested_power_state_index = i;
476 if (rdev->pm.requested_power_state_index == -1)
477 rdev->pm.requested_power_state_index = 0;
479 rdev->pm.requested_power_state_index = 1;
481 switch (rdev->pm.dynpm_planned_action) {
483 rdev->pm.requested_clock_mode_index = 0;
484 rdev->pm.dynpm_can_downclock = false;
487 if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
488 if (rdev->pm.current_clock_mode_index == 0) {
489 rdev->pm.requested_clock_mode_index = 0;
490 rdev->pm.dynpm_can_downclock = false;
492 rdev->pm.requested_clock_mode_index =
493 rdev->pm.current_clock_mode_index - 1;
495 rdev->pm.requested_clock_mode_index = 0;
496 rdev->pm.dynpm_can_downclock = false;
499 if ((rdev->pm.active_crtc_count > 0) &&
500 (rdev->pm.power_state[rdev->pm.requested_power_state_index].
501 clock_info[rdev->pm.requested_clock_mode_index].flags &
503 rdev->pm.requested_clock_mode_index++;
507 if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
508 if (rdev->pm.current_clock_mode_index ==
509 (rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1)) {
510 rdev->pm.requested_clock_mode_index = rdev->pm.current_clock_mode_index;
511 rdev->pm.dynpm_can_upclock = false;
513 rdev->pm.requested_clock_mode_index =
514 rdev->pm.current_clock_mode_index + 1;
516 rdev->pm.requested_clock_mode_index =
517 rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1;
518 rdev->pm.dynpm_can_upclock = false;
522 rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
523 rdev->pm.requested_clock_mode_index = 0;
524 rdev->pm.dynpm_can_upclock = false;
534 rdev->pm.power_state[rdev->pm.requested_power_state_index].
535 clock_info[rdev->pm.requested_clock_mode_index].sclk,
536 rdev->pm.power_state[rdev->pm.requested_power_state_index].
537 clock_info[rdev->pm.requested_clock_mode_index].mclk,
538 rdev->pm.power_state[rdev->pm.requested_power_state_index].
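
r600_pm_get_dynpm_state walks the power-state array one step up or down, clamping at the ends and skipping RADEON_PM_STATE_SINGLE_DISPLAY_ONLY states when more than one CRTC is active. A compressed model of the downclock branch, assuming the minimum state index is 0 (the driver sometimes pins it at 1) and using simplified stand-in types:

    #include <stdbool.h>

    struct pm_state { unsigned flags; };
    #define SINGLE_DISPLAY_ONLY 0x1  /* stand-in for the driver's flag */

    static int pick_downclock(const struct pm_state *st, int nstates,
                              int cur, int active_crtcs, bool *can_downclock)
    {
        int i;

        *can_downclock = true;
        if (cur == 0) {                 /* already at the lowest state */
            *can_downclock = false;
            return cur;
        }
        if (active_crtcs > 1) {
            /* scan from the bottom, skipping single-display-only states */
            for (i = 0; i < nstates; i++) {
                if (st[i].flags & SINGLE_DISPLAY_ONLY)
                    continue;
                if (i >= cur) {         /* nothing lower is usable */
                    *can_downclock = false;
                    return cur;
                }
                return i;               /* lowest usable state below cur */
            }
        }
        return cur - 1;                 /* single display: just step down */
    }

The upclock branch mirrors this with the scan direction reversed.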
542 void rs780_pm_init_profile(struct radeon_device *rdev)
544 if (rdev->pm.num_power_states == 2) {
546 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
547 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
548 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
549 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
551 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0;
552 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
553 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
554 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
556 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
557 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0;
558 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
559 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
561 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
562 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
563 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
564 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
566 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0;
567 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
568 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
569 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
571 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
572 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
573 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
574 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
576 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
577 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1;
578 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
579 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
580 } else if (rdev->pm.num_power_states == 3) {
582 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
583 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
584 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
585 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
587 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
588 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
589 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
590 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
592 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
593 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
594 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
595 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
597 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
598 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2;
599 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
600 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
602 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 1;
603 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1;
604 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
605 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
607 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 1;
608 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 1;
609 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
610 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
612 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1;
613 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
614 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
615 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
618 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
619 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
620 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
621 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
623 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 2;
624 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2;
625 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
626 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
628 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 2;
629 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 2;
630 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
631 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
633 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2;
634 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3;
635 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
636 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
638 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
639 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
640 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
641 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
643 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
644 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
645 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
646 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
648 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
649 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3;
650 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
651 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
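
The long runs of assignments in rs780_pm_init_profile amount to filling a seven-entry table per case. A data-driven sketch equivalent to the two-power-state case above, with hypothetical names (-1 marks "use the default power state index"); a condensation for illustration, not the driver's layout:

    struct profile { int off_ps, on_ps, off_cm, on_cm; };
    enum { DEF, LOW_SH, MID_SH, HIGH_SH, LOW_MH, MID_MH, HIGH_MH, NPROF };

    static void init_profiles_2states(struct profile p[NPROF], int def_ps)
    {
        static const struct profile tmpl[NPROF] = {
            [DEF]     = { -1, -1, 0, 0 },
            [LOW_SH]  = {  0,  0, 0, 0 },
            [MID_SH]  = {  0,  0, 0, 0 },
            [HIGH_SH] = {  0,  1, 0, 0 },   /* high profiles turn on ps 1 */
            [LOW_MH]  = {  0,  0, 0, 0 },
            [MID_MH]  = {  0,  0, 0, 0 },
            [HIGH_MH] = {  0,  1, 0, 0 },
        };
        int i;

        for (i = 0; i < NPROF; i++) {
            p[i] = tmpl[i];
            if (p[i].off_ps < 0) p[i].off_ps = def_ps;  /* resolve defaults */
            if (p[i].on_ps  < 0) p[i].on_ps  = def_ps;
        }
    }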
655 void r600_pm_init_profile(struct radeon_device *rdev)
659 if (rdev->family == CHIP_R600) {
662 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
663 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
664 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
665 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
667 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
668 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
669 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
670 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
672 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
673 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
674 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
675 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
677 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
678 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
679 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
680 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
682 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
683 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
684 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
685 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
687 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
688 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
689 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
690 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
692 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
693 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
694 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
695 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
697 if (rdev->pm.num_power_states < 4) {
699 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
700 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
701 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
702 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
704 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
705 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
706 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
707 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
709 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
710 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
711 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
712 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
714 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
715 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
716 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
717 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
719 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
720 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2;
721 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
722 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
724 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
725 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 2;
726 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
727 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
729 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
730 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
731 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
732 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
735 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
736 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
737 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
738 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
740 if (rdev->flags & RADEON_IS_MOBILITY)
741 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
743 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
744 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
745 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
746 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
747 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
749 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
750 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
751 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
752 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
754 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
755 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
756 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
757 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
758 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
760 if (rdev->flags & RADEON_IS_MOBILITY)
761 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
763 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
764 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
765 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
766 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
767 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
769 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
770 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
771 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
772 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
774 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
775 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
776 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
777 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
778 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
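
The non-R600 branch of r600_pm_init_profile resolves profile indices through radeon_pm_get_type_index(rdev, type, instance). A plausible model of what such a lookup does, assuming it falls back to a default index when no matching state exists (a sketch, not the driver's exact code):

    struct typed_state { int type; };

    static int get_type_index(const struct typed_state *st, int nstates,
                              int type, int instance, int def_idx)
    {
        int i, found = 0;

        for (i = 0; i < nstates; i++)
            if (st[i].type == type && found++ == instance)
                return i;       /* the instance-th state of this type */
        return def_idx;         /* type/instance not present */
    }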
783 void r600_pm_misc(struct radeon_device *rdev)
785 int req_ps_idx = rdev->pm.requested_power_state_index;
786 int req_cm_idx = rdev->pm.requested_clock_mode_index;
787 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
794 if (voltage->voltage != rdev->pm.current_vddc) {
795 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
796 rdev->pm.current_vddc = voltage->voltage;
802 bool r600_gui_idle(struct radeon_device *rdev)
811 bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
815 if (ASIC_IS_DCE3(rdev)) {
866 void r600_hpd_set_polarity(struct radeon_device *rdev,
870 bool connected = r600_hpd_sense(rdev, hpd);
872 if (ASIC_IS_DCE3(rdev)) {
958 void r600_hpd_init(struct radeon_device *rdev)
960 struct drm_device *dev = rdev->ddev;
975 if (ASIC_IS_DCE3(rdev)) {
977 if (ASIC_IS_DCE32(rdev))
1020 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1022 radeon_irq_kms_enable_hpd(rdev, enable);
1025 void r600_hpd_fini(struct radeon_device *rdev)
1027 struct drm_device *dev = rdev->ddev;
1033 if (ASIC_IS_DCE3(rdev)) {
1075 radeon_irq_kms_disable_hpd(rdev, disable);
1103 void r600_pcie_gart_tlb_flush(struct radeon_device *rdev)
1109 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
1110 !(rdev->flags & RADEON_IS_AGP)) {
1111 void __iomem *ptr = rdev->gart.ptr;
1123 WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12);
1124 WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12);
1126 for (i = 0; i < rdev->usec_timeout; i++) {
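
The loop at 1126 is an instance of the bounded poll-until-idle idiom that also appears in r600_mc_wait_for_idle, r600_ring_test and r600_ib_test: busy-wait in 1 us steps up to rdev->usec_timeout. A self-contained sketch, with read_status() as a hypothetical stand-in for the register read:

    #include <linux/delay.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    extern u32 read_status(void);   /* hypothetical status register read */

    static int wait_for_idle(unsigned usec_timeout)
    {
        unsigned i;

        for (i = 0; i < usec_timeout; i++) {
            if (read_status() == 0) /* engine reports idle */
                return 0;
            udelay(1);              /* bounded 1 us busy-wait step */
        }
        return -ETIMEDOUT;          /* caller logs a warning and recovers */
    }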
1146 int r600_pcie_gart_init(struct radeon_device *rdev)
1150 if (rdev->gart.robj) {
1155 r = radeon_gart_init(rdev);
1158 rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;
1159 return radeon_gart_table_vram_alloc(rdev);
1162 static int r600_pcie_gart_enable(struct radeon_device *rdev)
1167 if (rdev->gart.robj == NULL) {
1168 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
1171 r = radeon_gart_table_vram_pin(rdev);
1202 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
1203 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
1204 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
1208 (u32)(rdev->dummy_page.addr >> 12));
1212 r600_pcie_gart_tlb_flush(rdev);
1214 (unsigned)(rdev->mc.gtt_size >> 20),
1215 (unsigned long long)rdev->gart.table_addr);
1216 rdev->gart.ready = true;
1220 static void r600_pcie_gart_disable(struct radeon_device *rdev)
1252 radeon_gart_table_vram_unpin(rdev);
1255 static void r600_pcie_gart_fini(struct radeon_device *rdev)
1257 radeon_gart_fini(rdev);
1258 r600_pcie_gart_disable(rdev);
1259 radeon_gart_table_vram_free(rdev);
1262 static void r600_agp_enable(struct radeon_device *rdev)
1296 int r600_mc_wait_for_idle(struct radeon_device *rdev)
1301 for (i = 0; i < rdev->usec_timeout; i++) {
1311 uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg)
1316 spin_lock_irqsave(&rdev->mc_idx_lock, flags);
1320 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
1324 void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
1328 spin_lock_irqsave(&rdev->mc_idx_lock, flags);
1333 spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
1336 static void r600_mc_program(struct radeon_device *rdev)
1352 rv515_mc_stop(rdev, &save);
1353 if (r600_mc_wait_for_idle(rdev)) {
1354 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1359 if (rdev->flags & RADEON_IS_AGP) {
1360 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
1363 rdev->mc.vram_start >> 12);
1365 rdev->mc.gtt_end >> 12);
1369 rdev->mc.gtt_start >> 12);
1371 rdev->mc.vram_end >> 12);
1374 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start >> 12);
1375 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end >> 12);
1377 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
1378 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
1379 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
1381 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
1384 if (rdev->flags & RADEON_IS_AGP) {
1385 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22);
1386 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22);
1387 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
1393 if (r600_mc_wait_for_idle(rdev)) {
1394 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1396 rv515_mc_resume(rdev, &save);
1399 rv515_vga_render_disable(rdev);
1404 * @rdev: radeon device structure holding all necessary informations
1423 static void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
1429 dev_warn(rdev->dev, "limiting VRAM\n");
1433 if (rdev->flags & RADEON_IS_AGP) {
1438 dev_warn(rdev->dev, "limiting VRAM\n");
1445 dev_warn(rdev->dev, "limiting VRAM\n");
1452 dev_info(rdev->dev, "VRAM: %"PRIu64"M 0x%08"PRIX64" - 0x%08"PRIX64" (%"PRIu64"M used)\n",
1457 if (rdev->flags & RADEON_IS_IGP) {
1461 radeon_vram_location(rdev, &rdev->mc, base);
1462 rdev->mc.gtt_base_align = 0;
1463 radeon_gtt_location(rdev, mc);
1467 static int r600_mc_init(struct radeon_device *rdev)
1475 rdev->mc.vram_is_ddr = true;
1500 rdev->mc.vram_width = numchan * chansize;
1502 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
1503 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
1505 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
1506 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
1507 rdev->mc.visible_vram_size = rdev->mc.aper_size;
1508 r600_vram_gtt_location(rdev, &rdev->mc);
1510 if (rdev->flags & RADEON_IS_IGP) {
1511 rs690_pm_info(rdev);
1512 rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
1514 if (rdev->family == CHIP_RS780 || rdev->family == CHIP_RS880) {
1516 rdev->fastfb_working = false;
1521 if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL)
1527 if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) {
1529 (unsigned long long)rdev->mc.aper_base, k8_addr);
1530 rdev->mc.aper_base = (resource_size_t)k8_addr;
1531 rdev->fastfb_working = true;
1537 radeon_update_bandwidth_info(rdev);
1541 int r600_vram_scratch_init(struct radeon_device *rdev)
1545 if (rdev->vram_scratch.robj == NULL) {
1546 r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE,
1548 0, NULL, NULL, &rdev->vram_scratch.robj);
1554 r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
1557 r = radeon_bo_pin(rdev->vram_scratch.robj,
1558 RADEON_GEM_DOMAIN_VRAM, &rdev->vram_scratch.gpu_addr);
1560 radeon_bo_unreserve(rdev->vram_scratch.robj);
1563 r = radeon_bo_kmap(rdev->vram_scratch.robj,
1564 (void **)__UNVOLATILE(&rdev->vram_scratch.ptr));
1566 radeon_bo_unpin(rdev->vram_scratch.robj);
1567 radeon_bo_unreserve(rdev->vram_scratch.robj);
1572 void r600_vram_scratch_fini(struct radeon_device *rdev)
1576 if (rdev->vram_scratch.robj == NULL) {
1579 r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
1581 radeon_bo_kunmap(rdev->vram_scratch.robj);
1582 radeon_bo_unpin(rdev->vram_scratch.robj);
1583 radeon_bo_unreserve(rdev->vram_scratch.robj);
1585 radeon_bo_unref(&rdev->vram_scratch.robj);
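
r600_vram_scratch_init/fini follow the driver's standard buffer-object lifecycle: create, reserve, pin, kmap, and unwind in reverse order on error. A sketch of that ordering with hypothetical bo_* helpers (the real radeon_bo_* calls take more parameters):

    #include <stdint.h>
    #include <stddef.h>

    struct bo;  /* opaque buffer object; bo_* helpers are stand-ins */
    extern int bo_create(size_t size, int domain, struct bo **out);
    extern int bo_reserve(struct bo *bo);
    extern void bo_unreserve(struct bo *bo);
    extern int bo_pin(struct bo *bo, int domain, uint64_t *gpu_addr);
    extern void bo_unpin(struct bo *bo);
    extern int bo_kmap(struct bo *bo, void **cpu_ptr);
    #define DOMAIN_VRAM 1
    #define GPU_PAGE 4096

    static int scratch_init(struct bo **robj, void **ptr, uint64_t *gpu_addr)
    {
        int r;

        r = bo_create(GPU_PAGE, DOMAIN_VRAM, robj);
        if (r)
            return r;
        r = bo_reserve(*robj);
        if (r)
            return r;
        r = bo_pin(*robj, DOMAIN_VRAM, gpu_addr);   /* fix the GPU address */
        if (r) {
            bo_unreserve(*robj);
            return r;
        }
        r = bo_kmap(*robj, ptr);                    /* CPU view of the page */
        if (r)
            bo_unpin(*robj);
        bo_unreserve(*robj);    /* pin and mapping survive unreserve */
        return r;
    }

fini runs the same steps backwards: reserve, kunmap, unpin, unreserve, unref.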
1588 void r600_set_bios_scratch_engine_hung(struct radeon_device *rdev, bool hung)
1600 static void r600_print_gpu_status_regs(struct radeon_device *rdev)
1602 dev_info(rdev->dev, " R_008010_GRBM_STATUS = 0x%08X\n",
1604 dev_info(rdev->dev, " R_008014_GRBM_STATUS2 = 0x%08X\n",
1606 dev_info(rdev->dev, " R_000E50_SRBM_STATUS = 0x%08X\n",
1608 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
1610 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
1612 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
1614 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
1616 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
1620 static bool r600_is_display_hung(struct radeon_device *rdev)
1626 for (i = 0; i < rdev->num_crtc; i++) {
1634 for (i = 0; i < rdev->num_crtc; i++) {
1649 u32 r600_gpu_check_soft_reset(struct radeon_device *rdev)
1656 if (rdev->family >= CHIP_RV770) {
1706 if (r600_is_display_hung(rdev))
1718 static void r600_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
1727 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
1729 r600_print_gpu_status_regs(rdev);
1732 if (rdev->family >= CHIP_RV770)
1749 rv515_mc_stop(rdev, &save);
1750 if (r600_mc_wait_for_idle(rdev)) {
1751 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1755 if (rdev->family >= CHIP_RV770)
1791 if (rdev->family >= CHIP_RV770)
1809 if (!(rdev->flags & RADEON_IS_IGP)) {
1820 dev_info(rdev->dev, "R_008020_GRBM_SOFT_RESET=0x%08X\n", tmp);
1834 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
1848 rv515_mc_resume(rdev, &save);
1851 r600_print_gpu_status_regs(rdev);
1854 static void r600_gpu_pci_config_reset(struct radeon_device *rdev)
1859 dev_info(rdev->dev, "GPU pci config reset\n");
1864 if (rdev->family >= CHIP_RV770)
1880 if (rdev->family >= CHIP_RV770)
1881 rv770_set_clk_bypass_mode(rdev);
1883 pci_clear_master(rdev->pdev);
1885 rv515_mc_stop(rdev, &save);
1886 if (r600_mc_wait_for_idle(rdev)) {
1887 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1898 radeon_pci_config_reset(rdev);
1908 for (i = 0; i < rdev->usec_timeout; i++) {
1915 int r600_asic_reset(struct radeon_device *rdev, bool hard)
1920 r600_gpu_pci_config_reset(rdev);
1924 reset_mask = r600_gpu_check_soft_reset(rdev);
1927 r600_set_bios_scratch_engine_hung(rdev, true);
1930 r600_gpu_soft_reset(rdev, reset_mask);
1932 reset_mask = r600_gpu_check_soft_reset(rdev);
1936 r600_gpu_pci_config_reset(rdev);
1938 reset_mask = r600_gpu_check_soft_reset(rdev);
1941 r600_set_bios_scratch_engine_hung(rdev, false);
1949 * @rdev: radeon_device pointer
1955 bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
1957 u32 reset_mask = r600_gpu_check_soft_reset(rdev);
1962 radeon_ring_lockup_update(rdev, ring);
1965 return radeon_ring_test_lockup(rdev, ring);
1968 u32 r6xx_remap_render_backend(struct radeon_device *rdev,
1992 if (rdev->family <= CHIP_RV740) {
2023 static void r600_gpu_init(struct radeon_device *rdev)
2038 rdev->config.r600.tiling_group_size = 256;
2039 switch (rdev->family) {
2041 rdev->config.r600.max_pipes = 4;
2042 rdev->config.r600.max_tile_pipes = 8;
2043 rdev->config.r600.max_simds = 4;
2044 rdev->config.r600.max_backends = 4;
2045 rdev->config.r600.max_gprs = 256;
2046 rdev->config.r600.max_threads = 192;
2047 rdev->config.r600.max_stack_entries = 256;
2048 rdev->config.r600.max_hw_contexts = 8;
2049 rdev->config.r600.max_gs_threads = 16;
2050 rdev->config.r600.sx_max_export_size = 128;
2051 rdev->config.r600.sx_max_export_pos_size = 16;
2052 rdev->config.r600.sx_max_export_smx_size = 128;
2053 rdev->config.r600.sq_num_cf_insts = 2;
2057 rdev->config.r600.max_pipes = 2;
2058 rdev->config.r600.max_tile_pipes = 2;
2059 rdev->config.r600.max_simds = 3;
2060 rdev->config.r600.max_backends = 1;
2061 rdev->config.r600.max_gprs = 128;
2062 rdev->config.r600.max_threads = 192;
2063 rdev->config.r600.max_stack_entries = 128;
2064 rdev->config.r600.max_hw_contexts = 8;
2065 rdev->config.r600.max_gs_threads = 4;
2066 rdev->config.r600.sx_max_export_size = 128;
2067 rdev->config.r600.sx_max_export_pos_size = 16;
2068 rdev->config.r600.sx_max_export_smx_size = 128;
2069 rdev->config.r600.sq_num_cf_insts = 2;
2075 rdev->config.r600.max_pipes = 1;
2076 rdev->config.r600.max_tile_pipes = 1;
2077 rdev->config.r600.max_simds = 2;
2078 rdev->config.r600.max_backends = 1;
2079 rdev->config.r600.max_gprs = 128;
2080 rdev->config.r600.max_threads = 192;
2081 rdev->config.r600.max_stack_entries = 128;
2082 rdev->config.r600.max_hw_contexts = 4;
2083 rdev->config.r600.max_gs_threads = 4;
2084 rdev->config.r600.sx_max_export_size = 128;
2085 rdev->config.r600.sx_max_export_pos_size = 16;
2086 rdev->config.r600.sx_max_export_smx_size = 128;
2087 rdev->config.r600.sq_num_cf_insts = 1;
2090 rdev->config.r600.max_pipes = 4;
2091 rdev->config.r600.max_tile_pipes = 4;
2092 rdev->config.r600.max_simds = 4;
2093 rdev->config.r600.max_backends = 4;
2094 rdev->config.r600.max_gprs = 192;
2095 rdev->config.r600.max_threads = 192;
2096 rdev->config.r600.max_stack_entries = 256;
2097 rdev->config.r600.max_hw_contexts = 8;
2098 rdev->config.r600.max_gs_threads = 16;
2099 rdev->config.r600.sx_max_export_size = 128;
2100 rdev->config.r600.sx_max_export_pos_size = 16;
2101 rdev->config.r600.sx_max_export_smx_size = 128;
2102 rdev->config.r600.sq_num_cf_insts = 2;
2122 switch (rdev->config.r600.max_tile_pipes) {
2138 rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes;
2139 rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
2154 tmp = rdev->config.r600.max_simds -
2156 rdev->config.r600.active_simds = tmp;
2160 for (i = 0; i < rdev->config.r600.max_backends; i++)
2164 for (i = 0; i < rdev->config.r600.max_backends; i++)
2168 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.r600.max_backends,
2171 rdev->config.r600.backend_map = tmp;
2173 rdev->config.r600.tile_config = tiling_config;
2190 if (rdev->family == CHIP_RV670)
2195 if ((rdev->family > CHIP_R600))
2199 if (((rdev->family) == CHIP_R600) ||
2200 ((rdev->family) == CHIP_RV630) ||
2201 ((rdev->family) == CHIP_RV610) ||
2202 ((rdev->family) == CHIP_RV620) ||
2203 ((rdev->family) == CHIP_RS780) ||
2204 ((rdev->family) == CHIP_RS880)) {
2219 if (((rdev->family) == CHIP_RV610) ||
2220 ((rdev->family) == CHIP_RV620) ||
2221 ((rdev->family) == CHIP_RS780) ||
2222 ((rdev->family) == CHIP_RS880)) {
2227 } else if (((rdev->family) == CHIP_R600) ||
2228 ((rdev->family) == CHIP_RV630)) {
2249 if ((rdev->family) == CHIP_R600) {
2263 } else if (((rdev->family) == CHIP_RV610) ||
2264 ((rdev->family) == CHIP_RV620) ||
2265 ((rdev->family) == CHIP_RS780) ||
2266 ((rdev->family) == CHIP_RS880)) {
2283 } else if (((rdev->family) == CHIP_RV630) ||
2284 ((rdev->family) == CHIP_RV635)) {
2298 } else if ((rdev->family) == CHIP_RV670) {
2321 if (((rdev->family) == CHIP_RV610) ||
2322 ((rdev->family) == CHIP_RV620) ||
2323 ((rdev->family) == CHIP_RS780) ||
2324 ((rdev->family) == CHIP_RS880)) {
2347 tmp = rdev->config.r600.max_pipes * 16;
2348 switch (rdev->family) {
2391 switch (rdev->family) {
2429 u32 r600_pciep_rreg(struct radeon_device *rdev, u32 reg)
2434 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2438 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2442 void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
2446 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2451 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2457 void r600_cp_stop(struct radeon_device *rdev)
2459 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
2460 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
2463 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
2466 int r600_init_microcode(struct radeon_device *rdev)
2477 switch (rdev->family) {
2571 if (rdev->family >= CHIP_CEDAR) {
2575 } else if (rdev->family >= CHIP_RV770) {
2588 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2591 if (rdev->pfp_fw->size != pfp_req_size) {
2593 rdev->pfp_fw->size, fw_name);
2599 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2602 if (rdev->me_fw->size != me_req_size) {
2604 rdev->me_fw->size, fw_name);
2609 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2612 if (rdev->rlc_fw->size != rlc_req_size) {
2614 rdev->rlc_fw->size, fw_name);
2618 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_HEMLOCK)) {
2620 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2623 release_firmware(rdev->smc_fw);
2624 rdev->smc_fw = NULL;
2626 } else if (rdev->smc_fw->size != smc_req_size) {
2628 rdev->smc_fw->size, fw_name);
2638 release_firmware(rdev->pfp_fw);
2639 rdev->pfp_fw = NULL;
2640 release_firmware(rdev->me_fw);
2641 rdev->me_fw = NULL;
2642 release_firmware(rdev->rlc_fw);
2643 rdev->rlc_fw = NULL;
2644 release_firmware(rdev->smc_fw);
2645 rdev->smc_fw = NULL;
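
r600_init_microcode requests each blob (pfp, me, rlc, and optionally smc), rejects size mismatches, and releases everything on failure; the SMC blob is optional and merely dropped if absent. The per-blob step, sketched with the stock firmware API (fw_name and req_size would come from the per-family tables above):

    #include <linux/firmware.h>
    #include <linux/errno.h>

    static int load_blob(struct device *dev, const char *fw_name,
                         size_t req_size, const struct firmware **fw)
    {
        int err = request_firmware(fw, fw_name, dev);
        if (err)
            return err;
        if ((*fw)->size != req_size) {  /* reject truncated/mismatched images */
            release_firmware(*fw);
            *fw = NULL;
            return -EINVAL;
        }
        return 0;
    }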
2650 u32 r600_gfx_get_rptr(struct radeon_device *rdev,
2655 if (rdev->wb.enabled)
2656 rptr = rdev->wb.wb[ring->rptr_offs/4];
2663 u32 r600_gfx_get_wptr(struct radeon_device *rdev,
2669 void r600_gfx_set_wptr(struct radeon_device *rdev,
2676 static int r600_cp_load_microcode(struct radeon_device *rdev)
2681 if (!rdev->me_fw || !rdev->pfp_fw)
2684 r600_cp_stop(rdev);
2700 fw_data = (const __be32 *)rdev->me_fw->data;
2706 fw_data = (const __be32 *)rdev->pfp_fw->data;
2718 int r600_cp_start(struct radeon_device *rdev)
2720 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2724 r = radeon_ring_lock(rdev, ring, 7);
2731 if (rdev->family >= CHIP_RV770) {
2733 radeon_ring_write(ring, rdev->config.rv770.max_hw_contexts - 1);
2736 radeon_ring_write(ring, rdev->config.r600.max_hw_contexts - 1);
2741 radeon_ring_unlock_commit(rdev, ring, false);
2748 int r600_cp_resume(struct radeon_device *rdev)
2750 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2781 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2782 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2783 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2785 if (rdev->wb.enabled)
2798 r600_cp_start(rdev);
2800 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2806 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
2807 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
2812 void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size)
2823 if (radeon_ring_supports_scratch_reg(rdev, ring)) {
2824 r = radeon_scratch_get(rdev, &ring->rptr_save_reg);
2832 void r600_cp_fini(struct radeon_device *rdev)
2834 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2835 r600_cp_stop(rdev);
2836 radeon_ring_fini(rdev, ring);
2837 radeon_scratch_free(rdev, ring->rptr_save_reg);
2843 void r600_scratch_init(struct radeon_device *rdev)
2847 rdev->scratch.num_reg = 7;
2848 rdev->scratch.reg_base = SCRATCH_REG0;
2849 for (i = 0; i < rdev->scratch.num_reg; i++) {
2850 rdev->scratch.free[i] = true;
2851 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
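
r600_scratch_init publishes seven scratch registers starting at SCRATCH_REG0 through a free[]/reg[] pair. A plausible first-fit sketch of the matching allocator (radeon_scratch_get), not the driver's exact code:

    #include <stdbool.h>
    #include <stdint.h>
    #include <errno.h>

    static int scratch_get(bool *free_slot, const uint32_t *reg, int nreg,
                           uint32_t *out)
    {
        int i;

        for (i = 0; i < nreg; i++) {
            if (free_slot[i]) {
                free_slot[i] = false;   /* claim the slot */
                *out = reg[i];          /* hand back its MMIO offset */
                return 0;
            }
        }
        return -EINVAL;                 /* all scratch registers in use */
    }

radeon_scratch_free simply marks the slot whose reg[] entry matches as free again.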
2855 int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
2862 r = radeon_scratch_get(rdev, &scratch);
2868 r = radeon_ring_lock(rdev, ring, 3);
2871 radeon_scratch_free(rdev, scratch);
2877 radeon_ring_unlock_commit(rdev, ring, false);
2878 for (i = 0; i < rdev->usec_timeout; i++) {
2884 if (i < rdev->usec_timeout) {
2891 radeon_scratch_free(rdev, scratch);
2899 void r600_fence_ring_emit(struct radeon_device *rdev,
2902 struct radeon_ring *ring = &rdev->ring[fence->ring];
2906 if (rdev->family >= CHIP_RV770)
2909 if (rdev->wb.use_event) {
2910 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
2939 radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2950 * @rdev: radeon_device pointer
2958 bool r600_semaphore_ring_emit(struct radeon_device *rdev,
2966 if (rdev->family < CHIP_CAYMAN)
2974 if (emit_wait && (rdev->family >= CHIP_CEDAR)) {
2986 * @rdev: radeon_device pointer
2996 struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev,
3003 int ring_index = rdev->asic->copy.blit_ring_index;
3004 struct radeon_ring *ring = &rdev->ring[ring_index];
3013 r = radeon_ring_lock(rdev, ring, num_loops * 6 + 24);
3016 radeon_sync_free(rdev, &sync, NULL);
3020 radeon_sync_resv(rdev, &sync, resv, false);
3021 radeon_sync_rings(rdev, &sync, ring->idx);
3047 r = radeon_fence_emit(rdev, &fence, ring->idx);
3049 radeon_ring_unlock_undo(rdev, ring);
3050 radeon_sync_free(rdev, &sync, NULL);
3054 radeon_ring_unlock_commit(rdev, ring, false);
3055 radeon_sync_free(rdev, &sync, fence);
3060 int r600_set_surface_reg(struct radeon_device *rdev, int reg,
3068 void r600_clear_surface_reg(struct radeon_device *rdev, int reg)
3073 static void r600_uvd_init(struct radeon_device *rdev)
3077 if (!rdev->has_uvd)
3080 r = radeon_uvd_init(rdev);
3082 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
3084 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
3089 rdev->has_uvd = false;
3092 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
3093 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
3096 static void r600_uvd_start(struct radeon_device *rdev)
3100 if (!rdev->has_uvd)
3103 r = uvd_v1_0_resume(rdev);
3105 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
3108 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
3110 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
3116 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
3119 static void r600_uvd_resume(struct radeon_device *rdev)
3124 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
3127 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
3128 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
3130 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
3133 r = uvd_v1_0_init(rdev);
3135 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
3140 static int r600_startup(struct radeon_device *rdev)
3146 r600_pcie_gen2_enable(rdev);
3149 r = r600_vram_scratch_init(rdev);
3153 r600_mc_program(rdev);
3155 if (rdev->flags & RADEON_IS_AGP) {
3156 r600_agp_enable(rdev);
3158 r = r600_pcie_gart_enable(rdev);
3162 r600_gpu_init(rdev);
3165 r = radeon_wb_init(rdev);
3169 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
3171 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
3175 r600_uvd_start(rdev);
3178 if (!rdev->irq.installed) {
3179 r = radeon_irq_kms_init(rdev);
3184 r = r600_irq_init(rdev);
3187 radeon_irq_kms_fini(rdev);
3190 r600_irq_set(rdev);
3192 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3193 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
3198 r = r600_cp_load_microcode(rdev);
3201 r = r600_cp_resume(rdev);
3205 r600_uvd_resume(rdev);
3207 r = radeon_ib_pool_init(rdev);
3209 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
3213 r = radeon_audio_init(rdev);
3222 void r600_vga_set_state(struct radeon_device *rdev, bool state)
3236 int r600_resume(struct radeon_device *rdev)
3245 atom_asic_init(rdev->mode_info.atom_context);
3247 if (rdev->pm.pm_method == PM_METHOD_DPM)
3248 radeon_pm_resume(rdev);
3250 rdev->accel_working = true;
3251 r = r600_startup(rdev);
3254 rdev->accel_working = false;
3261 int r600_suspend(struct radeon_device *rdev)
3263 radeon_pm_suspend(rdev);
3264 radeon_audio_fini(rdev);
3265 r600_cp_stop(rdev);
3266 if (rdev->has_uvd) {
3267 uvd_v1_0_fini(rdev);
3268 radeon_uvd_suspend(rdev);
3270 r600_irq_suspend(rdev);
3271 radeon_wb_disable(rdev);
3272 r600_pcie_gart_disable(rdev);
3283 int r600_init(struct radeon_device *rdev)
3287 if (r600_debugfs_mc_info_init(rdev)) {
3291 if (!radeon_get_bios(rdev)) {
3292 if (ASIC_IS_AVIVO(rdev))
3296 if (!rdev->is_atom_bios) {
3297 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
3300 r = radeon_atombios_init(rdev);
3304 if (!radeon_card_posted(rdev)) {
3305 if (!rdev->bios) {
3306 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
3310 atom_asic_init(rdev->mode_info.atom_context);
3313 r600_scratch_init(rdev);
3315 radeon_surface_init(rdev);
3317 radeon_get_clock_info(rdev->ddev);
3319 r = radeon_fence_driver_init(rdev);
3322 if (rdev->flags & RADEON_IS_AGP) {
3323 r = radeon_agp_init(rdev);
3325 radeon_agp_disable(rdev);
3327 r = r600_mc_init(rdev);
3331 r = radeon_bo_init(rdev);
3335 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
3336 r = r600_init_microcode(rdev);
3344 radeon_pm_init(rdev);
3346 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
3347 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
3349 r600_uvd_init(rdev);
3351 rdev->ih.ring_obj = NULL;
3352 r600_ih_ring_init(rdev, 64 * 1024);
3354 r = r600_pcie_gart_init(rdev);
3358 rdev->accel_working = true;
3359 r = r600_startup(rdev);
3361 dev_err(rdev->dev, "disabling GPU acceleration\n");
3362 r600_cp_fini(rdev);
3363 r600_irq_fini(rdev);
3364 radeon_wb_fini(rdev);
3365 radeon_ib_pool_fini(rdev);
3366 radeon_irq_kms_fini(rdev);
3367 r600_pcie_gart_fini(rdev);
3368 rdev->accel_working = false;
3374 void r600_fini(struct radeon_device *rdev)
3376 radeon_pm_fini(rdev);
3377 radeon_audio_fini(rdev);
3378 r600_cp_fini(rdev);
3379 r600_irq_fini(rdev);
3380 if (rdev->has_uvd) {
3381 uvd_v1_0_fini(rdev);
3382 radeon_uvd_fini(rdev);
3384 radeon_wb_fini(rdev);
3385 radeon_ib_pool_fini(rdev);
3386 radeon_irq_kms_fini(rdev);
3387 r600_pcie_gart_fini(rdev);
3388 r600_vram_scratch_fini(rdev);
3389 radeon_agp_fini(rdev);
3390 radeon_gem_fini(rdev);
3391 radeon_fence_driver_fini(rdev);
3392 radeon_bo_fini(rdev);
3393 radeon_atombios_fini(rdev);
3394 kfree(rdev->bios);
3395 rdev->bios = NULL;
3402 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3404 struct radeon_ring *ring = &rdev->ring[ib->ring];
3413 } else if (rdev->wb.enabled) {
3432 int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
3440 r = radeon_scratch_get(rdev, &scratch);
3446 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
3455 r = radeon_ib_schedule(rdev, &ib, NULL, false);
3471 for (i = 0; i < rdev->usec_timeout; i++) {
3477 if (i < rdev->usec_timeout) {
3485 radeon_ib_free(rdev, &ib);
3487 radeon_scratch_free(rdev, scratch);
3502 void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size)
3509 rdev->ih.ring_size = ring_size;
3510 rdev->ih.ptr_mask = rdev->ih.ring_size - 1;
3511 rdev->ih.rptr = 0;
3514 int r600_ih_ring_alloc(struct radeon_device *rdev)
3519 if (rdev->ih.ring_obj == NULL) {
3520 r = radeon_bo_create(rdev, rdev->ih.ring_size,
3523 NULL, NULL, &rdev->ih.ring_obj);
3528 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3531 r = radeon_bo_pin(rdev->ih.ring_obj,
3533 &rdev->ih.gpu_addr);
3535 radeon_bo_unreserve(rdev->ih.ring_obj);
3539 r = radeon_bo_kmap(rdev->ih.ring_obj,
3540 (void **)__UNVOLATILE(&rdev->ih.ring));
3541 radeon_bo_unreserve(rdev->ih.ring_obj);
3550 void r600_ih_ring_fini(struct radeon_device *rdev)
3553 if (rdev->ih.ring_obj) {
3554 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3556 radeon_bo_kunmap(rdev->ih.ring_obj);
3557 radeon_bo_unpin(rdev->ih.ring_obj);
3558 radeon_bo_unreserve(rdev->ih.ring_obj);
3560 radeon_bo_unref(&rdev->ih.ring_obj);
3561 rdev->ih.ring = NULL;
3562 rdev->ih.ring_obj = NULL;
3566 void r600_rlc_stop(struct radeon_device *rdev)
3569 if ((rdev->family >= CHIP_RV770) &&
3570 (rdev->family <= CHIP_RV740)) {
3582 static void r600_rlc_start(struct radeon_device *rdev)
3587 static int r600_rlc_resume(struct radeon_device *rdev)
3592 if (!rdev->rlc_fw)
3595 r600_rlc_stop(rdev);
3607 fw_data = (const __be32 *)rdev->rlc_fw->data;
3608 if (rdev->family >= CHIP_RV770) {
3621 r600_rlc_start(rdev);
3626 static void r600_enable_interrupts(struct radeon_device *rdev)
3635 rdev->ih.enabled = true;
3638 void r600_disable_interrupts(struct radeon_device *rdev)
3650 rdev->ih.enabled = false;
3651 rdev->ih.rptr = 0;
3654 static void r600_disable_interrupt_state(struct radeon_device *rdev)
3665 if (ASIC_IS_DCE3(rdev)) {
3676 if (ASIC_IS_DCE32(rdev)) {
3707 int r600_irq_init(struct radeon_device *rdev)
3714 ret = r600_ih_ring_alloc(rdev);
3719 r600_disable_interrupts(rdev);
3722 if (rdev->family >= CHIP_CEDAR)
3723 ret = evergreen_rlc_resume(rdev);
3725 ret = r600_rlc_resume(rdev);
3727 r600_ih_ring_fini(rdev);
3733 WREG32(INTERRUPT_CNTL2, rdev->dummy_page.addr >> 8);
3743 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
3744 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
3750 if (rdev->wb.enabled)
3754 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
3755 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
3766 if (rdev->msi_enabled)
3771 if (rdev->family >= CHIP_CEDAR)
3772 evergreen_disable_interrupt_state(rdev);
3774 r600_disable_interrupt_state(rdev);
3777 pci_set_master(rdev->pdev);
3780 r600_enable_interrupts(rdev);
3785 void r600_irq_suspend(struct radeon_device *rdev)
3787 r600_irq_disable(rdev);
3788 r600_rlc_stop(rdev);
3791 void r600_irq_fini(struct radeon_device *rdev)
3793 r600_irq_suspend(rdev);
3794 r600_ih_ring_fini(rdev);
3797 int r600_irq_set(struct radeon_device *rdev)
3807 if (!rdev->irq.installed) {
3812 if (!rdev->ih.enabled) {
3813 r600_disable_interrupts(rdev);
3815 r600_disable_interrupt_state(rdev);
3819 if (ASIC_IS_DCE3(rdev)) {
3824 if (ASIC_IS_DCE32(rdev)) {
3843 if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
3846 } else if (rdev->family >= CHIP_RV770) {
3850 if (rdev->irq.dpm_thermal) {
3855 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3861 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
3866 if (rdev->irq.crtc_vblank_int[0] ||
3867 atomic_read(&rdev->irq.pflip[0])) {
3871 if (rdev->irq.crtc_vblank_int[1] ||
3872 atomic_read(&rdev->irq.pflip[1])) {
3876 if (rdev->irq.hpd[0]) {
3880 if (rdev->irq.hpd[1]) {
3884 if (rdev->irq.hpd[2]) {
3888 if (rdev->irq.hpd[3]) {
3892 if (rdev->irq.hpd[4]) {
3896 if (rdev->irq.hpd[5]) {
3900 if (rdev->irq.afmt[0]) {
3904 if (rdev->irq.afmt[1]) {
3915 if (ASIC_IS_DCE3(rdev)) {
3920 if (ASIC_IS_DCE32(rdev)) {
3936 if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
3938 } else if (rdev->family >= CHIP_RV770) {
3948 static void r600_irq_ack(struct radeon_device *rdev)
3952 if (ASIC_IS_DCE3(rdev)) {
3953 rdev->irq.stat_regs.r600.disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS);
3954 rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE);
3955 rdev->irq.stat_regs.r600.disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2);
3956 if (ASIC_IS_DCE32(rdev)) {
3957 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET0);
3958 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET1);
3960 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
3961 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(DCE3_HDMI1_STATUS);
3964 rdev->irq.stat_regs.r600.disp_int = RREG32(DISP_INTERRUPT_STATUS);
3965 rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
3966 rdev->irq.stat_regs.r600.disp_int_cont2 = 0;
3967 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
3968 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(HDMI1_STATUS);
3970 rdev->irq.stat_regs.r600.d1grph_int = RREG32(D1GRPH_INTERRUPT_STATUS);
3971 rdev->irq.stat_regs.r600.d2grph_int = RREG32(D2GRPH_INTERRUPT_STATUS);
3973 if (rdev->irq.stat_regs.r600.d1grph_int & DxGRPH_PFLIP_INT_OCCURRED)
3975 if (rdev->irq.stat_regs.r600.d2grph_int & DxGRPH_PFLIP_INT_OCCURRED)
3977 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT)
3979 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT)
3981 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT)
3983 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT)
3985 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
3986 if (ASIC_IS_DCE3(rdev)) {
3996 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
3997 if (ASIC_IS_DCE3(rdev)) {
4007 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
4008 if (ASIC_IS_DCE3(rdev)) {
4018 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
4023 if (ASIC_IS_DCE32(rdev)) {
4024 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
4029 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
4034 if (rdev->irq.stat_regs.r600.hdmi0_status & AFMT_AZ_FORMAT_WTRIG) {
4039 if (rdev->irq.stat_regs.r600.hdmi1_status & AFMT_AZ_FORMAT_WTRIG) {
4045 if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) {
4050 if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) {
4051 if (ASIC_IS_DCE3(rdev)) {
4064 void r600_irq_disable(struct radeon_device *rdev)
4066 r600_disable_interrupts(rdev);
4069 r600_irq_ack(rdev);
4070 r600_disable_interrupt_state(rdev);
4073 static u32 r600_get_ih_wptr(struct radeon_device *rdev)
4077 if (rdev->wb.enabled)
4078 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4088 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4089 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4090 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4095 return (wptr & rdev->ih.ptr_mask);
4128 int r600_irq_process(struct radeon_device *rdev)
4138 if (!rdev->ih.enabled || rdev->shutdown)
4142 if (!rdev->msi_enabled)
4145 wptr = r600_get_ih_wptr(rdev);
4149 if (atomic_xchg(&rdev->ih.lock, 1))
4152 rptr = rdev->ih.rptr;
4159 r600_irq_ack(rdev);
4164 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4165 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4171 if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT))
4174 if (rdev->irq.crtc_vblank_int[0]) {
4175 drm_handle_vblank(rdev->ddev, 0);
4177 spin_lock(&rdev->irq.vblank_lock);
4178 rdev->pm.vblank_sync = true;
4179 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
4180 spin_unlock(&rdev->irq.vblank_lock);
4182 rdev->pm.vblank_sync = true;
4183 wake_up(&rdev->irq.vblank_queue);
4186 if (atomic_read(&rdev->irq.pflip[0]))
4187 radeon_crtc_handle_vblank(rdev, 0);
4188 rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4193 if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT))
4196 rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4208 if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT))
4211 if (rdev->irq.crtc_vblank_int[1]) {
4212 drm_handle_vblank(rdev->ddev, 1);
4214 spin_lock(&rdev->irq.vblank_lock);
4215 rdev->pm.vblank_sync = true;
4216 DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
4217 spin_unlock(&rdev->irq.vblank_lock);
4219 rdev->pm.vblank_sync = true;
4220 wake_up(&rdev->irq.vblank_queue);
4223 if (atomic_read(&rdev->irq.pflip[1]))
4224 radeon_crtc_handle_vblank(rdev, 1);
4225 rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VBLANK_INTERRUPT;
4230 if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT))
4233 rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VLINE_INTERRUPT;
4245 radeon_crtc_handle_flip(rdev, 0);
4250 radeon_crtc_handle_flip(rdev, 1);
4255 if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT))
4258 rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD1_INTERRUPT;
4263 if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT))
4266 rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD2_INTERRUPT;
4271 if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT))
4274 rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD3_INTERRUPT;
4279 if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT))
4282 rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD4_INTERRUPT;
4287 if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT))
4290 rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD5_INTERRUPT;
4295 if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT))
4298 rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD6_INTERRUPT;
4311 if (!(rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG))
4314 rdev->irq.stat_regs.r600.hdmi0_status &= ~HDMI0_AZ_FORMAT_WTRIG;
4320 if (!(rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG))
4323 rdev->irq.stat_regs.r600.hdmi1_status &= ~HDMI0_AZ_FORMAT_WTRIG;
4335 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4341 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4345 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4349 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4353 rdev->pm.dpm.thermal.high_to_low = false;
4358 rdev->pm.dpm.thermal.high_to_low = true;
4371 rptr &= rdev->ih.ptr_mask;
4375 schedule_delayed_work(&rdev->hotplug_work, 0);
4377 schedule_work(&rdev->audio_work);
4378 if (queue_thermal && rdev->pm.dpm_enabled)
4379 schedule_work(&rdev->pm.dpm.thermal.work);
4380 rdev->ih.rptr = rptr;
4381 atomic_set(&rdev->ih.lock, 0);
4384 wptr = r600_get_ih_wptr(rdev);
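
The body of r600_irq_process (4128-4381) is a classic ring consumer: snapshot the write pointer, walk the read pointer toward it in 16-byte vectors, decode src_id/src_data, then publish rptr and re-check for interrupts that arrived meanwhile. A skeleton with hypothetical get_wptr()/handle() stand-ins:

    #include <linux/types.h>
    #include <asm/byteorder.h>

    struct ih { volatile u32 *ring; u32 rptr; u32 ptr_mask; };
    extern u32 get_wptr(void);                      /* hypothetical */
    extern void handle(u32 src_id, u32 src_data);   /* hypothetical */

    static void ih_process(struct ih *ih)
    {
        u32 rptr = ih->rptr;
        u32 wptr = get_wptr();  /* snapshot; later IRQs caught on re-check */

        while (rptr != wptr) {
            u32 idx = rptr / 4;
            u32 src_id   = le32_to_cpu(ih->ring[idx])     & 0xff;
            u32 src_data = le32_to_cpu(ih->ring[idx + 1]) & 0xfffffff;

            handle(src_id, src_data);
            rptr = (rptr + 16) & ih->ptr_mask;  /* next 4-dword vector */
        }
        ih->rptr = rptr;        /* publish progress back to the ring */
    }

The power-of-two ring size set up in r600_ih_ring_init is what makes the cheap AND-with-ptr_mask wraparound (and the overflow resync in r600_get_ih_wptr) work.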
4400 struct radeon_device *rdev = dev->dev_private;
4402 DREG32_SYS(m, rdev, R_000E50_SRBM_STATUS);
4403 DREG32_SYS(m, rdev, VM_L2_STATUS);
4412 int r600_debugfs_mc_info_init(struct radeon_device *rdev)
4415 return radeon_debugfs_add_files(rdev, r600_mc_info_list, ARRAY_SIZE(r600_mc_info_list));
4428 * rdev: radeon device structure
4435 void r600_mmio_hdp_flush(struct radeon_device *rdev)
4442 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
4443 rdev->vram_scratch.ptr && !(rdev->flags & RADEON_IS_AGP)) {
4444 void __iomem *ptr = rdev->vram_scratch.ptr;
4457 void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes)
4461 if (rdev->flags & RADEON_IS_IGP)
4464 if (!(rdev->flags & RADEON_IS_PCIE))
4468 if (ASIC_IS_X2(rdev))
4471 radeon_gui_idle(rdev);
4510 int r600_get_pcie_lanes(struct radeon_device *rdev)
4514 if (rdev->flags & RADEON_IS_IGP)
4517 if (!(rdev->flags & RADEON_IS_PCIE))
4521 if (ASIC_IS_X2(rdev))
4524 radeon_gui_idle(rdev);
4547 static void r600_pcie_gen2_enable(struct radeon_device *rdev)
4555 if (rdev->flags & RADEON_IS_IGP)
4558 if (!(rdev->flags & RADEON_IS_PCIE))
4562 if (ASIC_IS_X2(rdev))
4566 if (rdev->family <= CHIP_R600)
4569 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
4570 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
4582 if ((rdev->family == CHIP_RV670) ||
4583 (rdev->family == CHIP_RV620) ||
4584 (rdev->family == CHIP_RV635)) {
4607 if ((rdev->family == CHIP_RV670) ||
4608 (rdev->family == CHIP_RV620) ||
4609 (rdev->family == CHIP_RV635)) {
4634 if ((rdev->family == CHIP_RV670) ||
4635 (rdev->family == CHIP_RV620) ||
4636 (rdev->family == CHIP_RV635)) {
4664 * @rdev: radeon_device pointer
4669 uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev)
4673 mutex_lock(&rdev->gpu_clock_mutex);
4677 mutex_unlock(&rdev->gpu_clock_mutex);
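
r600_get_gpu_clock_counter serializes the read with gpu_clock_mutex because the 64-bit counter is latched and then read back as two 32-bit halves. A sketch with hypothetical register offsets and mmio helpers:

    #include <linux/mutex.h>
    #include <linux/types.h>

    #define CLOCK_CAPTURE   0x00    /* hypothetical latch register */
    #define CLOCK_COUNT_LSB 0x04    /* hypothetical low half */
    #define CLOCK_COUNT_MSB 0x08    /* hypothetical high half */

    extern u32 mmio_read(u32 reg);
    extern void mmio_write(u32 reg, u32 v);

    static DEFINE_MUTEX(gpu_clock_mutex);

    static u64 read_gpu_clock(void)
    {
        u64 clock;

        mutex_lock(&gpu_clock_mutex);   /* latch+read must not interleave */
        mmio_write(CLOCK_CAPTURE, 1);   /* latch the free-running counter */
        clock  = (u64)mmio_read(CLOCK_COUNT_LSB);
        clock |= (u64)mmio_read(CLOCK_COUNT_MSB) << 32;
        mutex_unlock(&gpu_clock_mutex);
        return clock;
    }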