Lines matching refs:rdev in drivers/gpu/drm/radeon/ni.c (the leading number on each match is its line number in that file)
52 u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg)
57 spin_lock_irqsave(&rdev->smc_idx_lock, flags);
60 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);
64 void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v)
68 spin_lock_irqsave(&rdev->smc_idx_lock, flags);
71 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);
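The two accessors above are the Trinity SMC indirect-register helpers; the matches show only the locking lines, but the full bodies are just an index write plus a data read/write under rdev->smc_idx_lock. A sketch of the likely bodies, assuming the TN_SMC_IND_INDEX_0/TN_SMC_IND_DATA_0 index/data pair (the register names do not appear in the matches):

    u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg)
    {
            unsigned long flags;
            u32 r;

            /* the index/data pair is shared, so serialize all SMC accesses */
            spin_lock_irqsave(&rdev->smc_idx_lock, flags);
            WREG32(TN_SMC_IND_INDEX_0, (reg));
            r = RREG32(TN_SMC_IND_DATA_0);
            spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);
            return r;
    }

    void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v)
    {
            unsigned long flags;

            spin_lock_irqsave(&rdev->smc_idx_lock, flags);
            WREG32(TN_SMC_IND_INDEX_0, (reg));
            WREG32(TN_SMC_IND_DATA_0, (v));
            spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);
    }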
200 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
201 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
202 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
203 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
204 extern int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
205 extern void evergreen_mc_program(struct radeon_device *rdev);
206 extern void evergreen_irq_suspend(struct radeon_device *rdev);
207 extern int evergreen_mc_init(struct radeon_device *rdev);
208 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
209 extern void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
210 extern void evergreen_program_aspm(struct radeon_device *rdev);
211 extern void sumo_rlc_fini(struct radeon_device *rdev);
212 extern int sumo_rlc_init(struct radeon_device *rdev);
213 extern void evergreen_gpu_pci_config_reset(struct radeon_device *rdev);
463 static void ni_init_golden_registers(struct radeon_device *rdev)
465 switch (rdev->family) {
467 radeon_program_register_sequence(rdev,
470 radeon_program_register_sequence(rdev,
475 if ((rdev->pdev->device == 0x9900) ||
476 (rdev->pdev->device == 0x9901) ||
477 (rdev->pdev->device == 0x9903) ||
478 (rdev->pdev->device == 0x9904) ||
479 (rdev->pdev->device == 0x9905) ||
480 (rdev->pdev->device == 0x9906) ||
481 (rdev->pdev->device == 0x9907) ||
482 (rdev->pdev->device == 0x9908) ||
483 (rdev->pdev->device == 0x9909) ||
484 (rdev->pdev->device == 0x990A) ||
485 (rdev->pdev->device == 0x990B) ||
486 (rdev->pdev->device == 0x990C) ||
487 (rdev->pdev->device == 0x990D) ||
488 (rdev->pdev->device == 0x990E) ||
489 (rdev->pdev->device == 0x990F) ||
490 (rdev->pdev->device == 0x9910) ||
491 (rdev->pdev->device == 0x9913) ||
492 (rdev->pdev->device == 0x9917) ||
493 (rdev->pdev->device == 0x9918)) {
494 radeon_program_register_sequence(rdev,
497 radeon_program_register_sequence(rdev,
501 radeon_program_register_sequence(rdev,
504 radeon_program_register_sequence(rdev,
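ni_init_golden_registers() dispatches on rdev->family: Cayman gets its own tables, while ARUBA (Trinity/Richland APUs) picks a table pair based on the PCI device ID chain at 475-493. A condensed sketch of that shape; the table names follow the usual radeon golden-register convention, and is_trinity_id() is a hypothetical helper standing in for the device-ID chain:

    static void ni_init_golden_registers(struct radeon_device *rdev)
    {
            switch (rdev->family) {
            case CHIP_CAYMAN:
                    radeon_program_register_sequence(rdev,
                                                     cayman_golden_registers,
                                                     (const u32)ARRAY_SIZE(cayman_golden_registers));
                    radeon_program_register_sequence(rdev,
                                                     cayman_golden_registers2,
                                                     (const u32)ARRAY_SIZE(cayman_golden_registers2));
                    break;
            case CHIP_ARUBA:
                    if (is_trinity_id(rdev->pdev->device)) {
                            /* Trinity parts (the 0x9900..0x9918 list above) */
                            radeon_program_register_sequence(rdev,
                                                             dvst_golden_registers,
                                                             (const u32)ARRAY_SIZE(dvst_golden_registers));
                            radeon_program_register_sequence(rdev,
                                                             dvst_golden_registers2,
                                                             (const u32)ARRAY_SIZE(dvst_golden_registers2));
                    } else {
                            /* Richland parts */
                            radeon_program_register_sequence(rdev,
                                                             scrapper_golden_registers,
                                                             (const u32)ARRAY_SIZE(scrapper_golden_registers));
                            radeon_program_register_sequence(rdev,
                                                             scrapper_golden_registers2,
                                                             (const u32)ARRAY_SIZE(scrapper_golden_registers2));
                    }
                    break;
            default:
                    break;
            }
    }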
644 int ni_mc_load_microcode(struct radeon_device *rdev)
651 if (!rdev->mc_fw)
654 switch (rdev->family) {
697 fw_data = (const __be32 *)rdev->mc_fw->data;
707 for (i = 0; i < rdev->usec_timeout; i++) {
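ni_mc_load_microcode() bails if no MC firmware is loaded (651), selects a per-family ucode/IO-register table (654), streams the big-endian firmware words into the MC sequencer (697), then polls for completion with the standard bounded-wait idiom at 707. The two idioms, sketched; the MC_SEQ_SUP_PGM/MC_IO_PAD_CNTL_D0/MEM_FALL_OUT_CMD names are assumed from the NI MC programming sequence and do not appear in the matches:

    /* MC firmware is stored big-endian; byte-swap each word while loading */
    fw_data = (const __be32 *)rdev->mc_fw->data;
    for (i = 0; i < ucode_size; i++)
            WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));

    /* poll for MC training to complete, bounded by the per-device timeout */
    for (i = 0; i < rdev->usec_timeout; i++) {
            if (RREG32(MC_IO_PAD_CNTL_D0) & MEM_FALL_OUT_CMD)
                    break;
            udelay(1);
    }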
720 int ni_init_microcode(struct radeon_device *rdev)
731 switch (rdev->family) {
783 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
786 if (rdev->pfp_fw->size != pfp_req_size) {
788 rdev->pfp_fw->size, fw_name);
794 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
797 if (rdev->me_fw->size != me_req_size) {
799 rdev->me_fw->size, fw_name);
804 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
807 if (rdev->rlc_fw->size != rlc_req_size) {
809 rdev->rlc_fw->size, fw_name);
814 if (!(rdev->flags & RADEON_IS_IGP)) {
816 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
819 if (rdev->mc_fw->size != mc_req_size) {
821 rdev->mc_fw->size, fw_name);
826 if ((rdev->family >= CHIP_BARTS) && (rdev->family <= CHIP_CAYMAN)) {
828 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
831 release_firmware(rdev->smc_fw);
832 rdev->smc_fw = NULL;
834 } else if (rdev->smc_fw->size != smc_req_size) {
836 rdev->smc_fw->size, fw_name);
846 release_firmware(rdev->pfp_fw);
847 rdev->pfp_fw = NULL;
848 release_firmware(rdev->me_fw);
849 rdev->me_fw = NULL;
850 release_firmware(rdev->rlc_fw);
851 rdev->rlc_fw = NULL;
852 release_firmware(rdev->mc_fw);
853 rdev->mc_fw = NULL;
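ni_init_microcode() repeats one pattern per blob: PFP (783), ME (794), RLC (804), MC on discrete parts only (814-816), and SMC on BARTS through CAYMAN (826-828). Each request_firmware() result is validated against the per-family size chosen in the switch at 731; an SMC load failure is tolerated (the blob is released and the pointer cleared at 831-832), while any other failure releases every blob (846-853) so callers can simply test the rdev->*_fw pointers. The per-blob step, sketched for PFP; fw_name, chip_name, and pfp_req_size stand for the per-family values:

    snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
    err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
    if (err)
            goto out;
    if (rdev->pfp_fw->size != pfp_req_size) {
            pr_err("ni_cp: Bogus length %zu in firmware \"%s\"\n",
                   rdev->pfp_fw->size, fw_name);
            err = -EINVAL;
            goto out;
    }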
861 * @rdev: radeon_device pointer
868 int cayman_get_allowed_info_register(struct radeon_device *rdev,
887 int tn_get_temp(struct radeon_device *rdev)
898 static void cayman_gpu_init(struct radeon_device *rdev)
911 switch (rdev->family) {
913 rdev->config.cayman.max_shader_engines = 2;
914 rdev->config.cayman.max_pipes_per_simd = 4;
915 rdev->config.cayman.max_tile_pipes = 8;
916 rdev->config.cayman.max_simds_per_se = 12;
917 rdev->config.cayman.max_backends_per_se = 4;
918 rdev->config.cayman.max_texture_channel_caches = 8;
919 rdev->config.cayman.max_gprs = 256;
920 rdev->config.cayman.max_threads = 256;
921 rdev->config.cayman.max_gs_threads = 32;
922 rdev->config.cayman.max_stack_entries = 512;
923 rdev->config.cayman.sx_num_of_sets = 8;
924 rdev->config.cayman.sx_max_export_size = 256;
925 rdev->config.cayman.sx_max_export_pos_size = 64;
926 rdev->config.cayman.sx_max_export_smx_size = 192;
927 rdev->config.cayman.max_hw_contexts = 8;
928 rdev->config.cayman.sq_num_cf_insts = 2;
930 rdev->config.cayman.sc_prim_fifo_size = 0x100;
931 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30;
932 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130;
937 rdev->config.cayman.max_shader_engines = 1;
938 rdev->config.cayman.max_pipes_per_simd = 4;
939 rdev->config.cayman.max_tile_pipes = 2;
940 if ((rdev->pdev->device == 0x9900) ||
941 (rdev->pdev->device == 0x9901) ||
942 (rdev->pdev->device == 0x9905) ||
943 (rdev->pdev->device == 0x9906) ||
944 (rdev->pdev->device == 0x9907) ||
945 (rdev->pdev->device == 0x9908) ||
946 (rdev->pdev->device == 0x9909) ||
947 (rdev->pdev->device == 0x990B) ||
948 (rdev->pdev->device == 0x990C) ||
949 (rdev->pdev->device == 0x990F) ||
950 (rdev->pdev->device == 0x9910) ||
951 (rdev->pdev->device == 0x9917) ||
952 (rdev->pdev->device == 0x9999) ||
953 (rdev->pdev->device == 0x999C)) {
954 rdev->config.cayman.max_simds_per_se = 6;
955 rdev->config.cayman.max_backends_per_se = 2;
956 rdev->config.cayman.max_hw_contexts = 8;
957 rdev->config.cayman.sx_max_export_size = 256;
958 rdev->config.cayman.sx_max_export_pos_size = 64;
959 rdev->config.cayman.sx_max_export_smx_size = 192;
960 } else if ((rdev->pdev->device == 0x9903) ||
961 (rdev->pdev->device == 0x9904) ||
962 (rdev->pdev->device == 0x990A) ||
963 (rdev->pdev->device == 0x990D) ||
964 (rdev->pdev->device == 0x990E) ||
965 (rdev->pdev->device == 0x9913) ||
966 (rdev->pdev->device == 0x9918) ||
967 (rdev->pdev->device == 0x999D)) {
968 rdev->config.cayman.max_simds_per_se = 4;
969 rdev->config.cayman.max_backends_per_se = 2;
970 rdev->config.cayman.max_hw_contexts = 8;
971 rdev->config.cayman.sx_max_export_size = 256;
972 rdev->config.cayman.sx_max_export_pos_size = 64;
973 rdev->config.cayman.sx_max_export_smx_size = 192;
974 } else if ((rdev->pdev->device == 0x9919) ||
975 (rdev->pdev->device == 0x9990) ||
976 (rdev->pdev->device == 0x9991) ||
977 (rdev->pdev->device == 0x9994) ||
978 (rdev->pdev->device == 0x9995) ||
979 (rdev->pdev->device == 0x9996) ||
980 (rdev->pdev->device == 0x999A) ||
981 (rdev->pdev->device == 0x99A0)) {
982 rdev->config.cayman.max_simds_per_se = 3;
983 rdev->config.cayman.max_backends_per_se = 1;
984 rdev->config.cayman.max_hw_contexts = 4;
985 rdev->config.cayman.sx_max_export_size = 128;
986 rdev->config.cayman.sx_max_export_pos_size = 32;
987 rdev->config.cayman.sx_max_export_smx_size = 96;
989 rdev->config.cayman.max_simds_per_se = 2;
990 rdev->config.cayman.max_backends_per_se = 1;
991 rdev->config.cayman.max_hw_contexts = 4;
992 rdev->config.cayman.sx_max_export_size = 128;
993 rdev->config.cayman.sx_max_export_pos_size = 32;
994 rdev->config.cayman.sx_max_export_smx_size = 96;
996 rdev->config.cayman.max_texture_channel_caches = 2;
997 rdev->config.cayman.max_gprs = 256;
998 rdev->config.cayman.max_threads = 256;
999 rdev->config.cayman.max_gs_threads = 32;
1000 rdev->config.cayman.max_stack_entries = 512;
1001 rdev->config.cayman.sx_num_of_sets = 8;
1002 rdev->config.cayman.sq_num_cf_insts = 2;
1004 rdev->config.cayman.sc_prim_fifo_size = 0x40;
1005 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30;
1006 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130;
1024 evergreen_fix_pci_max_read_req_size(rdev);
1030 rdev->config.cayman.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
1031 if (rdev->config.cayman.mem_row_size_in_kb > 4)
1032 rdev->config.cayman.mem_row_size_in_kb = 4;
1034 rdev->config.cayman.shader_engine_tile_size = 32;
1035 rdev->config.cayman.num_gpus = 1;
1036 rdev->config.cayman.multi_gpu_tile_size = 64;
1039 rdev->config.cayman.num_tile_pipes = (1 << tmp);
1041 rdev->config.cayman.mem_max_burst_length_bytes = (tmp + 1) * 256;
1043 rdev->config.cayman.num_shader_engines = tmp + 1;
1045 rdev->config.cayman.num_gpus = tmp + 1;
1047 rdev->config.cayman.multi_gpu_tile_size = 1 << tmp;
1049 rdev->config.cayman.mem_row_size_in_kb = 1 << tmp;
1059 rdev->config.cayman.tile_config = 0;
1060 switch (rdev->config.cayman.num_tile_pipes) {
1063 rdev->config.cayman.tile_config |= (0 << 0);
1066 rdev->config.cayman.tile_config |= (1 << 0);
1069 rdev->config.cayman.tile_config |= (2 << 0);
1072 rdev->config.cayman.tile_config |= (3 << 0);
1077 if (rdev->flags & RADEON_IS_IGP)
1078 rdev->config.cayman.tile_config |= 1 << 4;
1082 rdev->config.cayman.tile_config |= 0 << 4;
1085 rdev->config.cayman.tile_config |= 1 << 4;
1089 rdev->config.cayman.tile_config |= 2 << 4;
1093 rdev->config.cayman.tile_config |=
1095 rdev->config.cayman.tile_config |=
1099 for (i = (rdev->config.cayman.max_shader_engines - 1); i >= 0; i--) {
1111 for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines); i++)
1115 for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines); i++)
1119 for (i = 0; i < rdev->config.cayman.max_shader_engines; i++) {
1125 simd_disable_bitmap |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
1129 rdev->config.cayman.active_simds = hweight32(~tmp);
1136 if (ASIC_IS_DCE6(rdev))
1145 if ((rdev->config.cayman.max_backends_per_se == 1) &&
1146 (rdev->flags & RADEON_IS_IGP)) {
1156 tmp = r6xx_remap_render_backend(rdev, tmp,
1157 rdev->config.cayman.max_backends_per_se *
1158 rdev->config.cayman.max_shader_engines,
1161 rdev->config.cayman.backend_map = tmp;
1165 for (i = 0; i < rdev->config.cayman.max_texture_channel_caches; i++)
1187 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.cayman.sx_num_of_sets);
1203 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.cayman.sx_max_export_size / 4) - 1) |
1204 POSITION_BUFFER_SIZE((rdev->config.cayman.sx_max_export_pos_size / 4) - 1) |
1205 SMX_BUFFER_SIZE((rdev->config.cayman.sx_max_export_smx_size / 4) - 1)));
1207 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.cayman.sc_prim_fifo_size) |
1208 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_hiz_tile_fifo_size) |
1209 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_earlyz_tile_fifo_size)));
1216 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.cayman.sq_num_cf_insts) |
1259 if (rdev->family == CHIP_ARUBA) {
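Two details worth pulling out of the cayman_gpu_init() matches. First, the 1030 match derives the DRAM row size: with tmp the encoded field, row size = (4 * 2^(8+tmp)) / 1024 KiB, clamped at 1031-1032; e.g. tmp = 0 gives 1 KiB, tmp = 2 gives 4 KiB, and anything larger is clamped back to 4. Second, 1059-1095 pack the pipe/bank/row geometry into the tile_config word exported to userspace; the pipe field (1060-1072) is a straight log2 encoding, reassembled here from the matched lines:

    rdev->config.cayman.tile_config = 0;
    switch (rdev->config.cayman.num_tile_pipes) {
    case 1:
    default:
            rdev->config.cayman.tile_config |= (0 << 0);
            break;
    case 2:
            rdev->config.cayman.tile_config |= (1 << 0);
            break;
    case 4:
            rdev->config.cayman.tile_config |= (2 << 0);
            break;
    case 8:
            rdev->config.cayman.tile_config |= (3 << 0);
            break;
    }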
1272 void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev)
1281 static int cayman_pcie_gart_enable(struct radeon_device *rdev)
1285 if (rdev->gart.robj == NULL) {
1286 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
1289 r = radeon_gart_table_vram_pin(rdev);
1312 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
1313 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
1314 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
1316 (u32)(rdev->dummy_page.addr >> 12));
1333 rdev->vm_manager.max_pfn - 1);
1335 rdev->vm_manager.saved_table_addr[i]);
1340 (u32)(rdev->dummy_page.addr >> 12));
1357 cayman_pcie_gart_tlb_flush(rdev);
1359 (unsigned)(rdev->mc.gtt_size >> 20),
1360 (unsigned long long)rdev->gart.table_addr);
1361 rdev->gart.ready = true;
1365 static void cayman_pcie_gart_disable(struct radeon_device *rdev)
1370 rdev->vm_manager.saved_table_addr[i] = RREG32(
1389 radeon_gart_table_vram_unpin(rdev);
1392 static void cayman_pcie_gart_fini(struct radeon_device *rdev)
1394 cayman_pcie_gart_disable(rdev);
1395 radeon_gart_table_vram_free(rdev);
1396 radeon_gart_fini(rdev);
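The GART group follows the usual enable/disable/fini split: enable (1281) requires the table BO (1285-1286), pins it (1289), programs context 0 over the GTT aperture (1312-1316) and contexts 1..7 for per-process VM (1333-1340), then flushes. cayman_pcie_gart_tlb_flush() itself is tiny; a sketch, assuming the HDP/VM register names used throughout the evergreen/NI GART code:

    void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev)
    {
            /* flush the HDP cache so table updates are visible to the GPU */
            WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

            /* bits 0-7 request invalidation of VM contexts 0-7 */
            WREG32(VM_INVALIDATE_REQUEST, 1);
    }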
1399 void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
1409 void cayman_fence_ring_emit(struct radeon_device *rdev,
1412 struct radeon_ring *ring = &rdev->ring[fence->ring];
1413 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
1432 void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
1434 struct radeon_ring *ring = &rdev->ring[ib->ring];
1468 static void cayman_cp_enable(struct radeon_device *rdev, bool enable)
1473 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
1474 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
1477 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
1481 u32 cayman_gfx_get_rptr(struct radeon_device *rdev,
1486 if (rdev->wb.enabled)
1487 rptr = rdev->wb.wb[ring->rptr_offs/4];
1500 u32 cayman_gfx_get_wptr(struct radeon_device *rdev,
1515 void cayman_gfx_set_wptr(struct radeon_device *rdev,
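The 1486-1487 matches show the writeback fast path: with WB enabled, the CP snoops the ring read pointer into the rdev->wb.wb[] page in system memory, so get_rptr is a cached memory read instead of an MMIO register read. A sketch with the CP1/CP2 register cases folded into a comment (CP_RB0_RPTR is the GFX ring's register):

    u32 cayman_gfx_get_rptr(struct radeon_device *rdev,
                            struct radeon_ring *ring)
    {
            u32 rptr;

            if (rdev->wb.enabled)
                    rptr = rdev->wb.wb[ring->rptr_offs/4];  /* from writeback page */
            else
                    rptr = RREG32(CP_RB0_RPTR);  /* CP1/CP2 rings read RB1/RB2 instead */

            return rptr;
    }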
1530 static int cayman_cp_load_microcode(struct radeon_device *rdev)
1535 if (!rdev->me_fw || !rdev->pfp_fw)
1538 cayman_cp_enable(rdev, false);
1540 fw_data = (const __be32 *)rdev->pfp_fw->data;
1546 fw_data = (const __be32 *)rdev->me_fw->data;
1557 static int cayman_cp_start(struct radeon_device *rdev)
1559 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1562 r = radeon_ring_lock(rdev, ring, 7);
1570 radeon_ring_write(ring, rdev->config.cayman.max_hw_contexts - 1);
1574 radeon_ring_unlock_commit(rdev, ring, false);
1576 cayman_cp_enable(rdev, true);
1578 r = radeon_ring_lock(rdev, ring, cayman_default_size + 19);
1616 radeon_ring_unlock_commit(rdev, ring, false);
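cayman_cp_start() uses the standard radeon ring submission idiom visible at 1562/1574 and 1578/1616: reserve a dword count with radeon_ring_lock(), emit exactly that many dwords with radeon_ring_write(), then radeon_ring_unlock_commit() to publish the new write pointer to the CP. The idiom, sketched with a hypothetical one-payload NOP packet:

    r = radeon_ring_lock(rdev, ring, 2);    /* reserve 2 dwords */
    if (r) {
            DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
            return r;
    }
    radeon_ring_write(ring, PACKET3(PACKET3_NOP, 0));  /* header + 1 payload dword */
    radeon_ring_write(ring, 0);
    radeon_ring_unlock_commit(rdev, ring, false);      /* false: no HDP flush */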
1623 static void cayman_cp_fini(struct radeon_device *rdev)
1625 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
1626 cayman_cp_enable(rdev, false);
1627 radeon_ring_fini(rdev, ring);
1628 radeon_scratch_free(rdev, ring->rptr_save_reg);
1631 static int cayman_cp_resume(struct radeon_device *rdev)
1692 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
1700 ring = &rdev->ring[ridx[i]];
1709 addr = rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET;
1716 ring = &rdev->ring[ridx[i]];
1722 ring = &rdev->ring[ridx[i]];
1734 cayman_cp_start(rdev);
1735 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
1736 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
1737 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
1739 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
1741 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
1742 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
1743 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
1747 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
1748 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
1753 u32 cayman_gpu_check_soft_reset(struct radeon_device *rdev)
1814 if (evergreen_is_display_hung(rdev))
1831 static void cayman_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
1840 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
1842 evergreen_print_gpu_status_regs(rdev);
1843 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_ADDR 0x%08X\n",
1845 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_STATUS 0x%08X\n",
1847 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
1849 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
1871 evergreen_mc_stop(rdev, &save);
1872 if (evergreen_mc_wait_for_idle(rdev)) {
1873 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1921 if (!(rdev->flags & RADEON_IS_IGP)) {
1929 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
1943 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
1957 evergreen_mc_resume(rdev, &save);
1960 evergreen_print_gpu_status_regs(rdev);
1963 int cayman_asic_reset(struct radeon_device *rdev, bool hard)
1968 evergreen_gpu_pci_config_reset(rdev);
1972 reset_mask = cayman_gpu_check_soft_reset(rdev);
1975 r600_set_bios_scratch_engine_hung(rdev, true);
1977 cayman_gpu_soft_reset(rdev, reset_mask);
1979 reset_mask = cayman_gpu_check_soft_reset(rdev);
1982 evergreen_gpu_pci_config_reset(rdev);
1984 r600_set_bios_scratch_engine_hung(rdev, false);
1992 * @rdev: radeon_device pointer
1998 bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
2000 u32 reset_mask = cayman_gpu_check_soft_reset(rdev);
2005 radeon_ring_lockup_update(rdev, ring);
2008 return radeon_ring_test_lockup(rdev, ring);
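cayman_gfx_is_lockup() ties the soft-reset status to per-ring bookkeeping: if none of the GFX-related reset bits are raised, the ring is presumed live, so the lockup timestamp is refreshed and false is returned; otherwise the generic ring test decides. Reassembled around the matched lines (the mask names are the standard radeon reset flags):

    bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
    {
            u32 reset_mask = cayman_gpu_check_soft_reset(rdev);

            if (!(reset_mask & (RADEON_RESET_GFX |
                                RADEON_RESET_COMPUTE |
                                RADEON_RESET_CP))) {
                    radeon_ring_lockup_update(rdev, ring);
                    return false;
            }
            return radeon_ring_test_lockup(rdev, ring);
    }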
2011 static void cayman_uvd_init(struct radeon_device *rdev)
2015 if (!rdev->has_uvd)
2018 r = radeon_uvd_init(rdev);
2020 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
2022 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
2027 rdev->has_uvd = false;
2030 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
2031 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
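The cayman_uvd_init() matches show the soft-fail pattern also used for VCE below: a failed radeon_uvd_init() is logged and UVD is simply switched off (2027) rather than failing device bring-up, because cayman_uvd_start()/cayman_uvd_resume() bail early when has_uvd is false. Reassembled from the matched lines:

    static void cayman_uvd_init(struct radeon_device *rdev)
    {
            int r;

            if (!rdev->has_uvd)
                    return;

            r = radeon_uvd_init(rdev);
            if (r) {
                    dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
                    /*
                     * At this point rdev->uvd.vcpu_bo is NULL which trickles
                     * down to an early bail-out in cayman_uvd_start(), so do
                     * not fail init; just mark UVD absent.
                     */
                    rdev->has_uvd = false;
                    return;
            }
            rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
            r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
    }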
2034 static void cayman_uvd_start(struct radeon_device *rdev)
2038 if (!rdev->has_uvd)
2041 r = uvd_v2_2_resume(rdev);
2043 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
2046 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
2048 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
2054 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
2057 static void cayman_uvd_resume(struct radeon_device *rdev)
2062 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
2065 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
2066 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
2068 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
2071 r = uvd_v1_0_init(rdev);
2073 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
2078 static void cayman_vce_init(struct radeon_device *rdev)
2083 if (!rdev->has_vce)
2086 r = radeon_vce_init(rdev);
2088 dev_err(rdev->dev, "failed VCE (%d) init.\n", r);
2090 * At this point rdev->vce.vcpu_bo is NULL which trickles down
2095 rdev->has_vce = false;
2098 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL;
2099 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096);
2100 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL;
2101 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096);
2104 static void cayman_vce_start(struct radeon_device *rdev)
2108 if (!rdev->has_vce)
2111 r = radeon_vce_resume(rdev);
2113 dev_err(rdev->dev, "failed VCE resume (%d).\n", r);
2116 r = vce_v1_0_resume(rdev);
2118 dev_err(rdev->dev, "failed VCE resume (%d).\n", r);
2121 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE1_INDEX);
2123 dev_err(rdev->dev, "failed initializing VCE1 fences (%d).\n", r);
2126 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE2_INDEX);
2128 dev_err(rdev->dev, "failed initializing VCE2 fences (%d).\n", r);
2134 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0;
2135 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0;
2138 static void cayman_vce_resume(struct radeon_device *rdev)
2143 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size)
2146 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX];
2147 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0);
2149 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r);
2152 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX];
2153 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0);
2155 dev_err(rdev->dev, "failed initializing VCE2 ring (%d).\n", r);
2158 r = vce_v1_0_init(rdev);
2160 dev_err(rdev->dev, "failed initializing VCE (%d).\n", r);
2165 static int cayman_startup(struct radeon_device *rdev)
2167 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2171 evergreen_pcie_gen2_enable(rdev);
2173 evergreen_program_aspm(rdev);
2176 r = r600_vram_scratch_init(rdev);
2180 evergreen_mc_program(rdev);
2182 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) {
2183 r = ni_mc_load_microcode(rdev);
2190 r = cayman_pcie_gart_enable(rdev);
2193 cayman_gpu_init(rdev);
2196 if (rdev->flags & RADEON_IS_IGP) {
2197 rdev->rlc.reg_list = tn_rlc_save_restore_register_list;
2198 rdev->rlc.reg_list_size =
2200 rdev->rlc.cs_data = cayman_cs_data;
2201 r = sumo_rlc_init(rdev);
2209 r = radeon_wb_init(rdev);
2213 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
2215 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
2219 cayman_uvd_start(rdev);
2220 cayman_vce_start(rdev);
2222 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
2224 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
2228 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
2230 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
2234 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
2236 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
2240 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
2242 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
2247 if (!rdev->irq.installed) {
2248 r = radeon_irq_kms_init(rdev);
2253 r = r600_irq_init(rdev);
2256 radeon_irq_kms_fini(rdev);
2259 evergreen_irq_set(rdev);
2261 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
2266 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
2267 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
2272 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
2273 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
2278 r = cayman_cp_load_microcode(rdev);
2281 r = cayman_cp_resume(rdev);
2285 r = cayman_dma_resume(rdev);
2289 cayman_uvd_resume(rdev);
2290 cayman_vce_resume(rdev);
2292 r = radeon_ib_pool_init(rdev);
2294 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
2298 r = radeon_vm_manager_init(rdev);
2300 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
2304 r = radeon_audio_init(rdev);
2311 int cayman_resume(struct radeon_device *rdev)
2320 atom_asic_init(rdev->mode_info.atom_context);
2323 ni_init_golden_registers(rdev);
2325 if (rdev->pm.pm_method == PM_METHOD_DPM)
2326 radeon_pm_resume(rdev);
2328 rdev->accel_working = true;
2329 r = cayman_startup(rdev);
2332 rdev->accel_working = false;
2338 int cayman_suspend(struct radeon_device *rdev)
2340 radeon_pm_suspend(rdev);
2341 radeon_audio_fini(rdev);
2342 radeon_vm_manager_fini(rdev);
2343 cayman_cp_enable(rdev, false);
2344 cayman_dma_stop(rdev);
2345 if (rdev->has_uvd) {
2346 uvd_v1_0_fini(rdev);
2347 radeon_uvd_suspend(rdev);
2349 evergreen_irq_suspend(rdev);
2350 radeon_wb_disable(rdev);
2351 cayman_pcie_gart_disable(rdev);
2361 int cayman_init(struct radeon_device *rdev)
2363 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2367 if (!radeon_get_bios(rdev)) {
2368 if (ASIC_IS_AVIVO(rdev))
2372 if (!rdev->is_atom_bios) {
2373 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n");
2376 r = radeon_atombios_init(rdev);
2381 if (!radeon_card_posted(rdev)) {
2382 if (!rdev->bios) {
2383 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2387 atom_asic_init(rdev->mode_info.atom_context);
2390 ni_init_golden_registers(rdev);
2392 r600_scratch_init(rdev);
2394 radeon_surface_init(rdev);
2396 radeon_get_clock_info(rdev->ddev);
2398 r = radeon_fence_driver_init(rdev);
2402 r = evergreen_mc_init(rdev);
2406 r = radeon_bo_init(rdev);
2410 if (rdev->flags & RADEON_IS_IGP) {
2411 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
2412 r = ni_init_microcode(rdev);
2419 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
2420 r = ni_init_microcode(rdev);
2429 radeon_pm_init(rdev);
2432 r600_ring_init(rdev, ring, 1024 * 1024);
2434 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
2436 r600_ring_init(rdev, ring, 64 * 1024);
2438 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
2440 r600_ring_init(rdev, ring, 64 * 1024);
2442 cayman_uvd_init(rdev);
2443 cayman_vce_init(rdev);
2445 rdev->ih.ring_obj = NULL;
2446 r600_ih_ring_init(rdev, 64 * 1024);
2448 r = r600_pcie_gart_init(rdev);
2452 rdev->accel_working = true;
2453 r = cayman_startup(rdev);
2455 dev_err(rdev->dev, "disabling GPU acceleration\n");
2456 cayman_cp_fini(rdev);
2457 cayman_dma_fini(rdev);
2458 r600_irq_fini(rdev);
2459 if (rdev->flags & RADEON_IS_IGP)
2460 sumo_rlc_fini(rdev);
2461 radeon_wb_fini(rdev);
2462 radeon_ib_pool_fini(rdev);
2463 radeon_vm_manager_fini(rdev);
2464 radeon_irq_kms_fini(rdev);
2465 cayman_pcie_gart_fini(rdev);
2466 rdev->accel_working = false;
2476 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
2484 void cayman_fini(struct radeon_device *rdev)
2486 radeon_pm_fini(rdev);
2487 cayman_cp_fini(rdev);
2488 cayman_dma_fini(rdev);
2489 r600_irq_fini(rdev);
2490 if (rdev->flags & RADEON_IS_IGP)
2491 sumo_rlc_fini(rdev);
2492 radeon_wb_fini(rdev);
2493 radeon_vm_manager_fini(rdev);
2494 radeon_ib_pool_fini(rdev);
2495 radeon_irq_kms_fini(rdev);
2496 uvd_v1_0_fini(rdev);
2497 radeon_uvd_fini(rdev);
2498 if (rdev->has_vce)
2499 radeon_vce_fini(rdev);
2500 cayman_pcie_gart_fini(rdev);
2501 r600_vram_scratch_fini(rdev);
2502 radeon_gem_fini(rdev);
2503 radeon_fence_driver_fini(rdev);
2504 radeon_bo_fini(rdev);
2505 radeon_atombios_fini(rdev);
2506 kfree(rdev->bios);
2507 rdev->bios = NULL;
2513 int cayman_vm_init(struct radeon_device *rdev)
2516 rdev->vm_manager.nvm = 8;
2518 if (rdev->flags & RADEON_IS_IGP) {
2521 rdev->vm_manager.vram_base_offset = tmp;
2523 rdev->vm_manager.vram_base_offset = 0;
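cayman_vm_init() sizes the VM manager at 8 hardware contexts (2516) and computes where VRAM pages start: on IGPs the framebuffer is carved out of system RAM, so the base offset comes from the fusion MC register, while discrete boards use 0 (2523). A sketch; the FUS_MC_VM_FB_OFFSET read and the shift are assumed, since only the assignments appear in the matches:

    int cayman_vm_init(struct radeon_device *rdev)
    {
            /* number of VM contexts */
            rdev->vm_manager.nvm = 8;
            /* base offset of vram pages */
            if (rdev->flags & RADEON_IS_IGP) {
                    u64 tmp = RREG32(FUS_MC_VM_FB_OFFSET);
                    tmp <<= 22;     /* register holds the offset in 4 MB units */
                    rdev->vm_manager.vram_base_offset = tmp;
            } else {
                    rdev->vm_manager.vram_base_offset = 0;
            }
            return 0;
    }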
2527 void cayman_vm_fini(struct radeon_device *rdev)
2534 * @rdev: radeon_device pointer
2540 void cayman_vm_decode_fault(struct radeon_device *rdev,
2695 * @rdev: radeon_device pointer
2700 void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring,
2729 int tn_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk)
2734 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,