2 * Copyright 2010 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
22 * Authors: Alex Deucher
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
29 #include "radeon_asic.h"
30 #include "radeon_drm.h"
31 #include "evergreend.h"
34 #include "evergreen_reg.h"
36 #define EVERGREEN_PFP_UCODE_SIZE 1120
37 #define EVERGREEN_PM4_UCODE_SIZE 1376
39 static void evergreen_gpu_init(struct radeon_device *rdev);
40 void evergreen_fini(struct radeon_device *rdev);
42 /* get temperature in millidegrees */
43 u32 evergreen_get_temp(struct radeon_device *rdev)
45 u32 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
51 else if ((temp >> 9) & 1)
54 actual_temp = (temp >> 1) & 0xff;
56 return actual_temp * 1000;
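/* apply the requested power state's SW-controlled voltage if it differs from the current VDDC */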
59 void evergreen_pm_misc(struct radeon_device *rdev)
61 int req_ps_idx = rdev->pm.requested_power_state_index;
62 int req_cm_idx = rdev->pm.requested_clock_mode_index;
63 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
64 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
66 if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
67 if (voltage->voltage != rdev->pm.current_vddc) {
68 radeon_atom_set_voltage(rdev, voltage->voltage);
69 rdev->pm.current_vddc = voltage->voltage;
70 DRM_DEBUG("Setting: v: %d\n", voltage->voltage);
75 void evergreen_pm_prepare(struct radeon_device *rdev)
77 struct drm_device *ddev = rdev->ddev;
78 struct drm_crtc *crtc;
79 struct radeon_crtc *radeon_crtc;
82 /* disable any active CRTCs */
83 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
84 radeon_crtc = to_radeon_crtc(crtc);
85 if (radeon_crtc->enabled) {
86 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
87 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
88 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
93 void evergreen_pm_finish(struct radeon_device *rdev)
95 struct drm_device *ddev = rdev->ddev;
96 struct drm_crtc *crtc;
97 struct radeon_crtc *radeon_crtc;
100 /* enable any active CRTCs */
101 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
102 radeon_crtc = to_radeon_crtc(crtc);
103 if (radeon_crtc->enabled) {
104 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
105 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
106 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
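/* report whether the given hot plug detect pin currently senses a connected device */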
111 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
113 bool connected = false;
117 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
121 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
125 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
129 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
133 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
137 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
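/* program the HPD interrupt polarity from the current sense state so the next connection change raises an interrupt */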
147 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
148 enum radeon_hpd_id hpd)
151 bool connected = evergreen_hpd_sense(rdev, hpd);
155 tmp = RREG32(DC_HPD1_INT_CONTROL);
157 tmp &= ~DC_HPDx_INT_POLARITY;
159 tmp |= DC_HPDx_INT_POLARITY;
160 WREG32(DC_HPD1_INT_CONTROL, tmp);
163 tmp = RREG32(DC_HPD2_INT_CONTROL);
165 tmp &= ~DC_HPDx_INT_POLARITY;
167 tmp |= DC_HPDx_INT_POLARITY;
168 WREG32(DC_HPD2_INT_CONTROL, tmp);
171 tmp = RREG32(DC_HPD3_INT_CONTROL);
173 tmp &= ~DC_HPDx_INT_POLARITY;
175 tmp |= DC_HPDx_INT_POLARITY;
176 WREG32(DC_HPD3_INT_CONTROL, tmp);
179 tmp = RREG32(DC_HPD4_INT_CONTROL);
181 tmp &= ~DC_HPDx_INT_POLARITY;
183 tmp |= DC_HPDx_INT_POLARITY;
184 WREG32(DC_HPD4_INT_CONTROL, tmp);
187 tmp = RREG32(DC_HPD5_INT_CONTROL);
189 tmp &= ~DC_HPDx_INT_POLARITY;
191 tmp |= DC_HPDx_INT_POLARITY;
192 WREG32(DC_HPD5_INT_CONTROL, tmp);
195 tmp = RREG32(DC_HPD6_INT_CONTROL);
197 tmp &= ~DC_HPDx_INT_POLARITY;
199 tmp |= DC_HPDx_INT_POLARITY;
200 WREG32(DC_HPD6_INT_CONTROL, tmp);
207 void evergreen_hpd_init(struct radeon_device *rdev)
209 struct drm_device *dev = rdev->ddev;
210 struct drm_connector *connector;
211 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
212 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
214 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
215 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
216 switch (radeon_connector->hpd.hpd) {
218 WREG32(DC_HPD1_CONTROL, tmp);
219 rdev->irq.hpd[0] = true;
222 WREG32(DC_HPD2_CONTROL, tmp);
223 rdev->irq.hpd[1] = true;
226 WREG32(DC_HPD3_CONTROL, tmp);
227 rdev->irq.hpd[2] = true;
230 WREG32(DC_HPD4_CONTROL, tmp);
231 rdev->irq.hpd[3] = true;
234 WREG32(DC_HPD5_CONTROL, tmp);
235 rdev->irq.hpd[4] = true;
238 WREG32(DC_HPD6_CONTROL, tmp);
239 rdev->irq.hpd[5] = true;
245 if (rdev->irq.installed)
246 evergreen_irq_set(rdev);
249 void evergreen_hpd_fini(struct radeon_device *rdev)
251 struct drm_device *dev = rdev->ddev;
252 struct drm_connector *connector;
254 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
255 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
256 switch (radeon_connector->hpd.hpd) {
258 WREG32(DC_HPD1_CONTROL, 0);
259 rdev->irq.hpd[0] = false;
262 WREG32(DC_HPD2_CONTROL, 0);
263 rdev->irq.hpd[1] = false;
266 WREG32(DC_HPD3_CONTROL, 0);
267 rdev->irq.hpd[2] = false;
270 WREG32(DC_HPD4_CONTROL, 0);
271 rdev->irq.hpd[3] = false;
274 WREG32(DC_HPD5_CONTROL, 0);
275 rdev->irq.hpd[4] = false;
278 WREG32(DC_HPD6_CONTROL, 0);
279 rdev->irq.hpd[5] = false;
287 void evergreen_bandwidth_update(struct radeon_device *rdev)
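/* poll SRBM_STATUS until the memory controller blocks report idle or the timeout expires */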
292 static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
297 for (i = 0; i < rdev->usec_timeout; i++) {
299 tmp = RREG32(SRBM_STATUS) & 0x1F00;
310 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
315 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
316 for (i = 0; i < rdev->usec_timeout; i++) {
318 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
319 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
321 printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
331 int evergreen_pcie_gart_enable(struct radeon_device *rdev)
336 if (rdev->gart.table.vram.robj == NULL) {
337 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
340 r = radeon_gart_table_vram_pin(rdev);
343 radeon_gart_restore(rdev);
345 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
346 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
347 EFFECTIVE_L2_QUEUE_SIZE(7));
348 WREG32(VM_L2_CNTL2, 0);
349 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
350 /* Setup TLB control */
351 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
352 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
353 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
354 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
355 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
356 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
357 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
358 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
359 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
360 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
361 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
362 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
363 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
364 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
365 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
366 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
367 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
368 (u32)(rdev->dummy_page.addr >> 12));
369 WREG32(VM_CONTEXT1_CNTL, 0);
371 evergreen_pcie_gart_tlb_flush(rdev);
372 rdev->gart.ready = true;
376 void evergreen_pcie_gart_disable(struct radeon_device *rdev)
381 /* Disable all tables */
382 WREG32(VM_CONTEXT0_CNTL, 0);
383 WREG32(VM_CONTEXT1_CNTL, 0);
386 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
387 EFFECTIVE_L2_QUEUE_SIZE(7));
388 WREG32(VM_L2_CNTL2, 0);
389 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
390 /* Setup TLB control */
391 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
392 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
393 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
394 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
395 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
396 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
397 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
398 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
399 if (rdev->gart.table.vram.robj) {
400 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
401 if (likely(r == 0)) {
402 radeon_bo_kunmap(rdev->gart.table.vram.robj);
403 radeon_bo_unpin(rdev->gart.table.vram.robj);
404 radeon_bo_unreserve(rdev->gart.table.vram.robj);
409 void evergreen_pcie_gart_fini(struct radeon_device *rdev)
411 evergreen_pcie_gart_disable(rdev);
412 radeon_gart_table_vram_free(rdev);
413 radeon_gart_fini(rdev);
417 void evergreen_agp_enable(struct radeon_device *rdev)
422 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
423 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
424 EFFECTIVE_L2_QUEUE_SIZE(7));
425 WREG32(VM_L2_CNTL2, 0);
426 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
427 /* Setup TLB control */
428 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
429 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
430 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
431 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
432 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
433 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
434 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
435 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
436 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
437 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
438 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
439 WREG32(VM_CONTEXT0_CNTL, 0);
440 WREG32(VM_CONTEXT1_CNTL, 0);
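/* save VGA and CRTC state, then blank all display controllers so the MC can be reprogrammed safely */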
443 static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
445 save->vga_control[0] = RREG32(D1VGA_CONTROL);
446 save->vga_control[1] = RREG32(D2VGA_CONTROL);
447 save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
448 save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
449 save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
450 save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
451 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
452 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
453 save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
454 save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
455 save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
456 save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
457 save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
458 save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
461 WREG32(VGA_RENDER_CONTROL, 0);
462 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
463 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
464 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
465 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
466 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
467 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
468 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
469 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
470 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
471 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
472 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
473 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
474 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
475 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
476 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
477 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
478 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
479 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
481 WREG32(D1VGA_CONTROL, 0);
482 WREG32(D2VGA_CONTROL, 0);
483 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
484 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
485 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
486 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
489 static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
491 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
492 upper_32_bits(rdev->mc.vram_start));
493 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
494 upper_32_bits(rdev->mc.vram_start));
495 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
496 (u32)rdev->mc.vram_start);
497 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
498 (u32)rdev->mc.vram_start);
500 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
501 upper_32_bits(rdev->mc.vram_start));
502 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
503 upper_32_bits(rdev->mc.vram_start));
504 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
505 (u32)rdev->mc.vram_start);
506 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
507 (u32)rdev->mc.vram_start);
509 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
510 upper_32_bits(rdev->mc.vram_start));
511 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
512 upper_32_bits(rdev->mc.vram_start));
513 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
514 (u32)rdev->mc.vram_start);
515 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
516 (u32)rdev->mc.vram_start);
518 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
519 upper_32_bits(rdev->mc.vram_start));
520 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
521 upper_32_bits(rdev->mc.vram_start));
522 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
523 (u32)rdev->mc.vram_start);
524 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
525 (u32)rdev->mc.vram_start);
527 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
528 upper_32_bits(rdev->mc.vram_start));
529 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
530 upper_32_bits(rdev->mc.vram_start));
531 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
532 (u32)rdev->mc.vram_start);
533 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
534 (u32)rdev->mc.vram_start);
536 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
537 upper_32_bits(rdev->mc.vram_start));
538 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
539 upper_32_bits(rdev->mc.vram_start));
540 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
541 (u32)rdev->mc.vram_start);
542 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
543 (u32)rdev->mc.vram_start);
545 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
546 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
547 /* Unlock host access */
548 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
550 /* Restore video state */
551 WREG32(D1VGA_CONTROL, save->vga_control[0]);
552 WREG32(D2VGA_CONTROL, save->vga_control[1]);
553 WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
554 WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
555 WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
556 WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
557 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
558 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
559 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
560 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
561 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
562 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
563 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
564 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
565 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
566 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
567 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
568 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
569 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
570 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
571 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
572 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
573 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
574 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
575 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
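/* program the memory controller apertures (system, VRAM, AGP) with the displays stopped */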
578 static void evergreen_mc_program(struct radeon_device *rdev)
580 struct evergreen_mc_save save;
585 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
586 WREG32((0x2c14 + j), 0x00000000);
587 WREG32((0x2c18 + j), 0x00000000);
588 WREG32((0x2c1c + j), 0x00000000);
589 WREG32((0x2c20 + j), 0x00000000);
590 WREG32((0x2c24 + j), 0x00000000);
592 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
594 evergreen_mc_stop(rdev, &save);
595 if (evergreen_mc_wait_for_idle(rdev)) {
596 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
598 /* Lock out access through the VGA aperture */
599 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
600 /* Update configuration */
601 if (rdev->flags & RADEON_IS_AGP) {
602 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
603 /* VRAM before AGP */
604 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
605 rdev->mc.vram_start >> 12);
606 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
607 rdev->mc.gtt_end >> 12);
610 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
611 rdev->mc.gtt_start >> 12);
612 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
613 rdev->mc.vram_end >> 12);
616 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
617 rdev->mc.vram_start >> 12);
618 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
619 rdev->mc.vram_end >> 12);
621 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
622 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
623 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
624 WREG32(MC_VM_FB_LOCATION, tmp);
625 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
626 WREG32(HDP_NONSURFACE_INFO, (2 << 7));
627 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
628 if (rdev->flags & RADEON_IS_AGP) {
629 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
630 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
631 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
633 WREG32(MC_VM_AGP_BASE, 0);
634 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
635 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
637 if (evergreen_mc_wait_for_idle(rdev)) {
638 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
640 evergreen_mc_resume(rdev, &save);
641 /* we need to own VRAM, so turn off the VGA renderer here
642 * to stop it overwriting our objects */
643 rv515_vga_render_disable(rdev);
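/* load the PFP and ME (PM4) microcode into the CP; the firmware images store big-endian words */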
650 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
652 const __be32 *fw_data;
655 if (!rdev->me_fw || !rdev->pfp_fw)
659 WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));
661 fw_data = (const __be32 *)rdev->pfp_fw->data;
662 WREG32(CP_PFP_UCODE_ADDR, 0);
663 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
664 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
665 WREG32(CP_PFP_UCODE_ADDR, 0);
667 fw_data = (const __be32 *)rdev->me_fw->data;
668 WREG32(CP_ME_RAM_WADDR, 0);
669 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
670 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
672 WREG32(CP_PFP_UCODE_ADDR, 0);
673 WREG32(CP_ME_RAM_WADDR, 0);
674 WREG32(CP_ME_RAM_RADDR, 0);
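/* initialize the CP microengine and emit the initial VGT state on the ring */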
678 static int evergreen_cp_start(struct radeon_device *rdev)
683 r = radeon_ring_lock(rdev, 7);
685 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
688 radeon_ring_write(rdev, PACKET3(PACKET3_ME_INITIALIZE, 5));
689 radeon_ring_write(rdev, 0x1);
690 radeon_ring_write(rdev, 0x0);
691 radeon_ring_write(rdev, rdev->config.evergreen.max_hw_contexts - 1);
692 radeon_ring_write(rdev, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
693 radeon_ring_write(rdev, 0);
694 radeon_ring_write(rdev, 0);
695 radeon_ring_unlock_commit(rdev);
698 WREG32(CP_ME_CNTL, cp_me);
700 r = radeon_ring_lock(rdev, 4);
702 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
705 /* init some VGT regs */
706 radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
707 radeon_ring_write(rdev, (VGT_VERTEX_REUSE_BLOCK_CNTL - PACKET3_SET_CONTEXT_REG_START) >> 2);
708 radeon_ring_write(rdev, 0xe);
709 radeon_ring_write(rdev, 0x10);
710 radeon_ring_unlock_commit(rdev);
715 int evergreen_cp_resume(struct radeon_device *rdev)
721 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
722 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
727 RREG32(GRBM_SOFT_RESET);
729 WREG32(GRBM_SOFT_RESET, 0);
730 RREG32(GRBM_SOFT_RESET);
732 /* Set ring buffer size */
733 rb_bufsz = drm_order(rdev->cp.ring_size / 8);
734 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
736 tmp |= BUF_SWAP_32BIT;
738 WREG32(CP_RB_CNTL, tmp);
739 WREG32(CP_SEM_WAIT_TIMER, 0x4);
741 /* Set the write pointer delay */
742 WREG32(CP_RB_WPTR_DELAY, 0);
744 /* Initialize the ring buffer's read and write pointers */
745 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
746 WREG32(CP_RB_RPTR_WR, 0);
747 WREG32(CP_RB_WPTR, 0);
749 /* set the wb address whether it's enabled or not */
750 WREG32(CP_RB_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
751 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
752 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
754 if (rdev->wb.enabled)
755 WREG32(SCRATCH_UMSK, 0xff);
758 WREG32(SCRATCH_UMSK, 0);
762 WREG32(CP_RB_CNTL, tmp);
764 WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
765 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
767 rdev->cp.rptr = RREG32(CP_RB_RPTR);
768 rdev->cp.wptr = RREG32(CP_RB_WPTR);
770 evergreen_cp_start(rdev);
771 rdev->cp.ready = true;
772 r = radeon_ring_test(rdev);
774 rdev->cp.ready = false;
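/* build the tile pipe to render backend mapping, skipping disabled backends and using a linear layout when swizzling is disabled */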
783 static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
786 u32 backend_disable_mask)
789 u32 enabled_backends_mask = 0;
790 u32 enabled_backends_count = 0;
792 u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
795 bool force_no_swizzle;
797 if (num_tile_pipes > EVERGREEN_MAX_PIPES)
798 num_tile_pipes = EVERGREEN_MAX_PIPES;
799 if (num_tile_pipes < 1)
801 if (num_backends > EVERGREEN_MAX_BACKENDS)
802 num_backends = EVERGREEN_MAX_BACKENDS;
803 if (num_backends < 1)
806 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
807 if (((backend_disable_mask >> i) & 1) == 0) {
808 enabled_backends_mask |= (1 << i);
809 ++enabled_backends_count;
811 if (enabled_backends_count == num_backends)
815 if (enabled_backends_count == 0) {
816 enabled_backends_mask = 1;
817 enabled_backends_count = 1;
820 if (enabled_backends_count != num_backends)
821 num_backends = enabled_backends_count;
823 memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
824 switch (rdev->family) {
827 force_no_swizzle = false;
833 force_no_swizzle = true;
836 if (force_no_swizzle) {
837 bool last_backend_enabled = false;
839 force_no_swizzle = false;
840 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
841 if (((enabled_backends_mask >> i) & 1) == 1) {
842 if (last_backend_enabled)
843 force_no_swizzle = true;
844 last_backend_enabled = true;
846 last_backend_enabled = false;
850 switch (num_tile_pipes) {
855 DRM_ERROR("odd number of pipes!\n");
862 if (force_no_swizzle) {
875 if (force_no_swizzle) {
892 if (force_no_swizzle) {
914 for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
915 while (((1 << cur_backend) & enabled_backends_mask) == 0)
916 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
918 backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));
920 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
926 static void evergreen_gpu_init(struct radeon_device *rdev)
928 u32 cc_rb_backend_disable = 0;
929 u32 cc_gc_shader_pipe_config;
930 u32 gb_addr_config = 0;
931 u32 mc_shared_chmap, mc_arb_ramcfg;
937 u32 sq_lds_resource_mgmt;
938 u32 sq_gpr_resource_mgmt_1;
939 u32 sq_gpr_resource_mgmt_2;
940 u32 sq_gpr_resource_mgmt_3;
941 u32 sq_thread_resource_mgmt;
942 u32 sq_thread_resource_mgmt_2;
943 u32 sq_stack_resource_mgmt_1;
944 u32 sq_stack_resource_mgmt_2;
945 u32 sq_stack_resource_mgmt_3;
946 u32 vgt_cache_invalidation;
947 u32 hdp_host_path_cntl;
948 int i, j, num_shader_engines, ps_thread_count;
950 switch (rdev->family) {
953 rdev->config.evergreen.num_ses = 2;
954 rdev->config.evergreen.max_pipes = 4;
955 rdev->config.evergreen.max_tile_pipes = 8;
956 rdev->config.evergreen.max_simds = 10;
957 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
958 rdev->config.evergreen.max_gprs = 256;
959 rdev->config.evergreen.max_threads = 248;
960 rdev->config.evergreen.max_gs_threads = 32;
961 rdev->config.evergreen.max_stack_entries = 512;
962 rdev->config.evergreen.sx_num_of_sets = 4;
963 rdev->config.evergreen.sx_max_export_size = 256;
964 rdev->config.evergreen.sx_max_export_pos_size = 64;
965 rdev->config.evergreen.sx_max_export_smx_size = 192;
966 rdev->config.evergreen.max_hw_contexts = 8;
967 rdev->config.evergreen.sq_num_cf_insts = 2;
969 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
970 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
971 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
974 rdev->config.evergreen.num_ses = 1;
975 rdev->config.evergreen.max_pipes = 4;
976 rdev->config.evergreen.max_tile_pipes = 4;
977 rdev->config.evergreen.max_simds = 10;
978 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
979 rdev->config.evergreen.max_gprs = 256;
980 rdev->config.evergreen.max_threads = 248;
981 rdev->config.evergreen.max_gs_threads = 32;
982 rdev->config.evergreen.max_stack_entries = 512;
983 rdev->config.evergreen.sx_num_of_sets = 4;
984 rdev->config.evergreen.sx_max_export_size = 256;
985 rdev->config.evergreen.sx_max_export_pos_size = 64;
986 rdev->config.evergreen.sx_max_export_smx_size = 192;
987 rdev->config.evergreen.max_hw_contexts = 8;
988 rdev->config.evergreen.sq_num_cf_insts = 2;
990 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
991 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
992 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
995 rdev->config.evergreen.num_ses = 1;
996 rdev->config.evergreen.max_pipes = 4;
997 rdev->config.evergreen.max_tile_pipes = 4;
998 rdev->config.evergreen.max_simds = 5;
999 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1000 rdev->config.evergreen.max_gprs = 256;
1001 rdev->config.evergreen.max_threads = 248;
1002 rdev->config.evergreen.max_gs_threads = 32;
1003 rdev->config.evergreen.max_stack_entries = 256;
1004 rdev->config.evergreen.sx_num_of_sets = 4;
1005 rdev->config.evergreen.sx_max_export_size = 256;
1006 rdev->config.evergreen.sx_max_export_pos_size = 64;
1007 rdev->config.evergreen.sx_max_export_smx_size = 192;
1008 rdev->config.evergreen.max_hw_contexts = 8;
1009 rdev->config.evergreen.sq_num_cf_insts = 2;
1011 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1012 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1013 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1017 rdev->config.evergreen.num_ses = 1;
1018 rdev->config.evergreen.max_pipes = 2;
1019 rdev->config.evergreen.max_tile_pipes = 2;
1020 rdev->config.evergreen.max_simds = 2;
1021 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1022 rdev->config.evergreen.max_gprs = 256;
1023 rdev->config.evergreen.max_threads = 192;
1024 rdev->config.evergreen.max_gs_threads = 16;
1025 rdev->config.evergreen.max_stack_entries = 256;
1026 rdev->config.evergreen.sx_num_of_sets = 4;
1027 rdev->config.evergreen.sx_max_export_size = 128;
1028 rdev->config.evergreen.sx_max_export_pos_size = 32;
1029 rdev->config.evergreen.sx_max_export_smx_size = 96;
1030 rdev->config.evergreen.max_hw_contexts = 4;
1031 rdev->config.evergreen.sq_num_cf_insts = 1;
1033 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1034 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1035 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1039 /* Initialize HDP */
1040 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
1041 WREG32((0x2c14 + j), 0x00000000);
1042 WREG32((0x2c18 + j), 0x00000000);
1043 WREG32((0x2c1c + j), 0x00000000);
1044 WREG32((0x2c20 + j), 0x00000000);
1045 WREG32((0x2c24 + j), 0x00000000);
1048 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
1050 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
1052 cc_gc_shader_pipe_config |=
1053 INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
1054 & EVERGREEN_MAX_PIPES_MASK);
1055 cc_gc_shader_pipe_config |=
1056 INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
1057 & EVERGREEN_MAX_SIMDS_MASK);
1059 cc_rb_backend_disable =
1060 BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
1061 & EVERGREEN_MAX_BACKENDS_MASK);
1064 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
1065 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
1067 switch (rdev->config.evergreen.max_tile_pipes) {
1070 gb_addr_config |= NUM_PIPES(0);
1073 gb_addr_config |= NUM_PIPES(1);
1076 gb_addr_config |= NUM_PIPES(2);
1079 gb_addr_config |= NUM_PIPES(3);
1083 gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
1084 gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
1085 gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
1086 gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
1087 gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
1088 gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
1090 if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
1091 gb_addr_config |= ROW_SIZE(2);
1093 gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
1095 if (rdev->ddev->pdev->device == 0x689e) {
1098 u8 efuse_box_bit_131_124;
1100 WREG32(RCU_IND_INDEX, 0x204);
1101 efuse_straps_4 = RREG32(RCU_IND_DATA);
1102 WREG32(RCU_IND_INDEX, 0x203);
1103 efuse_straps_3 = RREG32(RCU_IND_DATA);
1104 efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
1106 switch(efuse_box_bit_131_124) {
1108 gb_backend_map = 0x76543210;
1111 gb_backend_map = 0x77553311;
1114 gb_backend_map = 0x77553300;
1117 gb_backend_map = 0x77552211;
1120 gb_backend_map = 0x77443300;
1123 gb_backend_map = 0x66552211;
1126 gb_backend_map = 0x77552200;
1129 gb_backend_map = 0x66442200;
1132 gb_backend_map = 0x66553311;
1135 DRM_ERROR("bad backend map, using default\n");
1137 evergreen_get_tile_pipe_to_backend_map(rdev,
1138 rdev->config.evergreen.max_tile_pipes,
1139 rdev->config.evergreen.max_backends,
1140 ((EVERGREEN_MAX_BACKENDS_MASK <<
1141 rdev->config.evergreen.max_backends) &
1142 EVERGREEN_MAX_BACKENDS_MASK));
1145 } else if (rdev->ddev->pdev->device == 0x68b9) {
1147 u8 efuse_box_bit_127_124;
1149 WREG32(RCU_IND_INDEX, 0x203);
1150 efuse_straps_3 = RREG32(RCU_IND_DATA);
1151 efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);
1153 switch(efuse_box_bit_127_124) {
1155 gb_backend_map = 0x00003210;
1161 gb_backend_map = 0x00003311;
1164 DRM_ERROR("bad backend map, using default\n");
1166 evergreen_get_tile_pipe_to_backend_map(rdev,
1167 rdev->config.evergreen.max_tile_pipes,
1168 rdev->config.evergreen.max_backends,
1169 ((EVERGREEN_MAX_BACKENDS_MASK <<
1170 rdev->config.evergreen.max_backends) &
1171 EVERGREEN_MAX_BACKENDS_MASK));
1175 switch (rdev->family) {
1178 gb_backend_map = 0x66442200;
1181 gb_backend_map = 0x00006420;
1185 evergreen_get_tile_pipe_to_backend_map(rdev,
1186 rdev->config.evergreen.max_tile_pipes,
1187 rdev->config.evergreen.max_backends,
1188 ((EVERGREEN_MAX_BACKENDS_MASK <<
1189 rdev->config.evergreen.max_backends) &
1190 EVERGREEN_MAX_BACKENDS_MASK));
1194 rdev->config.evergreen.tile_config = gb_addr_config;
1195 WREG32(GB_BACKEND_MAP, gb_backend_map);
1196 WREG32(GB_ADDR_CONFIG, gb_addr_config);
1197 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
1198 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
1200 num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
1201 grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
1203 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
1204 u32 rb = cc_rb_backend_disable | (0xf0 << 16);
1205 u32 sp = cc_gc_shader_pipe_config;
1206 u32 gfx = grbm_gfx_index | SE_INDEX(i);
1208 if (i == num_shader_engines) {
1209 rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
1210 sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
1213 WREG32(GRBM_GFX_INDEX, gfx);
1214 WREG32(RLC_GFX_INDEX, gfx);
1216 WREG32(CC_RB_BACKEND_DISABLE, rb);
1217 WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
1218 WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
1219 WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
1222 grbm_gfx_index |= SE_BROADCAST_WRITES;
1223 WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
1224 WREG32(RLC_GFX_INDEX, grbm_gfx_index);
1226 WREG32(CGTS_SYS_TCC_DISABLE, 0);
1227 WREG32(CGTS_TCC_DISABLE, 0);
1228 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
1229 WREG32(CGTS_USER_TCC_DISABLE, 0);
1231 /* set HW defaults for 3D engine */
1232 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
1233 ROQ_IB2_START(0x2b)));
1235 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
1237 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
1242 sx_debug_1 = RREG32(SX_DEBUG_1);
1243 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
1244 WREG32(SX_DEBUG_1, sx_debug_1);
1247 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
1248 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
1249 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
1250 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
1252 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
1253 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
1254 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
1256 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
1257 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
1258 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
1260 WREG32(VGT_NUM_INSTANCES, 1);
1261 WREG32(SPI_CONFIG_CNTL, 0);
1262 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
1263 WREG32(CP_PERFMON_CNTL, 0);
1265 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
1266 FETCH_FIFO_HIWATER(0x4) |
1267 DONE_FIFO_HIWATER(0xe0) |
1268 ALU_UPDATE_FIFO_HIWATER(0x8)));
1270 sq_config = RREG32(SQ_CONFIG);
1271 sq_config &= ~(PS_PRIO(3) |
1275 sq_config |= (VC_ENABLE |
1282 if (rdev->family == CHIP_CEDAR)
1283 /* no vertex cache */
1284 sq_config &= ~VC_ENABLE;
1286 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
1288 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
1289 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
1290 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
1291 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1292 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1293 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1294 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1296 if (rdev->family == CHIP_CEDAR)
1297 ps_thread_count = 96;
1299 ps_thread_count = 128;
1301 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
1302 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1303 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1304 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1305 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1306 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1308 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1309 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1310 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1311 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1312 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1313 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1315 WREG32(SQ_CONFIG, sq_config);
1316 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
1317 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
1318 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
1319 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
1320 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
1321 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
1322 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
1323 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
1324 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
1325 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
1327 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
1328 FORCE_EOV_MAX_REZ_CNT(255)));
1330 if (rdev->family == CHIP_CEDAR)
1331 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
1333 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
1334 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
1335 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
1337 WREG32(VGT_GS_VERTEX_REUSE, 16);
1338 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
1340 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
1341 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
1343 WREG32(CB_PERF_CTR0_SEL_0, 0);
1344 WREG32(CB_PERF_CTR0_SEL_1, 0);
1345 WREG32(CB_PERF_CTR1_SEL_0, 0);
1346 WREG32(CB_PERF_CTR1_SEL_1, 0);
1347 WREG32(CB_PERF_CTR2_SEL_0, 0);
1348 WREG32(CB_PERF_CTR2_SEL_1, 0);
1349 WREG32(CB_PERF_CTR3_SEL_0, 0);
1350 WREG32(CB_PERF_CTR3_SEL_1, 0);
1352 /* clear render buffer base addresses */
1353 WREG32(CB_COLOR0_BASE, 0);
1354 WREG32(CB_COLOR1_BASE, 0);
1355 WREG32(CB_COLOR2_BASE, 0);
1356 WREG32(CB_COLOR3_BASE, 0);
1357 WREG32(CB_COLOR4_BASE, 0);
1358 WREG32(CB_COLOR5_BASE, 0);
1359 WREG32(CB_COLOR6_BASE, 0);
1360 WREG32(CB_COLOR7_BASE, 0);
1361 WREG32(CB_COLOR8_BASE, 0);
1362 WREG32(CB_COLOR9_BASE, 0);
1363 WREG32(CB_COLOR10_BASE, 0);
1364 WREG32(CB_COLOR11_BASE, 0);
1366 /* set the shader const cache sizes to 0 */
1367 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
1369 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
1372 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
1373 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
1375 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
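/* determine VRAM width, aperture and size, and set up the GPU memory layout */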
1381 int evergreen_mc_init(struct radeon_device *rdev)
1384 int chansize, numchan;
1386 /* Get VRAM information */
1387 rdev->mc.vram_is_ddr = true;
1388 tmp = RREG32(MC_ARB_RAMCFG);
1389 if (tmp & CHANSIZE_OVERRIDE) {
1391 } else if (tmp & CHANSIZE_MASK) {
1396 tmp = RREG32(MC_SHARED_CHMAP);
1397 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1412 rdev->mc.vram_width = numchan * chansize;
1413 /* Could the aperture size report 0? */
1414 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
1415 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
1416 /* Setup GPU memory space */
1417 /* size in MB on evergreen */
1418 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1419 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1420 rdev->mc.visible_vram_size = rdev->mc.aper_size;
1421 rdev->mc.active_vram_size = rdev->mc.visible_vram_size;
1422 r600_vram_gtt_location(rdev, &rdev->mc);
1423 radeon_update_bandwidth_info(rdev);
1428 bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
1430 /* FIXME: implement for evergreen */
1434 static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
1436 struct evergreen_mc_save save;
1440 dev_info(rdev->dev, "GPU softreset\n");
1441 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
1442 RREG32(GRBM_STATUS));
1443 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
1444 RREG32(GRBM_STATUS_SE0));
1445 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
1446 RREG32(GRBM_STATUS_SE1));
1447 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
1448 RREG32(SRBM_STATUS));
1449 evergreen_mc_stop(rdev, &save);
1450 if (evergreen_mc_wait_for_idle(rdev)) {
1451 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1453 /* Disable CP parsing/prefetching */
1454 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
1456 /* reset all the gfx blocks */
1457 grbm_reset = (SOFT_RESET_CP |
1470 dev_info(rdev->dev, " GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
1471 WREG32(GRBM_SOFT_RESET, grbm_reset);
1472 (void)RREG32(GRBM_SOFT_RESET);
1474 WREG32(GRBM_SOFT_RESET, 0);
1475 (void)RREG32(GRBM_SOFT_RESET);
1477 /* reset all the system blocks */
1478 srbm_reset = SRBM_SOFT_RESET_ALL_MASK;
1480 dev_info(rdev->dev, " SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
1481 WREG32(SRBM_SOFT_RESET, srbm_reset);
1482 (void)RREG32(SRBM_SOFT_RESET);
1484 WREG32(SRBM_SOFT_RESET, 0);
1485 (void)RREG32(SRBM_SOFT_RESET);
1486 /* Wait a little for things to settle down */
1488 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
1489 RREG32(GRBM_STATUS));
1490 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
1491 RREG32(GRBM_STATUS_SE0));
1492 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
1493 RREG32(GRBM_STATUS_SE1));
1494 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
1495 RREG32(SRBM_STATUS));
1496 /* After reset we need to reinit the asic as the GPU often ends up in an
1499 atom_asic_init(rdev->mode_info.atom_context);
1500 evergreen_mc_resume(rdev, &save);
1504 int evergreen_asic_reset(struct radeon_device *rdev)
1506 return evergreen_gpu_soft_reset(rdev);
1511 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
1515 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
1517 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
1519 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
1521 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
1523 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
1525 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
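/* disable all CRTC, GRPH, DAC and HPD interrupt sources; only the HPD polarity bits are preserved */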
1531 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
1535 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
1536 WREG32(GRBM_INT_CNTL, 0);
1537 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1538 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1539 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1540 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1541 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1542 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1544 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1545 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1546 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1547 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1548 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1549 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1551 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
1552 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
1554 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1555 WREG32(DC_HPD1_INT_CONTROL, tmp);
1556 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1557 WREG32(DC_HPD2_INT_CONTROL, tmp);
1558 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1559 WREG32(DC_HPD3_INT_CONTROL, tmp);
1560 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1561 WREG32(DC_HPD4_INT_CONTROL, tmp);
1562 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1563 WREG32(DC_HPD5_INT_CONTROL, tmp);
1564 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1565 WREG32(DC_HPD6_INT_CONTROL, tmp);
1569 int evergreen_irq_set(struct radeon_device *rdev)
1571 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
1572 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
1573 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
1574 u32 grbm_int_cntl = 0;
1576 if (!rdev->irq.installed) {
1577 WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
1580 /* don't enable anything if the ih is disabled */
1581 if (!rdev->ih.enabled) {
1582 r600_disable_interrupts(rdev);
1583 /* force the active interrupt state to all disabled */
1584 evergreen_disable_interrupt_state(rdev);
1588 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
1589 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
1590 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
1591 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
1592 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
1593 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
1595 if (rdev->irq.sw_int) {
1596 DRM_DEBUG("evergreen_irq_set: sw int\n");
1597 cp_int_cntl |= RB_INT_ENABLE;
1598 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
1600 if (rdev->irq.crtc_vblank_int[0]) {
1601 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
1602 crtc1 |= VBLANK_INT_MASK;
1604 if (rdev->irq.crtc_vblank_int[1]) {
1605 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
1606 crtc2 |= VBLANK_INT_MASK;
1608 if (rdev->irq.crtc_vblank_int[2]) {
1609 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
1610 crtc3 |= VBLANK_INT_MASK;
1612 if (rdev->irq.crtc_vblank_int[3]) {
1613 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
1614 crtc4 |= VBLANK_INT_MASK;
1616 if (rdev->irq.crtc_vblank_int[4]) {
1617 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
1618 crtc5 |= VBLANK_INT_MASK;
1620 if (rdev->irq.crtc_vblank_int[5]) {
1621 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
1622 crtc6 |= VBLANK_INT_MASK;
1624 if (rdev->irq.hpd[0]) {
1625 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
1626 hpd1 |= DC_HPDx_INT_EN;
1628 if (rdev->irq.hpd[1]) {
1629 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
1630 hpd2 |= DC_HPDx_INT_EN;
1632 if (rdev->irq.hpd[2]) {
1633 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
1634 hpd3 |= DC_HPDx_INT_EN;
1636 if (rdev->irq.hpd[3]) {
1637 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
1638 hpd4 |= DC_HPDx_INT_EN;
1640 if (rdev->irq.hpd[4]) {
1641 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
1642 hpd5 |= DC_HPDx_INT_EN;
1644 if (rdev->irq.hpd[5]) {
1645 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
1646 hpd6 |= DC_HPDx_INT_EN;
1648 if (rdev->irq.gui_idle) {
1649 DRM_DEBUG("gui idle\n");
1650 grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
1653 WREG32(CP_INT_CNTL, cp_int_cntl);
1654 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
1656 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
1657 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
1658 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
1659 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
1660 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
1661 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
1663 WREG32(DC_HPD1_INT_CONTROL, hpd1);
1664 WREG32(DC_HPD2_INT_CONTROL, hpd2);
1665 WREG32(DC_HPD3_INT_CONTROL, hpd3);
1666 WREG32(DC_HPD4_INT_CONTROL, hpd4);
1667 WREG32(DC_HPD5_INT_CONTROL, hpd5);
1668 WREG32(DC_HPD6_INT_CONTROL, hpd6);
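/* latch the display interrupt status registers and acknowledge pending vblank, vline and HPD interrupts */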
1673 static inline void evergreen_irq_ack(struct radeon_device *rdev,
1676 u32 *disp_int_cont2,
1677 u32 *disp_int_cont3,
1678 u32 *disp_int_cont4,
1679 u32 *disp_int_cont5)
1683 *disp_int = RREG32(DISP_INTERRUPT_STATUS);
1684 *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
1685 *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
1686 *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
1687 *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
1688 *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
1690 if (*disp_int & LB_D1_VBLANK_INTERRUPT)
1691 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
1692 if (*disp_int & LB_D1_VLINE_INTERRUPT)
1693 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
1695 if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
1696 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
1697 if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
1698 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
1700 if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
1701 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
1702 if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
1703 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
1705 if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
1706 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
1707 if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
1708 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
1710 if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
1711 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
1712 if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
1713 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
1715 if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
1716 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
1717 if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
1718 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
1720 if (*disp_int & DC_HPD1_INTERRUPT) {
1721 tmp = RREG32(DC_HPD1_INT_CONTROL);
1722 tmp |= DC_HPDx_INT_ACK;
1723 WREG32(DC_HPD1_INT_CONTROL, tmp);
1725 if (*disp_int_cont & DC_HPD2_INTERRUPT) {
1726 tmp = RREG32(DC_HPD2_INT_CONTROL);
1727 tmp |= DC_HPDx_INT_ACK;
1728 WREG32(DC_HPD2_INT_CONTROL, tmp);
1730 if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
1731 tmp = RREG32(DC_HPD3_INT_CONTROL);
1732 tmp |= DC_HPDx_INT_ACK;
1733 WREG32(DC_HPD3_INT_CONTROL, tmp);
1735 if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
1736 tmp = RREG32(DC_HPD4_INT_CONTROL);
1737 tmp |= DC_HPDx_INT_ACK;
1738 WREG32(DC_HPD4_INT_CONTROL, tmp);
1740 if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
1741 tmp = RREG32(DC_HPD5_INT_CONTROL);
1742 tmp |= DC_HPDx_INT_ACK;
1743 WREG32(DC_HPD5_INT_CONTROL, tmp);
1745 if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
1746 tmp = RREG32(DC_HPD6_INT_CONTROL);
1747 tmp |= DC_HPDx_INT_ACK;
1748 WREG32(DC_HPD6_INT_CONTROL, tmp);
1752 void evergreen_irq_disable(struct radeon_device *rdev)
1754 u32 disp_int, disp_int_cont, disp_int_cont2;
1755 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1757 r600_disable_interrupts(rdev);
1758 /* Wait and acknowledge irq */
1760 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1761 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1762 evergreen_disable_interrupt_state(rdev);
1765 static void evergreen_irq_suspend(struct radeon_device *rdev)
1767 evergreen_irq_disable(rdev);
1768 r600_rlc_stop(rdev);
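/* fetch the IH ring write pointer (from the writeback buffer when enabled) and recover from ring overflow */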
1771 static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
1775 if (rdev->wb.enabled)
1776 wptr = rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4];
1778 wptr = RREG32(IH_RB_WPTR);
1780 if (wptr & RB_OVERFLOW) {
1781 /* When a ring buffer overflow happens, start parsing interrupts
1782 * from the last vector that was not overwritten (wptr + 16). Hopefully
1783 * this should allow us to catch up.
1785 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
1786 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
1787 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
1788 tmp = RREG32(IH_RB_CNTL);
1789 tmp |= IH_WPTR_OVERFLOW_CLEAR;
1790 WREG32(IH_RB_CNTL, tmp);
1792 return (wptr & rdev->ih.ptr_mask);
1795 int evergreen_irq_process(struct radeon_device *rdev)
1797 u32 wptr = evergreen_get_ih_wptr(rdev);
1798 u32 rptr = rdev->ih.rptr;
1799 u32 src_id, src_data;
1801 u32 disp_int, disp_int_cont, disp_int_cont2;
1802 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1803 unsigned long flags;
1804 bool queue_hotplug = false;
1806 DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
1807 if (!rdev->ih.enabled)
1810 spin_lock_irqsave(&rdev->ih.lock, flags);
1813 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1816 if (rdev->shutdown) {
1817 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1822 /* display interrupts */
1823 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1824 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1826 rdev->ih.wptr = wptr;
1827 while (rptr != wptr) {
1828 /* wptr/rptr are in bytes! */
1829 ring_index = rptr / 4;
1830 src_id = rdev->ih.ring[ring_index] & 0xff;
1831 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
1834 case 1: /* D1 vblank/vline */
1836 case 0: /* D1 vblank */
1837 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
1838 drm_handle_vblank(rdev->ddev, 0);
1839 wake_up(&rdev->irq.vblank_queue);
1840 disp_int &= ~LB_D1_VBLANK_INTERRUPT;
1841 DRM_DEBUG("IH: D1 vblank\n");
1844 case 1: /* D1 vline */
1845 if (disp_int & LB_D1_VLINE_INTERRUPT) {
1846 disp_int &= ~LB_D1_VLINE_INTERRUPT;
1847 DRM_DEBUG("IH: D1 vline\n");
1851 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1855 case 2: /* D2 vblank/vline */
1857 case 0: /* D2 vblank */
1858 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
1859 drm_handle_vblank(rdev->ddev, 1);
1860 wake_up(&rdev->irq.vblank_queue);
1861 disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
1862 DRM_DEBUG("IH: D2 vblank\n");
1865 case 1: /* D2 vline */
1866 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
1867 disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
1868 DRM_DEBUG("IH: D2 vline\n");
1872 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1876 case 3: /* D3 vblank/vline */
1878 case 0: /* D3 vblank */
1879 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
1880 drm_handle_vblank(rdev->ddev, 2);
1881 wake_up(&rdev->irq.vblank_queue);
1882 disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
1883 DRM_DEBUG("IH: D3 vblank\n");
1886 case 1: /* D3 vline */
1887 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
1888 disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
1889 DRM_DEBUG("IH: D3 vline\n");
1893 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1897 case 4: /* D4 vblank/vline */
1899 case 0: /* D4 vblank */
1900 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
1901 drm_handle_vblank(rdev->ddev, 3);
1902 wake_up(&rdev->irq.vblank_queue);
1903 disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
1904 DRM_DEBUG("IH: D4 vblank\n");
1907 case 1: /* D4 vline */
1908 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
1909 disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
1910 DRM_DEBUG("IH: D4 vline\n");
1914 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1918 case 5: /* D5 vblank/vline */
1920 case 0: /* D5 vblank */
1921 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
1922 drm_handle_vblank(rdev->ddev, 4);
1923 wake_up(&rdev->irq.vblank_queue);
1924 disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
1925 DRM_DEBUG("IH: D5 vblank\n");
1928 case 1: /* D5 vline */
1929 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
1930 disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
1931 DRM_DEBUG("IH: D5 vline\n");
1935 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1939 case 6: /* D6 vblank/vline */
1941 case 0: /* D6 vblank */
1942 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
1943 drm_handle_vblank(rdev->ddev, 5);
1944 wake_up(&rdev->irq.vblank_queue);
1945 disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
1946 DRM_DEBUG("IH: D6 vblank\n");
1949 case 1: /* D6 vline */
1950 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
1951 disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
1952 DRM_DEBUG("IH: D6 vline\n");
1956 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1960 case 42: /* HPD hotplug */
1963 if (disp_int & DC_HPD1_INTERRUPT) {
1964 disp_int &= ~DC_HPD1_INTERRUPT;
1965 queue_hotplug = true;
1966 DRM_DEBUG("IH: HPD1\n");
1970 if (disp_int_cont & DC_HPD2_INTERRUPT) {
1971 disp_int_cont &= ~DC_HPD2_INTERRUPT;
1972 queue_hotplug = true;
1973 DRM_DEBUG("IH: HPD2\n");
1977 if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
1978 disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
1979 queue_hotplug = true;
1980 DRM_DEBUG("IH: HPD3\n");
1984 if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
1985 disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
1986 queue_hotplug = true;
1987 DRM_DEBUG("IH: HPD4\n");
1991 if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
1992 disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
1993 queue_hotplug = true;
1994 DRM_DEBUG("IH: HPD5\n");
1998 if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
1999 disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
2000 queue_hotplug = true;
2001 DRM_DEBUG("IH: HPD6\n");
2005 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2009 case 176: /* CP_INT in ring buffer */
2010 case 177: /* CP_INT in IB1 */
2011 case 178: /* CP_INT in IB2 */
2012 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
2013 radeon_fence_process(rdev);
2015 case 181: /* CP EOP event */
2016 DRM_DEBUG("IH: CP EOP\n");
2017 radeon_fence_process(rdev);
2019 case 233: /* GUI IDLE */
2020 DRM_DEBUG("IH: CP EOP\n");
2021 rdev->pm.gui_idle = true;
2022 wake_up(&rdev->irq.idle_queue);
2025 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2029 /* wptr/rptr are in bytes! */
2031 rptr &= rdev->ih.ptr_mask;
2033 /* make sure wptr hasn't changed while processing */
2034 wptr = evergreen_get_ih_wptr(rdev);
2035 if (wptr != rdev->ih.wptr)
2038 queue_work(rdev->wq, &rdev->hotplug_work);
2039 rdev->ih.rptr = rptr;
2040 WREG32(IH_RB_RPTR, rdev->ih.rptr);
2041 spin_unlock_irqrestore(&rdev->ih.lock, flags);
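/* Common bring-up path shared by init and resume: load microcode if
 * needed, program the memory controller, enable AGP or the PCIE GART,
 * initialize the GPU and blitter (falling back to memcpy if the blitter
 * fails), set up writeback and interrupts, then load the CP microcode
 * and start the CP ring.
 */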
2045 static int evergreen_startup(struct radeon_device *rdev)
2049 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
2050 r = r600_init_microcode(rdev);
2052 DRM_ERROR("Failed to load firmware!\n");
2057 evergreen_mc_program(rdev);
2058 if (rdev->flags & RADEON_IS_AGP) {
2059 evergreen_agp_enable(rdev);
2061 r = evergreen_pcie_gart_enable(rdev);
2065 evergreen_gpu_init(rdev);
2067 r = evergreen_blit_init(rdev);
2069 evergreen_blit_fini(rdev);
2070 rdev->asic->copy = NULL;
2071 dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
2074 /* allocate wb buffer */
2075 r = radeon_wb_init(rdev);
2080 r = r600_irq_init(rdev);
2082 DRM_ERROR("radeon: IH init failed (%d).\n", r);
2083 radeon_irq_kms_fini(rdev);
2086 evergreen_irq_set(rdev);
2088 r = radeon_ring_init(rdev, rdev->cp.ring_size);
2091 r = evergreen_cp_load_microcode(rdev);
2094 r = evergreen_cp_resume(rdev);
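/* Resume entry point: re-POST the card through atombios, rerun the
 * startup sequence and verify acceleration with an IB test.
 */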
2101 int evergreen_resume(struct radeon_device *rdev)
2105 /* Do not reset GPU before posting; on rv770 hw, unlike r500 hw,
2106 * posting will perform the tasks necessary to bring the GPU back into a good state. */
2110 atom_asic_init(rdev->mode_info.atom_context);
2112 r = evergreen_startup(rdev);
2114 DRM_ERROR("r600 startup failed on resume\n");
2118 r = r600_ib_test(rdev);
2120 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
2128 int evergreen_suspend(struct radeon_device *rdev)
2132 /* FIXME: we should wait for ring to be empty */
2134 rdev->cp.ready = false;
2135 evergreen_irq_suspend(rdev);
2136 radeon_wb_disable(rdev);
2137 evergreen_pcie_gart_disable(rdev);
2139 /* unpin shaders bo */
2140 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
2141 if (likely(r == 0)) {
2142 radeon_bo_unpin(rdev->r600_blit.shader_obj);
2143 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
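/* Blit-engine copy of num_pages GPU pages from src_offset to dst_offset;
 * access to the blitter state is serialized by r600_blit.mutex.
 */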
2149 int evergreen_copy_blit(struct radeon_device *rdev,
2150 uint64_t src_offset, uint64_t dst_offset,
2151 unsigned num_pages, struct radeon_fence *fence)
2155 mutex_lock(&rdev->r600_blit.mutex);
2156 rdev->r600_blit.vb_ib = NULL;
2157 r = evergreen_blit_prepare_copy(rdev, num_pages * RADEON_GPU_PAGE_SIZE);
2159 if (rdev->r600_blit.vb_ib)
2160 radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
2161 mutex_unlock(&rdev->r600_blit.mutex);
2164 evergreen_kms_blit_copy(rdev, src_offset, dst_offset, num_pages * RADEON_GPU_PAGE_SIZE);
2165 evergreen_blit_done_copy(rdev, fence);
2166 mutex_unlock(&rdev->r600_blit.mutex);
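/* Check whether the card has already been POSTed: any CRTC with its
 * master enable set, or a non-zero CONFIG_MEMSIZE, counts as posted.
 */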
2170 static bool evergreen_card_posted(struct radeon_device *rdev)
2174 /* first check CRTCs */
2175 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2176 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
2177 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
2178 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
2179 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
2180 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
2181 if (reg & EVERGREEN_CRTC_MASTER_EN)
2184 /* then check MEM_SIZE, in case the crtcs are off */
2185 if (RREG32(CONFIG_MEMSIZE))
2191 /* Plan is to move initialization into that function and use
2192 * helper functions so that radeon_device_init does pretty much
2193 * nothing more than calling asic specific functions. This
2194 * should also allow removing a bunch of callback functions. */
2197 int evergreen_init(struct radeon_device *rdev)
2201 r = radeon_dummy_page_init(rdev);
2204 /* This doesn't do much */
2205 r = radeon_gem_init(rdev);
2209 if (!radeon_get_bios(rdev)) {
2210 if (ASIC_IS_AVIVO(rdev))
2213 /* Must be an ATOMBIOS */
2214 if (!rdev->is_atom_bios) {
2215 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
2218 r = radeon_atombios_init(rdev);
2221 /* Post card if necessary */
2222 if (!evergreen_card_posted(rdev)) {
2224 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2227 DRM_INFO("GPU not posted. posting now...\n");
2228 atom_asic_init(rdev->mode_info.atom_context);
2230 /* Initialize scratch registers */
2231 r600_scratch_init(rdev);
2232 /* Initialize surface registers */
2233 radeon_surface_init(rdev);
2234 /* Initialize clocks */
2235 radeon_get_clock_info(rdev->ddev);
2237 r = radeon_fence_driver_init(rdev);
2240 /* initialize AGP */
2241 if (rdev->flags & RADEON_IS_AGP) {
2242 r = radeon_agp_init(rdev);
2244 radeon_agp_disable(rdev);
2246 /* initialize memory controller */
2247 r = evergreen_mc_init(rdev);
2250 /* Memory manager */
2251 r = radeon_bo_init(rdev);
2255 r = radeon_irq_kms_init(rdev);
2259 rdev->cp.ring_obj = NULL;
2260 r600_ring_init(rdev, 1024 * 1024);
2262 rdev->ih.ring_obj = NULL;
2263 r600_ih_ring_init(rdev, 64 * 1024);
2265 r = r600_pcie_gart_init(rdev);
2269 rdev->accel_working = true;
2270 r = evergreen_startup(rdev);
2272 dev_err(rdev->dev, "disabling GPU acceleration\n");
2274 r600_irq_fini(rdev);
2275 radeon_wb_fini(rdev);
2276 radeon_irq_kms_fini(rdev);
2277 evergreen_pcie_gart_fini(rdev);
2278 rdev->accel_working = false;
2280 if (rdev->accel_working) {
2281 r = radeon_ib_pool_init(rdev);
2283 DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
2284 rdev->accel_working = false;
2286 r = r600_ib_test(rdev);
2288 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2289 rdev->accel_working = false;
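/* Tear down everything brought up in evergreen_init, in roughly the
 * reverse order of initialization.
 */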
2295 void evergreen_fini(struct radeon_device *rdev)
2297 evergreen_blit_fini(rdev);
2299 r600_irq_fini(rdev);
2300 radeon_wb_fini(rdev);
2301 radeon_irq_kms_fini(rdev);
2302 evergreen_pcie_gart_fini(rdev);
2303 radeon_gem_fini(rdev);
2304 radeon_fence_driver_fini(rdev);
2305 radeon_agp_fini(rdev);
2306 radeon_bo_fini(rdev);
2307 radeon_atombios_fini(rdev);
2310 radeon_dummy_page_fini(rdev);