2 * Copyright 2010 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
22 * Authors: Alex Deucher
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
29 #include "radeon_asic.h"
30 #include "radeon_drm.h"
31 #include "evergreend.h"
34 #include "evergreen_reg.h"
36 #define EVERGREEN_PFP_UCODE_SIZE 1120
37 #define EVERGREEN_PM4_UCODE_SIZE 1376
39 static void evergreen_gpu_init(struct radeon_device *rdev);
40 void evergreen_fini(struct radeon_device *rdev);
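/* Poll the DC_HPDx_INT_STATUS sense bit for the given hot-plug-detect
 * pin and report whether a display is currently connected.
 */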
42 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
44 bool connected = false;
48 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
52 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
56 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
60 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
64 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
68 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
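/* Set the HPD interrupt polarity to the opposite of the current sense
 * state so the next connect/disconnect transition raises an interrupt.
 */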
78 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
79 enum radeon_hpd_id hpd)
82 bool connected = evergreen_hpd_sense(rdev, hpd);
86 tmp = RREG32(DC_HPD1_INT_CONTROL);
88 tmp &= ~DC_HPDx_INT_POLARITY;
90 tmp |= DC_HPDx_INT_POLARITY;
91 WREG32(DC_HPD1_INT_CONTROL, tmp);
94 tmp = RREG32(DC_HPD2_INT_CONTROL);
96 tmp &= ~DC_HPDx_INT_POLARITY;
98 tmp |= DC_HPDx_INT_POLARITY;
99 WREG32(DC_HPD2_INT_CONTROL, tmp);
102 tmp = RREG32(DC_HPD3_INT_CONTROL);
104 tmp &= ~DC_HPDx_INT_POLARITY;
106 tmp |= DC_HPDx_INT_POLARITY;
107 WREG32(DC_HPD3_INT_CONTROL, tmp);
110 tmp = RREG32(DC_HPD4_INT_CONTROL);
112 tmp &= ~DC_HPDx_INT_POLARITY;
114 tmp |= DC_HPDx_INT_POLARITY;
115 WREG32(DC_HPD4_INT_CONTROL, tmp);
118 tmp = RREG32(DC_HPD5_INT_CONTROL);
120 tmp &= ~DC_HPDx_INT_POLARITY;
122 tmp |= DC_HPDx_INT_POLARITY;
123 WREG32(DC_HPD5_INT_CONTROL, tmp);
126 tmp = RREG32(DC_HPD6_INT_CONTROL);
128 tmp &= ~DC_HPDx_INT_POLARITY;
130 tmp |= DC_HPDx_INT_POLARITY;
131 WREG32(DC_HPD6_INT_CONTROL, tmp);
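/* Enable the HPD pins used by this board's connectors, programming the
 * connection timer (0x9c4) and RX interrupt timer (0xfa) for each pin,
 * then update the interrupt enables via evergreen_irq_set().
 */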
138 void evergreen_hpd_init(struct radeon_device *rdev)
140 struct drm_device *dev = rdev->ddev;
141 struct drm_connector *connector;
142 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
143 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
145 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
146 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
147 switch (radeon_connector->hpd.hpd) {
149 WREG32(DC_HPD1_CONTROL, tmp);
150 rdev->irq.hpd[0] = true;
153 WREG32(DC_HPD2_CONTROL, tmp);
154 rdev->irq.hpd[1] = true;
157 WREG32(DC_HPD3_CONTROL, tmp);
158 rdev->irq.hpd[2] = true;
161 WREG32(DC_HPD4_CONTROL, tmp);
162 rdev->irq.hpd[3] = true;
165 WREG32(DC_HPD5_CONTROL, tmp);
166 rdev->irq.hpd[4] = true;
169 WREG32(DC_HPD6_CONTROL, tmp);
170 rdev->irq.hpd[5] = true;
176 if (rdev->irq.installed)
177 evergreen_irq_set(rdev);
180 void evergreen_hpd_fini(struct radeon_device *rdev)
182 struct drm_device *dev = rdev->ddev;
183 struct drm_connector *connector;
185 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
186 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
187 switch (radeon_connector->hpd.hpd) {
189 WREG32(DC_HPD1_CONTROL, 0);
190 rdev->irq.hpd[0] = false;
193 WREG32(DC_HPD2_CONTROL, 0);
194 rdev->irq.hpd[1] = false;
197 WREG32(DC_HPD3_CONTROL, 0);
198 rdev->irq.hpd[2] = false;
201 WREG32(DC_HPD4_CONTROL, 0);
202 rdev->irq.hpd[3] = false;
205 WREG32(DC_HPD5_CONTROL, 0);
206 rdev->irq.hpd[4] = false;
209 WREG32(DC_HPD6_CONTROL, 0);
210 rdev->irq.hpd[5] = false;
218 void evergreen_bandwidth_update(struct radeon_device *rdev)
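/* Poll SRBM_STATUS for up to rdev->usec_timeout, waiting for the MC
 * busy bits (mask 0x1F00) to clear.
 */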
223 static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
228 for (i = 0; i < rdev->usec_timeout; i++) {
230 tmp = RREG32(SRBM_STATUS) & 0x1F00;
241 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
246 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
247 for (i = 0; i < rdev->usec_timeout; i++) {
249 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
250 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
252 printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
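/* Set up the PCIE GART: pin the page table in VRAM, configure the VM L2
 * cache and L1 TLBs, program the context0 page table range and fault
 * address, then flush the TLB before marking the GART ready.
 */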
262 int evergreen_pcie_gart_enable(struct radeon_device *rdev)
267 if (rdev->gart.table.vram.robj == NULL) {
268 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
271 r = radeon_gart_table_vram_pin(rdev);
274 radeon_gart_restore(rdev);
276 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
277 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
278 EFFECTIVE_L2_QUEUE_SIZE(7));
279 WREG32(VM_L2_CNTL2, 0);
280 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
281 /* Setup TLB control */
282 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
283 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
284 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
285 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
286 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
287 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
288 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
289 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
290 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
291 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
292 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
293 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
294 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
295 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
296 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
297 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
298 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
299 (u32)(rdev->dummy_page.addr >> 12));
300 WREG32(VM_CONTEXT1_CNTL, 0);
302 evergreen_pcie_gart_tlb_flush(rdev);
303 rdev->gart.ready = true;
307 void evergreen_pcie_gart_disable(struct radeon_device *rdev)
312 /* Disable all tables */
313 WREG32(VM_CONTEXT0_CNTL, 0);
314 WREG32(VM_CONTEXT1_CNTL, 0);
317 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
318 EFFECTIVE_L2_QUEUE_SIZE(7));
319 WREG32(VM_L2_CNTL2, 0);
320 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
321 /* Setup TLB control */
322 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
323 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
324 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
325 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
326 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
327 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
328 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
329 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
330 if (rdev->gart.table.vram.robj) {
331 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
332 if (likely(r == 0)) {
333 radeon_bo_kunmap(rdev->gart.table.vram.robj);
334 radeon_bo_unpin(rdev->gart.table.vram.robj);
335 radeon_bo_unreserve(rdev->gart.table.vram.robj);
340 void evergreen_pcie_gart_fini(struct radeon_device *rdev)
342 evergreen_pcie_gart_disable(rdev);
343 radeon_gart_table_vram_free(rdev);
344 radeon_gart_fini(rdev);
348 void evergreen_agp_enable(struct radeon_device *rdev)
353 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
354 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
355 EFFECTIVE_L2_QUEUE_SIZE(7));
356 WREG32(VM_L2_CNTL2, 0);
357 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
358 /* Setup TLB control */
359 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
360 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
361 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
362 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
363 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
364 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
365 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
366 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
367 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
368 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
369 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
370 WREG32(VM_CONTEXT0_CNTL, 0);
371 WREG32(VM_CONTEXT1_CNTL, 0);
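/* Save the VGA and CRTC controls and disable all six display controllers
 * so the memory controller can be reprogrammed safely; the state is
 * restored by evergreen_mc_resume() below.
 */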
374 static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
376 save->vga_control[0] = RREG32(D1VGA_CONTROL);
377 save->vga_control[1] = RREG32(D2VGA_CONTROL);
378 save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
379 save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
380 save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
381 save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
382 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
383 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
384 save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
385 save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
386 save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
387 save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
388 save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
389 save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
392 WREG32(VGA_RENDER_CONTROL, 0);
393 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
394 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
395 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
396 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
397 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
398 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
399 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
400 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
401 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
402 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
403 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
404 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
405 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
406 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
407 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
408 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
409 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
410 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
412 WREG32(D1VGA_CONTROL, 0);
413 WREG32(D2VGA_CONTROL, 0);
414 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
415 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
416 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
417 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
420 static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
422 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
423 upper_32_bits(rdev->mc.vram_start));
424 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
425 upper_32_bits(rdev->mc.vram_start));
426 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
427 (u32)rdev->mc.vram_start);
428 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
429 (u32)rdev->mc.vram_start);
431 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
432 upper_32_bits(rdev->mc.vram_start));
433 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
434 upper_32_bits(rdev->mc.vram_start));
435 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
436 (u32)rdev->mc.vram_start);
437 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
438 (u32)rdev->mc.vram_start);
440 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
441 upper_32_bits(rdev->mc.vram_start));
442 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
443 upper_32_bits(rdev->mc.vram_start));
444 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
445 (u32)rdev->mc.vram_start);
446 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
447 (u32)rdev->mc.vram_start);
449 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
450 upper_32_bits(rdev->mc.vram_start));
451 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
452 upper_32_bits(rdev->mc.vram_start));
453 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
454 (u32)rdev->mc.vram_start);
455 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
456 (u32)rdev->mc.vram_start);
458 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
459 upper_32_bits(rdev->mc.vram_start));
460 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
461 upper_32_bits(rdev->mc.vram_start));
462 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
463 (u32)rdev->mc.vram_start);
464 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
465 (u32)rdev->mc.vram_start);
467 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
468 upper_32_bits(rdev->mc.vram_start));
469 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
470 upper_32_bits(rdev->mc.vram_start));
471 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
472 (u32)rdev->mc.vram_start);
473 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
474 (u32)rdev->mc.vram_start);
476 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
477 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
478 /* Unlock host access */
479 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
481 /* Restore video state */
482 WREG32(D1VGA_CONTROL, save->vga_control[0]);
483 WREG32(D2VGA_CONTROL, save->vga_control[1]);
484 WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
485 WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
486 WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
487 WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
488 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
489 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
490 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
491 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
492 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
493 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
494 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
495 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
496 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
497 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
498 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
499 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
500 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
501 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
502 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
503 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
504 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
505 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
506 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
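/* Reprogram the memory controller: stop the displays, lock out VGA
 * access, program the system aperture and FB location from rdev->mc,
 * set up (or disable) the AGP aperture, then restore the display state.
 */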
509 static void evergreen_mc_program(struct radeon_device *rdev)
511 struct evergreen_mc_save save;
516 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
517 WREG32((0x2c14 + j), 0x00000000);
518 WREG32((0x2c18 + j), 0x00000000);
519 WREG32((0x2c1c + j), 0x00000000);
520 WREG32((0x2c20 + j), 0x00000000);
521 WREG32((0x2c24 + j), 0x00000000);
523 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
525 evergreen_mc_stop(rdev, &save);
526 if (evergreen_mc_wait_for_idle(rdev)) {
527 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
529 /* Lockout access through VGA aperture*/
530 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
531 /* Update configuration */
532 if (rdev->flags & RADEON_IS_AGP) {
533 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
534 /* VRAM before AGP */
535 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
536 rdev->mc.vram_start >> 12);
537 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
538 rdev->mc.gtt_end >> 12);
541 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
542 rdev->mc.gtt_start >> 12);
543 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
544 rdev->mc.vram_end >> 12);
547 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
548 rdev->mc.vram_start >> 12);
549 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
550 rdev->mc.vram_end >> 12);
552 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
553 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
554 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
555 WREG32(MC_VM_FB_LOCATION, tmp);
556 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
557 WREG32(HDP_NONSURFACE_INFO, (2 << 7));
558 WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
559 if (rdev->flags & RADEON_IS_AGP) {
560 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
561 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
562 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
564 WREG32(MC_VM_AGP_BASE, 0);
565 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
566 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
568 if (evergreen_mc_wait_for_idle(rdev)) {
569 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
571 evergreen_mc_resume(rdev, &save);
572 /* we need to own VRAM, so turn off the VGA renderer here
573 * to stop it overwriting our objects */
574 rv515_vga_render_disable(rdev);
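/* Load the PFP and ME (PM4) microcode into the CP through the
 * CP_PFP_UCODE_ADDR/DATA and CP_ME_RAM_WADDR/DATA register pairs; the
 * firmware words are stored big-endian and converted on write.
 */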
581 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
583 const __be32 *fw_data;
586 if (!rdev->me_fw || !rdev->pfp_fw)
590 WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));
592 fw_data = (const __be32 *)rdev->pfp_fw->data;
593 WREG32(CP_PFP_UCODE_ADDR, 0);
594 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
595 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
596 WREG32(CP_PFP_UCODE_ADDR, 0);
598 fw_data = (const __be32 *)rdev->me_fw->data;
599 WREG32(CP_ME_RAM_WADDR, 0);
600 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
601 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
603 WREG32(CP_PFP_UCODE_ADDR, 0);
604 WREG32(CP_ME_RAM_WADDR, 0);
605 WREG32(CP_ME_RAM_RADDR, 0);
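/* Bring the CP back up: soft-reset the gfx blocks, program the ring
 * buffer size and read/write pointers, point CP_RB_BASE at the ring in
 * GPU address space, and run a ring test before marking the CP ready.
 */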
609 int evergreen_cp_resume(struct radeon_device *rdev)
615 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
616 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
621 RREG32(GRBM_SOFT_RESET);
623 WREG32(GRBM_SOFT_RESET, 0);
624 RREG32(GRBM_SOFT_RESET);
626 /* Set ring buffer size */
627 rb_bufsz = drm_order(rdev->cp.ring_size / 8);
628 tmp = RB_NO_UPDATE | (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
630 tmp |= BUF_SWAP_32BIT;
632 WREG32(CP_RB_CNTL, tmp);
633 WREG32(CP_SEM_WAIT_TIMER, 0x4);
635 /* Set the write pointer delay */
636 WREG32(CP_RB_WPTR_DELAY, 0);
638 /* Initialize the ring buffer's read and write pointers */
639 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
640 WREG32(CP_RB_RPTR_WR, 0);
641 WREG32(CP_RB_WPTR, 0);
642 WREG32(CP_RB_RPTR_ADDR, rdev->cp.gpu_addr & 0xFFFFFFFF);
643 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->cp.gpu_addr));
645 WREG32(CP_RB_CNTL, tmp);
647 WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
648 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
650 rdev->cp.rptr = RREG32(CP_RB_RPTR);
651 rdev->cp.wptr = RREG32(CP_RB_WPTR);
654 rdev->cp.ready = true;
655 r = radeon_ring_test(rdev);
657 rdev->cp.ready = false;
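/* Build the tile-pipe to render-backend map: each tile pipe is assigned
 * a 4-bit backend index in the returned word, skipping disabled backends
 * and optionally disabling swizzle on some families.
 */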
666 static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
669 u32 backend_disable_mask)
672 u32 enabled_backends_mask = 0;
673 u32 enabled_backends_count = 0;
675 u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
678 bool force_no_swizzle;
680 if (num_tile_pipes > EVERGREEN_MAX_PIPES)
681 num_tile_pipes = EVERGREEN_MAX_PIPES;
682 if (num_tile_pipes < 1)
684 if (num_backends > EVERGREEN_MAX_BACKENDS)
685 num_backends = EVERGREEN_MAX_BACKENDS;
686 if (num_backends < 1)
689 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
690 if (((backend_disable_mask >> i) & 1) == 0) {
691 enabled_backends_mask |= (1 << i);
692 ++enabled_backends_count;
694 if (enabled_backends_count == num_backends)
698 if (enabled_backends_count == 0) {
699 enabled_backends_mask = 1;
700 enabled_backends_count = 1;
703 if (enabled_backends_count != num_backends)
704 num_backends = enabled_backends_count;
706 memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
707 switch (rdev->family) {
710 force_no_swizzle = false;
716 force_no_swizzle = true;
719 if (force_no_swizzle) {
720 bool last_backend_enabled = false;
722 force_no_swizzle = false;
723 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
724 if (((enabled_backends_mask >> i) & 1) == 1) {
725 if (last_backend_enabled)
726 force_no_swizzle = true;
727 last_backend_enabled = true;
729 last_backend_enabled = false;
733 switch (num_tile_pipes) {
738 DRM_ERROR("odd number of pipes!\n");
745 if (force_no_swizzle) {
758 if (force_no_swizzle) {
775 if (force_no_swizzle) {
797 for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
798 while (((1 << cur_backend) & enabled_backends_mask) == 0)
799 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
801 backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));
803 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
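/* Per-family 3D engine setup: fill in the evergreen config limits,
 * derive GB_ADDR_CONFIG and the backend map, and program the SQ/SX/PA/VGT
 * defaults used by the rest of the driver.
 */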
809 static void evergreen_gpu_init(struct radeon_device *rdev)
811 u32 cc_rb_backend_disable = 0;
812 u32 cc_gc_shader_pipe_config;
813 u32 gb_addr_config = 0;
814 u32 mc_shared_chmap, mc_arb_ramcfg;
820 u32 sq_lds_resource_mgmt;
821 u32 sq_gpr_resource_mgmt_1;
822 u32 sq_gpr_resource_mgmt_2;
823 u32 sq_gpr_resource_mgmt_3;
824 u32 sq_thread_resource_mgmt;
825 u32 sq_thread_resource_mgmt_2;
826 u32 sq_stack_resource_mgmt_1;
827 u32 sq_stack_resource_mgmt_2;
828 u32 sq_stack_resource_mgmt_3;
829 u32 vgt_cache_invalidation;
830 u32 hdp_host_path_cntl;
831 int i, j, num_shader_engines, ps_thread_count;
833 switch (rdev->family) {
836 rdev->config.evergreen.num_ses = 2;
837 rdev->config.evergreen.max_pipes = 4;
838 rdev->config.evergreen.max_tile_pipes = 8;
839 rdev->config.evergreen.max_simds = 10;
840 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
841 rdev->config.evergreen.max_gprs = 256;
842 rdev->config.evergreen.max_threads = 248;
843 rdev->config.evergreen.max_gs_threads = 32;
844 rdev->config.evergreen.max_stack_entries = 512;
845 rdev->config.evergreen.sx_num_of_sets = 4;
846 rdev->config.evergreen.sx_max_export_size = 256;
847 rdev->config.evergreen.sx_max_export_pos_size = 64;
848 rdev->config.evergreen.sx_max_export_smx_size = 192;
849 rdev->config.evergreen.max_hw_contexts = 8;
850 rdev->config.evergreen.sq_num_cf_insts = 2;
852 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
853 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
854 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
857 rdev->config.evergreen.num_ses = 1;
858 rdev->config.evergreen.max_pipes = 4;
859 rdev->config.evergreen.max_tile_pipes = 4;
860 rdev->config.evergreen.max_simds = 10;
861 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
862 rdev->config.evergreen.max_gprs = 256;
863 rdev->config.evergreen.max_threads = 248;
864 rdev->config.evergreen.max_gs_threads = 32;
865 rdev->config.evergreen.max_stack_entries = 512;
866 rdev->config.evergreen.sx_num_of_sets = 4;
867 rdev->config.evergreen.sx_max_export_size = 256;
868 rdev->config.evergreen.sx_max_export_pos_size = 64;
869 rdev->config.evergreen.sx_max_export_smx_size = 192;
870 rdev->config.evergreen.max_hw_contexts = 8;
871 rdev->config.evergreen.sq_num_cf_insts = 2;
873 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
874 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
875 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
878 rdev->config.evergreen.num_ses = 1;
879 rdev->config.evergreen.max_pipes = 4;
880 rdev->config.evergreen.max_tile_pipes = 4;
881 rdev->config.evergreen.max_simds = 5;
882 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
883 rdev->config.evergreen.max_gprs = 256;
884 rdev->config.evergreen.max_threads = 248;
885 rdev->config.evergreen.max_gs_threads = 32;
886 rdev->config.evergreen.max_stack_entries = 256;
887 rdev->config.evergreen.sx_num_of_sets = 4;
888 rdev->config.evergreen.sx_max_export_size = 256;
889 rdev->config.evergreen.sx_max_export_pos_size = 64;
890 rdev->config.evergreen.sx_max_export_smx_size = 192;
891 rdev->config.evergreen.max_hw_contexts = 8;
892 rdev->config.evergreen.sq_num_cf_insts = 2;
894 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
895 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
896 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
900 rdev->config.evergreen.num_ses = 1;
901 rdev->config.evergreen.max_pipes = 2;
902 rdev->config.evergreen.max_tile_pipes = 2;
903 rdev->config.evergreen.max_simds = 2;
904 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
905 rdev->config.evergreen.max_gprs = 256;
906 rdev->config.evergreen.max_threads = 192;
907 rdev->config.evergreen.max_gs_threads = 16;
908 rdev->config.evergreen.max_stack_entries = 256;
909 rdev->config.evergreen.sx_num_of_sets = 4;
910 rdev->config.evergreen.sx_max_export_size = 128;
911 rdev->config.evergreen.sx_max_export_pos_size = 32;
912 rdev->config.evergreen.sx_max_export_smx_size = 96;
913 rdev->config.evergreen.max_hw_contexts = 4;
914 rdev->config.evergreen.sq_num_cf_insts = 1;
916 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
917 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
918 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
923 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
924 WREG32((0x2c14 + j), 0x00000000);
925 WREG32((0x2c18 + j), 0x00000000);
926 WREG32((0x2c1c + j), 0x00000000);
927 WREG32((0x2c20 + j), 0x00000000);
928 WREG32((0x2c24 + j), 0x00000000);
931 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
933 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
935 cc_gc_shader_pipe_config |=
936 INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
937 & EVERGREEN_MAX_PIPES_MASK);
938 cc_gc_shader_pipe_config |=
939 INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
940 & EVERGREEN_MAX_SIMDS_MASK);
942 cc_rb_backend_disable =
943 BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
944 & EVERGREEN_MAX_BACKENDS_MASK);
947 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
948 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
950 switch (rdev->config.evergreen.max_tile_pipes) {
953 gb_addr_config |= NUM_PIPES(0);
956 gb_addr_config |= NUM_PIPES(1);
959 gb_addr_config |= NUM_PIPES(2);
962 gb_addr_config |= NUM_PIPES(3);
966 gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
967 gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
968 gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
969 gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
970 gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
971 gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
973 if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
974 gb_addr_config |= ROW_SIZE(2);
976 gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
978 if (rdev->ddev->pdev->device == 0x689e) {
981 u8 efuse_box_bit_131_124;
983 WREG32(RCU_IND_INDEX, 0x204);
984 efuse_straps_4 = RREG32(RCU_IND_DATA);
985 WREG32(RCU_IND_INDEX, 0x203);
986 efuse_straps_3 = RREG32(RCU_IND_DATA);
987 efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
989 switch(efuse_box_bit_131_124) {
991 gb_backend_map = 0x76543210;
994 gb_backend_map = 0x77553311;
997 gb_backend_map = 0x77553300;
1000 gb_backend_map = 0x77552211;
1003 gb_backend_map = 0x77443300;
1006 gb_backend_map = 0x66552211;
1009 gb_backend_map = 0x77552200;
1012 gb_backend_map = 0x66442200;
1015 gb_backend_map = 0x66553311;
1018 DRM_ERROR("bad backend map, using default\n");
1020 evergreen_get_tile_pipe_to_backend_map(rdev,
1021 rdev->config.evergreen.max_tile_pipes,
1022 rdev->config.evergreen.max_backends,
1023 ((EVERGREEN_MAX_BACKENDS_MASK <<
1024 rdev->config.evergreen.max_backends) &
1025 EVERGREEN_MAX_BACKENDS_MASK));
1028 } else if (rdev->ddev->pdev->device == 0x68b9) {
1030 u8 efuse_box_bit_127_124;
1032 WREG32(RCU_IND_INDEX, 0x203);
1033 efuse_straps_3 = RREG32(RCU_IND_DATA);
1034 efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);
1036 switch(efuse_box_bit_127_124) {
1038 gb_backend_map = 0x00003210;
1044 gb_backend_map = 0x00003311;
1047 DRM_ERROR("bad backend map, using default\n");
1049 evergreen_get_tile_pipe_to_backend_map(rdev,
1050 rdev->config.evergreen.max_tile_pipes,
1051 rdev->config.evergreen.max_backends,
1052 ((EVERGREEN_MAX_BACKENDS_MASK <<
1053 rdev->config.evergreen.max_backends) &
1054 EVERGREEN_MAX_BACKENDS_MASK));
1059 evergreen_get_tile_pipe_to_backend_map(rdev,
1060 rdev->config.evergreen.max_tile_pipes,
1061 rdev->config.evergreen.max_backends,
1062 ((EVERGREEN_MAX_BACKENDS_MASK <<
1063 rdev->config.evergreen.max_backends) &
1064 EVERGREEN_MAX_BACKENDS_MASK));
1066 WREG32(GB_BACKEND_MAP, gb_backend_map);
1067 WREG32(GB_ADDR_CONFIG, gb_addr_config);
1068 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
1069 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
1071 num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
1072 grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
1074 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
1075 u32 rb = cc_rb_backend_disable | (0xf0 << 16);
1076 u32 sp = cc_gc_shader_pipe_config;
1077 u32 gfx = grbm_gfx_index | SE_INDEX(i);
1079 if (i == num_shader_engines) {
1080 rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
1081 sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
1084 WREG32(GRBM_GFX_INDEX, gfx);
1085 WREG32(RLC_GFX_INDEX, gfx);
1087 WREG32(CC_RB_BACKEND_DISABLE, rb);
1088 WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
1089 WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
1090 WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
1093 grbm_gfx_index |= SE_BROADCAST_WRITES;
1094 WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
1095 WREG32(RLC_GFX_INDEX, grbm_gfx_index);
1097 WREG32(CGTS_SYS_TCC_DISABLE, 0);
1098 WREG32(CGTS_TCC_DISABLE, 0);
1099 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
1100 WREG32(CGTS_USER_TCC_DISABLE, 0);
1102 /* set HW defaults for 3D engine */
1103 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
1104 ROQ_IB2_START(0x2b)));
1106 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
1108 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
1113 sx_debug_1 = RREG32(SX_DEBUG_1);
1114 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
1115 WREG32(SX_DEBUG_1, sx_debug_1);
1118 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
1119 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
1120 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
1121 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
1123 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
1124 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
1125 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
1127 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
1128 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
1129 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
1131 WREG32(VGT_NUM_INSTANCES, 1);
1132 WREG32(SPI_CONFIG_CNTL, 0);
1133 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
1134 WREG32(CP_PERFMON_CNTL, 0);
1136 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
1137 FETCH_FIFO_HIWATER(0x4) |
1138 DONE_FIFO_HIWATER(0xe0) |
1139 ALU_UPDATE_FIFO_HIWATER(0x8)));
1141 sq_config = RREG32(SQ_CONFIG);
1142 sq_config &= ~(PS_PRIO(3) |
1146 sq_config |= (VC_ENABLE |
1153 if (rdev->family == CHIP_CEDAR)
1154 /* no vertex cache */
1155 sq_config &= ~VC_ENABLE;
1157 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
1159 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
1160 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
1161 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
1162 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1163 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1164 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1165 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1167 if (rdev->family == CHIP_CEDAR)
1168 ps_thread_count = 96;
1170 ps_thread_count = 128;
1172 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
1173 sq_thread_resource_mgmt |= NUM_VS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1174 sq_thread_resource_mgmt |= NUM_GS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1175 sq_thread_resource_mgmt |= NUM_ES_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1176 sq_thread_resource_mgmt_2 = NUM_HS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1177 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1179 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1180 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1181 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1182 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1183 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1184 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1186 WREG32(SQ_CONFIG, sq_config);
1187 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
1188 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
1189 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
1190 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
1191 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
1192 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
1193 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
1194 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
1195 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
1196 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
1198 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
1199 FORCE_EOV_MAX_REZ_CNT(255)));
1201 if (rdev->family == CHIP_CEDAR)
1202 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
1204 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
1205 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
1206 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
1208 WREG32(VGT_GS_VERTEX_REUSE, 16);
1209 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
1211 WREG32(CB_PERF_CTR0_SEL_0, 0);
1212 WREG32(CB_PERF_CTR0_SEL_1, 0);
1213 WREG32(CB_PERF_CTR1_SEL_0, 0);
1214 WREG32(CB_PERF_CTR1_SEL_1, 0);
1215 WREG32(CB_PERF_CTR2_SEL_0, 0);
1216 WREG32(CB_PERF_CTR2_SEL_1, 0);
1217 WREG32(CB_PERF_CTR3_SEL_0, 0);
1218 WREG32(CB_PERF_CTR3_SEL_1, 0);
1220 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
1221 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
1223 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
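/* Derive the VRAM width from the channel size (MC_ARB_RAMCFG) and channel
 * count (MC_SHARED_CHMAP), read the VRAM size in MB from CONFIG_MEMSIZE,
 * and place VRAM/GTT in the GPU address space.
 */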
1229 int evergreen_mc_init(struct radeon_device *rdev)
1232 int chansize, numchan;
1234 /* Get VRAM information */
1235 rdev->mc.vram_is_ddr = true;
1236 tmp = RREG32(MC_ARB_RAMCFG);
1237 if (tmp & CHANSIZE_OVERRIDE) {
1239 } else if (tmp & CHANSIZE_MASK) {
1244 tmp = RREG32(MC_SHARED_CHMAP);
1245 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1260 rdev->mc.vram_width = numchan * chansize;
1261 /* Could aper size report 0 ? */
1262 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
1263 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
1264 /* Setup GPU memory space */
1265 /* size in MB on evergreen */
1266 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1267 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1268 rdev->mc.visible_vram_size = rdev->mc.aper_size;
1269 /* FIXME remove this once we support unmappable VRAM */
1270 if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
1271 rdev->mc.mc_vram_size = rdev->mc.aper_size;
1272 rdev->mc.real_vram_size = rdev->mc.aper_size;
1274 r600_vram_gtt_location(rdev, &rdev->mc);
1275 radeon_update_bandwidth_info(rdev);
1280 bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
1282 /* FIXME: implement for evergreen */
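/* Soft reset the gfx (GRBM) and system (SRBM) blocks, dumping status
 * registers before and after, then reinitialize the asic via atombios
 * and restore the memory controller state.
 */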
1286 static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
1288 struct evergreen_mc_save save;
1292 dev_info(rdev->dev, "GPU softreset\n");
1293 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
1294 RREG32(GRBM_STATUS));
1295 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
1296 RREG32(GRBM_STATUS_SE0));
1297 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
1298 RREG32(GRBM_STATUS_SE1));
1299 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
1300 RREG32(SRBM_STATUS));
1301 evergreen_mc_stop(rdev, &save);
1302 if (evergreen_mc_wait_for_idle(rdev)) {
1303 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1305 /* Disable CP parsing/prefetching */
1306 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
1308 /* reset all the gfx blocks */
1309 grbm_reset = (SOFT_RESET_CP |
1322 dev_info(rdev->dev, " GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
1323 WREG32(GRBM_SOFT_RESET, grbm_reset);
1324 (void)RREG32(GRBM_SOFT_RESET);
1326 WREG32(GRBM_SOFT_RESET, 0);
1327 (void)RREG32(GRBM_SOFT_RESET);
1329 /* reset all the system blocks */
1330 srbm_reset = SRBM_SOFT_RESET_ALL_MASK;
1332 dev_info(rdev->dev, " SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
1333 WREG32(SRBM_SOFT_RESET, srbm_reset);
1334 (void)RREG32(SRBM_SOFT_RESET);
1336 WREG32(SRBM_SOFT_RESET, 0);
1337 (void)RREG32(SRBM_SOFT_RESET);
1338 /* Wait a little for things to settle down */
1340 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
1341 RREG32(GRBM_STATUS));
1342 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
1343 RREG32(GRBM_STATUS_SE0));
1344 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
1345 RREG32(GRBM_STATUS_SE1));
1346 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
1347 RREG32(SRBM_STATUS));
1348 /* After reset we need to reinit the asic as the GPU often ends up in an
1351 atom_asic_init(rdev->mode_info.atom_context);
1352 evergreen_mc_resume(rdev, &save);
1356 int evergreen_asic_reset(struct radeon_device *rdev)
1358 return evergreen_gpu_soft_reset(rdev);
1363 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
1367 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
1369 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
1371 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
1373 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
1375 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
1377 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
1383 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
1387 WREG32(CP_INT_CNTL, 0);
1388 WREG32(GRBM_INT_CNTL, 0);
1389 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1390 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1391 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1392 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1393 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1394 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1396 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1397 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1398 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1399 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1400 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1401 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1403 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
1404 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
1406 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1407 WREG32(DC_HPD1_INT_CONTROL, tmp);
1408 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1409 WREG32(DC_HPD2_INT_CONTROL, tmp);
1410 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1411 WREG32(DC_HPD3_INT_CONTROL, tmp);
1412 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1413 WREG32(DC_HPD4_INT_CONTROL, tmp);
1414 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1415 WREG32(DC_HPD5_INT_CONTROL, tmp);
1416 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1417 WREG32(DC_HPD6_INT_CONTROL, tmp);
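/* Program the interrupt enables requested in rdev->irq: CP ring buffer
 * interrupts, per-crtc vblank interrupts and HPD pin interrupts.
 */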
1421 int evergreen_irq_set(struct radeon_device *rdev)
1423 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
1424 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
1425 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
1427 if (!rdev->irq.installed) {
1428 WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
1431 /* don't enable anything if the ih is disabled */
1432 if (!rdev->ih.enabled) {
1433 r600_disable_interrupts(rdev);
1434 /* force the active interrupt state to all disabled */
1435 evergreen_disable_interrupt_state(rdev);
1439 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
1440 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
1441 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
1442 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
1443 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
1444 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
1446 if (rdev->irq.sw_int) {
1447 DRM_DEBUG("evergreen_irq_set: sw int\n");
1448 cp_int_cntl |= RB_INT_ENABLE;
1450 if (rdev->irq.crtc_vblank_int[0]) {
1451 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
1452 crtc1 |= VBLANK_INT_MASK;
1454 if (rdev->irq.crtc_vblank_int[1]) {
1455 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
1456 crtc2 |= VBLANK_INT_MASK;
1458 if (rdev->irq.crtc_vblank_int[2]) {
1459 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
1460 crtc3 |= VBLANK_INT_MASK;
1462 if (rdev->irq.crtc_vblank_int[3]) {
1463 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
1464 crtc4 |= VBLANK_INT_MASK;
1466 if (rdev->irq.crtc_vblank_int[4]) {
1467 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
1468 crtc5 |= VBLANK_INT_MASK;
1470 if (rdev->irq.crtc_vblank_int[5]) {
1471 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
1472 crtc6 |= VBLANK_INT_MASK;
1474 if (rdev->irq.hpd[0]) {
1475 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
1476 hpd1 |= DC_HPDx_INT_EN;
1478 if (rdev->irq.hpd[1]) {
1479 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
1480 hpd2 |= DC_HPDx_INT_EN;
1482 if (rdev->irq.hpd[2]) {
1483 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
1484 hpd3 |= DC_HPDx_INT_EN;
1486 if (rdev->irq.hpd[3]) {
1487 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
1488 hpd4 |= DC_HPDx_INT_EN;
1490 if (rdev->irq.hpd[4]) {
1491 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
1492 hpd5 |= DC_HPDx_INT_EN;
1494 if (rdev->irq.hpd[5]) {
1495 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
1496 hpd6 |= DC_HPDx_INT_EN;
1499 WREG32(CP_INT_CNTL, cp_int_cntl);
1501 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
1502 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
1503 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
1504 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
1505 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
1506 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
1508 WREG32(DC_HPD1_INT_CONTROL, hpd1);
1509 WREG32(DC_HPD2_INT_CONTROL, hpd2);
1510 WREG32(DC_HPD3_INT_CONTROL, hpd3);
1511 WREG32(DC_HPD4_INT_CONTROL, hpd4);
1512 WREG32(DC_HPD5_INT_CONTROL, hpd5);
1513 WREG32(DC_HPD6_INT_CONTROL, hpd6);
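/* Read the six DISP_INTERRUPT_STATUS words for the caller and acknowledge
 * any pending vblank, vline and HPD interrupts.
 */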
1518 static inline void evergreen_irq_ack(struct radeon_device *rdev,
1521 u32 *disp_int_cont2,
1522 u32 *disp_int_cont3,
1523 u32 *disp_int_cont4,
1524 u32 *disp_int_cont5)
1528 *disp_int = RREG32(DISP_INTERRUPT_STATUS);
1529 *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
1530 *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
1531 *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
1532 *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
1533 *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
1535 if (*disp_int & LB_D1_VBLANK_INTERRUPT)
1536 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
1537 if (*disp_int & LB_D1_VLINE_INTERRUPT)
1538 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
1540 if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
1541 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
1542 if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
1543 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
1545 if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
1546 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
1547 if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
1548 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
1550 if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
1551 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
1552 if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
1553 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
1555 if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
1556 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
1557 if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
1558 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
1560 if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
1561 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
1562 if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
1563 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
1565 if (*disp_int & DC_HPD1_INTERRUPT) {
1566 tmp = RREG32(DC_HPD1_INT_CONTROL);
1567 tmp |= DC_HPDx_INT_ACK;
1568 WREG32(DC_HPD1_INT_CONTROL, tmp);
1570 if (*disp_int_cont & DC_HPD2_INTERRUPT) {
1571 tmp = RREG32(DC_HPD2_INT_CONTROL);
1572 tmp |= DC_HPDx_INT_ACK;
1573 WREG32(DC_HPD2_INT_CONTROL, tmp);
1575 if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
1576 tmp = RREG32(DC_HPD3_INT_CONTROL);
1577 tmp |= DC_HPDx_INT_ACK;
1578 WREG32(DC_HPD3_INT_CONTROL, tmp);
1580 if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
1581 tmp = RREG32(DC_HPD4_INT_CONTROL);
1582 tmp |= DC_HPDx_INT_ACK;
1583 WREG32(DC_HPD4_INT_CONTROL, tmp);
1585 if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
1586 tmp = RREG32(DC_HPD5_INT_CONTROL);
1587 tmp |= DC_HPDx_INT_ACK;
1588 WREG32(DC_HPD5_INT_CONTROL, tmp);
1590 if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
1591 tmp = RREG32(DC_HPD6_INT_CONTROL);
1592 tmp |= DC_HPDx_INT_ACK;
1593 WREG32(DC_HPD6_INT_CONTROL, tmp);
1597 void evergreen_irq_disable(struct radeon_device *rdev)
1599 u32 disp_int, disp_int_cont, disp_int_cont2;
1600 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1602 r600_disable_interrupts(rdev);
1603 /* Wait and acknowledge irq */
1605 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1606 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1607 evergreen_disable_interrupt_state(rdev);
1610 static void evergreen_irq_suspend(struct radeon_device *rdev)
1612 evergreen_irq_disable(rdev);
1613 r600_rlc_stop(rdev);
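/* Fetch the IH ring write pointer; on overflow, warn, clear the overflow
 * bit and resume parsing from the oldest vector not yet overwritten.
 */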
1616 static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
1620 /* XXX use writeback */
1621 wptr = RREG32(IH_RB_WPTR);
1623 if (wptr & RB_OVERFLOW) {
1624 /* When a ring buffer overflow happens, start parsing interrupts
1625 * from the last vector that was not overwritten (wptr + 16). Hopefully
1626 * this should allow us to catch up.
1628 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
1629 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
1630 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
1631 tmp = RREG32(IH_RB_CNTL);
1632 tmp |= IH_WPTR_OVERFLOW_CLEAR;
1633 WREG32(IH_RB_CNTL, tmp);
1635 return (wptr & rdev->ih.ptr_mask);
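/* Drain the IH ring: dispatch vblank/vline, HPD hotplug and CP interrupt
 * vectors, then update the read pointer and queue the hotplug work if a
 * connector change was seen.
 */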
1638 int evergreen_irq_process(struct radeon_device *rdev)
1640 u32 wptr = evergreen_get_ih_wptr(rdev);
1641 u32 rptr = rdev->ih.rptr;
1642 u32 src_id, src_data;
1644 u32 disp_int, disp_int_cont, disp_int_cont2;
1645 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1646 unsigned long flags;
1647 bool queue_hotplug = false;
1649 DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
1650 if (!rdev->ih.enabled)
1653 spin_lock_irqsave(&rdev->ih.lock, flags);
1656 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1659 if (rdev->shutdown) {
1660 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1665 /* display interrupts */
1666 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1667 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1669 rdev->ih.wptr = wptr;
1670 while (rptr != wptr) {
1671 /* wptr/rptr are in bytes! */
1672 ring_index = rptr / 4;
1673 src_id = rdev->ih.ring[ring_index] & 0xff;
1674 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
1677 case 1: /* D1 vblank/vline */
1679 case 0: /* D1 vblank */
1680 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
1681 drm_handle_vblank(rdev->ddev, 0);
1682 wake_up(&rdev->irq.vblank_queue);
1683 disp_int &= ~LB_D1_VBLANK_INTERRUPT;
1684 DRM_DEBUG("IH: D1 vblank\n");
1687 case 1: /* D1 vline */
1688 if (disp_int & LB_D1_VLINE_INTERRUPT) {
1689 disp_int &= ~LB_D1_VLINE_INTERRUPT;
1690 DRM_DEBUG("IH: D1 vline\n");
1694 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1698 case 2: /* D2 vblank/vline */
1700 case 0: /* D2 vblank */
1701 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
1702 drm_handle_vblank(rdev->ddev, 1);
1703 wake_up(&rdev->irq.vblank_queue);
1704 disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
1705 DRM_DEBUG("IH: D2 vblank\n");
1708 case 1: /* D2 vline */
1709 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
1710 disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
1711 DRM_DEBUG("IH: D2 vline\n");
1715 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1719 case 3: /* D3 vblank/vline */
1721 case 0: /* D3 vblank */
1722 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
1723 drm_handle_vblank(rdev->ddev, 2);
1724 wake_up(&rdev->irq.vblank_queue);
1725 disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
1726 DRM_DEBUG("IH: D3 vblank\n");
1729 case 1: /* D3 vline */
1730 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
1731 disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
1732 DRM_DEBUG("IH: D3 vline\n");
1736 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1740 case 4: /* D4 vblank/vline */
1742 case 0: /* D4 vblank */
1743 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
1744 drm_handle_vblank(rdev->ddev, 3);
1745 wake_up(&rdev->irq.vblank_queue);
1746 disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
1747 DRM_DEBUG("IH: D4 vblank\n");
1750 case 1: /* D4 vline */
1751 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
1752 disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
1753 DRM_DEBUG("IH: D4 vline\n");
1757 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1761 case 5: /* D5 vblank/vline */
1763 case 0: /* D5 vblank */
1764 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
1765 drm_handle_vblank(rdev->ddev, 4);
1766 wake_up(&rdev->irq.vblank_queue);
1767 disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
1768 DRM_DEBUG("IH: D5 vblank\n");
1771 case 1: /* D5 vline */
1772 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
1773 disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
1774 DRM_DEBUG("IH: D5 vline\n");
1778 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1782 case 6: /* D6 vblank/vline */
1784 case 0: /* D6 vblank */
1785 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
1786 drm_handle_vblank(rdev->ddev, 5);
1787 wake_up(&rdev->irq.vblank_queue);
1788 disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
1789 DRM_DEBUG("IH: D6 vblank\n");
1792 case 1: /* D6 vline */
1793 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
1794 disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
1795 DRM_DEBUG("IH: D6 vline\n");
1799 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1803 case 42: /* HPD hotplug */
1806 if (disp_int & DC_HPD1_INTERRUPT) {
1807 disp_int &= ~DC_HPD1_INTERRUPT;
1808 queue_hotplug = true;
1809 DRM_DEBUG("IH: HPD1\n");
1813 if (disp_int_cont & DC_HPD2_INTERRUPT) {
1814 disp_int_cont &= ~DC_HPD2_INTERRUPT;
1815 queue_hotplug = true;
1816 DRM_DEBUG("IH: HPD2\n");
1820 if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
1821 disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
1822 queue_hotplug = true;
1823 DRM_DEBUG("IH: HPD3\n");
1827 if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
1828 disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
1829 queue_hotplug = true;
1830 DRM_DEBUG("IH: HPD4\n");
1834 if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
1835 disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
1836 queue_hotplug = true;
1837 DRM_DEBUG("IH: HPD5\n");
1841 if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
1842 disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
1843 queue_hotplug = true;
1844 DRM_DEBUG("IH: HPD6\n");
1848 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1852 case 176: /* CP_INT in ring buffer */
1853 case 177: /* CP_INT in IB1 */
1854 case 178: /* CP_INT in IB2 */
1855 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
1856 radeon_fence_process(rdev);
1858 case 181: /* CP EOP event */
1859 DRM_DEBUG("IH: CP EOP\n");
1862 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1866 /* wptr/rptr are in bytes! */
1868 rptr &= rdev->ih.ptr_mask;
1870 /* make sure wptr hasn't changed while processing */
1871 wptr = evergreen_get_ih_wptr(rdev);
1872 if (wptr != rdev->ih.wptr)
1875 queue_work(rdev->wq, &rdev->hotplug_work);
1876 rdev->ih.rptr = rptr;
1877 WREG32(IH_RB_RPTR, rdev->ih.rptr);
1878 spin_unlock_irqrestore(&rdev->ih.lock, flags);
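/* Common startup path used by both init and resume: load microcode,
 * program the MC, enable GART (or AGP), init the GPU, blitter, IH and CP,
 * and enable writeback.
 */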
1882 static int evergreen_startup(struct radeon_device *rdev)
1886 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
1887 r = r600_init_microcode(rdev);
1889 DRM_ERROR("Failed to load firmware!\n");
1894 evergreen_mc_program(rdev);
1895 if (rdev->flags & RADEON_IS_AGP) {
1896 evergreen_agp_enable(rdev);
1898 r = evergreen_pcie_gart_enable(rdev);
1902 evergreen_gpu_init(rdev);
1904 if (!rdev->r600_blit.shader_obj) {
1905 r = r600_blit_init(rdev);
1907 DRM_ERROR("radeon: failed blitter (%d).\n", r);
1912 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1913 if (unlikely(r != 0))
1915 r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
1916 &rdev->r600_blit.shader_gpu_addr);
1917 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1919 DRM_ERROR("failed to pin blit object %d\n", r);
1925 r = r600_irq_init(rdev);
1927 DRM_ERROR("radeon: IH init failed (%d).\n", r);
1928 radeon_irq_kms_fini(rdev);
1931 evergreen_irq_set(rdev);
1933 r = radeon_ring_init(rdev, rdev->cp.ring_size);
1936 r = evergreen_cp_load_microcode(rdev);
1939 r = evergreen_cp_resume(rdev);
1942 /* write back buffers are not vital so don't worry about failure */
1943 r600_wb_enable(rdev);
1948 int evergreen_resume(struct radeon_device *rdev)
1952 /* Do not reset the GPU before posting; on rv770 hw, unlike on r500 hw,
1953 * posting will perform the necessary tasks to bring the GPU back into a good
1957 atom_asic_init(rdev->mode_info.atom_context);
1958 /* Initialize clocks */
1959 r = radeon_clocks_init(rdev);
1964 r = evergreen_startup(rdev);
1966 DRM_ERROR("evergreen startup failed on resume\n");
1970 r = r600_ib_test(rdev);
1972 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
1980 int evergreen_suspend(struct radeon_device *rdev)
1985 /* FIXME: we should wait for ring to be empty */
1987 rdev->cp.ready = false;
1988 evergreen_irq_suspend(rdev);
1989 r600_wb_disable(rdev);
1990 evergreen_pcie_gart_disable(rdev);
1992 /* unpin shaders bo */
1993 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1994 if (likely(r == 0)) {
1995 radeon_bo_unpin(rdev->r600_blit.shader_obj);
1996 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
2002 static bool evergreen_card_posted(struct radeon_device *rdev)
2006 /* first check CRTCs */
2007 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2008 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
2009 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
2010 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
2011 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
2012 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
2013 if (reg & EVERGREEN_CRTC_MASTER_EN)
2016 /* then check MEM_SIZE, in case the crtcs are off */
2017 if (RREG32(CONFIG_MEMSIZE))
2023 /* Plan is to move initialization into this function and use
2024 * helper functions so that radeon_device_init does pretty much
2025 * nothing more than call asic-specific functions. This
2026 * should also allow us to remove a bunch of callback functions
2029 int evergreen_init(struct radeon_device *rdev)
2033 r = radeon_dummy_page_init(rdev);
2036 /* This doesn't do much */
2037 r = radeon_gem_init(rdev);
2041 if (!radeon_get_bios(rdev)) {
2042 if (ASIC_IS_AVIVO(rdev))
2045 /* Must be an ATOMBIOS */
2046 if (!rdev->is_atom_bios) {
2047 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
2050 r = radeon_atombios_init(rdev);
2053 /* Post card if necessary */
2054 if (!evergreen_card_posted(rdev)) {
2056 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2059 DRM_INFO("GPU not posted. posting now...\n");
2060 atom_asic_init(rdev->mode_info.atom_context);
2062 /* Initialize scratch registers */
2063 r600_scratch_init(rdev);
2064 /* Initialize surface registers */
2065 radeon_surface_init(rdev);
2066 /* Initialize clocks */
2067 radeon_get_clock_info(rdev->ddev);
2068 r = radeon_clocks_init(rdev);
2071 /* Initialize power management */
2072 radeon_pm_init(rdev);
2074 r = radeon_fence_driver_init(rdev);
2077 /* initialize AGP */
2078 if (rdev->flags & RADEON_IS_AGP) {
2079 r = radeon_agp_init(rdev);
2081 radeon_agp_disable(rdev);
2083 /* initialize memory controller */
2084 r = evergreen_mc_init(rdev);
2087 /* Memory manager */
2088 r = radeon_bo_init(rdev);
2092 r = radeon_irq_kms_init(rdev);
2096 rdev->cp.ring_obj = NULL;
2097 r600_ring_init(rdev, 1024 * 1024);
2099 rdev->ih.ring_obj = NULL;
2100 r600_ih_ring_init(rdev, 64 * 1024);
2102 r = r600_pcie_gart_init(rdev);
2106 rdev->accel_working = false;
2107 r = evergreen_startup(rdev);
2109 dev_err(rdev->dev, "disabling GPU acceleration\n");
2112 r600_irq_fini(rdev);
2113 radeon_irq_kms_fini(rdev);
2114 evergreen_pcie_gart_fini(rdev);
2115 rdev->accel_working = false;
2117 if (rdev->accel_working) {
2118 r = radeon_ib_pool_init(rdev);
2120 DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
2121 rdev->accel_working = false;
2123 r = r600_ib_test(rdev);
2125 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2126 rdev->accel_working = false;
2132 void evergreen_fini(struct radeon_device *rdev)
2134 radeon_pm_fini(rdev);
2135 /*r600_blit_fini(rdev);*/
2138 r600_irq_fini(rdev);
2139 radeon_irq_kms_fini(rdev);
2140 evergreen_pcie_gart_fini(rdev);
2141 radeon_gem_fini(rdev);
2142 radeon_fence_driver_fini(rdev);
2143 radeon_clocks_fini(rdev);
2144 radeon_agp_fini(rdev);
2145 radeon_bo_fini(rdev);
2146 radeon_atombios_fini(rdev);
2149 radeon_dummy_page_fini(rdev);