Merge branch 'i2c-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/jdelvar...
[pandora-kernel.git] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include "drmP.h"
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include "radeon_drm.h"
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35
36 #define EVERGREEN_PFP_UCODE_SIZE 1120
37 #define EVERGREEN_PM4_UCODE_SIZE 1376
38
39 static void evergreen_gpu_init(struct radeon_device *rdev);
40 void evergreen_fini(struct radeon_device *rdev);
41
42 void evergreen_pm_misc(struct radeon_device *rdev)
43 {
44         int requested_index = rdev->pm.requested_power_state_index;
45         struct radeon_power_state *ps = &rdev->pm.power_state[requested_index];
46         struct radeon_voltage *voltage = &ps->clock_info[0].voltage;
47
48         if ((voltage->type == VOLTAGE_SW) && voltage->voltage)
49                 radeon_atom_set_voltage(rdev, voltage->voltage);
50 }
51
52 void evergreen_pm_prepare(struct radeon_device *rdev)
53 {
54         struct drm_device *ddev = rdev->ddev;
55         struct drm_crtc *crtc;
56         struct radeon_crtc *radeon_crtc;
57         u32 tmp;
58
59         /* disable any active CRTCs */
60         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
61                 radeon_crtc = to_radeon_crtc(crtc);
62                 if (radeon_crtc->enabled) {
63                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
64                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
65                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
66                 }
67         }
68 }
69
70 void evergreen_pm_finish(struct radeon_device *rdev)
71 {
72         struct drm_device *ddev = rdev->ddev;
73         struct drm_crtc *crtc;
74         struct radeon_crtc *radeon_crtc;
75         u32 tmp;
76
77         /* enable any active CRTCs */
78         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
79                 radeon_crtc = to_radeon_crtc(crtc);
80                 if (radeon_crtc->enabled) {
81                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
82                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
83                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
84                 }
85         }
86 }
87
88 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
89 {
90         bool connected = false;
91
92         switch (hpd) {
93         case RADEON_HPD_1:
94                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
95                         connected = true;
96                 break;
97         case RADEON_HPD_2:
98                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
99                         connected = true;
100                 break;
101         case RADEON_HPD_3:
102                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
103                         connected = true;
104                 break;
105         case RADEON_HPD_4:
106                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
107                         connected = true;
108                 break;
109         case RADEON_HPD_5:
110                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
111                         connected = true;
112                 break;
113         case RADEON_HPD_6:
114                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
115                         connected = true;
116                         break;
117         default:
118                 break;
119         }
120
121         return connected;
122 }
123
124 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
125                                 enum radeon_hpd_id hpd)
126 {
127         u32 tmp;
128         bool connected = evergreen_hpd_sense(rdev, hpd);
129
130         switch (hpd) {
131         case RADEON_HPD_1:
132                 tmp = RREG32(DC_HPD1_INT_CONTROL);
133                 if (connected)
134                         tmp &= ~DC_HPDx_INT_POLARITY;
135                 else
136                         tmp |= DC_HPDx_INT_POLARITY;
137                 WREG32(DC_HPD1_INT_CONTROL, tmp);
138                 break;
139         case RADEON_HPD_2:
140                 tmp = RREG32(DC_HPD2_INT_CONTROL);
141                 if (connected)
142                         tmp &= ~DC_HPDx_INT_POLARITY;
143                 else
144                         tmp |= DC_HPDx_INT_POLARITY;
145                 WREG32(DC_HPD2_INT_CONTROL, tmp);
146                 break;
147         case RADEON_HPD_3:
148                 tmp = RREG32(DC_HPD3_INT_CONTROL);
149                 if (connected)
150                         tmp &= ~DC_HPDx_INT_POLARITY;
151                 else
152                         tmp |= DC_HPDx_INT_POLARITY;
153                 WREG32(DC_HPD3_INT_CONTROL, tmp);
154                 break;
155         case RADEON_HPD_4:
156                 tmp = RREG32(DC_HPD4_INT_CONTROL);
157                 if (connected)
158                         tmp &= ~DC_HPDx_INT_POLARITY;
159                 else
160                         tmp |= DC_HPDx_INT_POLARITY;
161                 WREG32(DC_HPD4_INT_CONTROL, tmp);
162                 break;
163         case RADEON_HPD_5:
164                 tmp = RREG32(DC_HPD5_INT_CONTROL);
165                 if (connected)
166                         tmp &= ~DC_HPDx_INT_POLARITY;
167                 else
168                         tmp |= DC_HPDx_INT_POLARITY;
169                 WREG32(DC_HPD5_INT_CONTROL, tmp);
170                         break;
171         case RADEON_HPD_6:
172                 tmp = RREG32(DC_HPD6_INT_CONTROL);
173                 if (connected)
174                         tmp &= ~DC_HPDx_INT_POLARITY;
175                 else
176                         tmp |= DC_HPDx_INT_POLARITY;
177                 WREG32(DC_HPD6_INT_CONTROL, tmp);
178                 break;
179         default:
180                 break;
181         }
182 }
183
184 void evergreen_hpd_init(struct radeon_device *rdev)
185 {
186         struct drm_device *dev = rdev->ddev;
187         struct drm_connector *connector;
188         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
189                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
190
191         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
192                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
193                 switch (radeon_connector->hpd.hpd) {
194                 case RADEON_HPD_1:
195                         WREG32(DC_HPD1_CONTROL, tmp);
196                         rdev->irq.hpd[0] = true;
197                         break;
198                 case RADEON_HPD_2:
199                         WREG32(DC_HPD2_CONTROL, tmp);
200                         rdev->irq.hpd[1] = true;
201                         break;
202                 case RADEON_HPD_3:
203                         WREG32(DC_HPD3_CONTROL, tmp);
204                         rdev->irq.hpd[2] = true;
205                         break;
206                 case RADEON_HPD_4:
207                         WREG32(DC_HPD4_CONTROL, tmp);
208                         rdev->irq.hpd[3] = true;
209                         break;
210                 case RADEON_HPD_5:
211                         WREG32(DC_HPD5_CONTROL, tmp);
212                         rdev->irq.hpd[4] = true;
213                         break;
214                 case RADEON_HPD_6:
215                         WREG32(DC_HPD6_CONTROL, tmp);
216                         rdev->irq.hpd[5] = true;
217                         break;
218                 default:
219                         break;
220                 }
221         }
222         if (rdev->irq.installed)
223                 evergreen_irq_set(rdev);
224 }
225
226 void evergreen_hpd_fini(struct radeon_device *rdev)
227 {
228         struct drm_device *dev = rdev->ddev;
229         struct drm_connector *connector;
230
231         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
232                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
233                 switch (radeon_connector->hpd.hpd) {
234                 case RADEON_HPD_1:
235                         WREG32(DC_HPD1_CONTROL, 0);
236                         rdev->irq.hpd[0] = false;
237                         break;
238                 case RADEON_HPD_2:
239                         WREG32(DC_HPD2_CONTROL, 0);
240                         rdev->irq.hpd[1] = false;
241                         break;
242                 case RADEON_HPD_3:
243                         WREG32(DC_HPD3_CONTROL, 0);
244                         rdev->irq.hpd[2] = false;
245                         break;
246                 case RADEON_HPD_4:
247                         WREG32(DC_HPD4_CONTROL, 0);
248                         rdev->irq.hpd[3] = false;
249                         break;
250                 case RADEON_HPD_5:
251                         WREG32(DC_HPD5_CONTROL, 0);
252                         rdev->irq.hpd[4] = false;
253                         break;
254                 case RADEON_HPD_6:
255                         WREG32(DC_HPD6_CONTROL, 0);
256                         rdev->irq.hpd[5] = false;
257                         break;
258                 default:
259                         break;
260                 }
261         }
262 }
263
/* Display bandwidth/watermark programming hook for evergreen ASICs.
 * Not implemented yet for this family. */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
        /* XXX */
}
268
269 static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
270 {
271         unsigned i;
272         u32 tmp;
273
274         for (i = 0; i < rdev->usec_timeout; i++) {
275                 /* read MC_STATUS */
276                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
277                 if (!tmp)
278                         return 0;
279                 udelay(1);
280         }
281         return -1;
282 }
283
284 /*
285  * GART
286  */
287 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
288 {
289         unsigned i;
290         u32 tmp;
291
292         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
293         for (i = 0; i < rdev->usec_timeout; i++) {
294                 /* read MC_STATUS */
295                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
296                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
297                 if (tmp == 2) {
298                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
299                         return;
300                 }
301                 if (tmp) {
302                         return;
303                 }
304                 udelay(1);
305         }
306 }
307
/* Enable the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and the per-client L1 TLBs, and point VM context 0 at the GTT
 * aperture.  Returns 0 on success or a negative error code.
 */
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int r;

        if (rdev->gart.table.vram.robj == NULL) {
                dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
                return -EINVAL;
        }
        r = radeon_gart_table_vram_pin(rdev);
        if (r)
                return r;
        /* rewrite valid PTEs in case the table object moved */
        radeon_gart_restore(rdev);
        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        /* the same L1 TLB setup is applied to every MC client block */
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        /* context 0 translates the GTT range via the pinned table */
        WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
        WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
                                RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
        /* out-of-range translations fall back to the dummy page */
        WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
                        (u32)(rdev->dummy_page.addr >> 12));
        WREG32(VM_CONTEXT1_CNTL, 0);

        evergreen_pcie_gart_tlb_flush(rdev);
        rdev->gart.ready = true;
        return 0;
}
352
/* Disable the PCIE GART: turn off both VM contexts, drop the L2 cache
 * enable, reprogram the L1 TLBs without their enable bits, and unpin
 * the page-table VRAM object if it exists.
 */
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
        u32 tmp;
        int r;

        /* Disable all tables */
        WREG32(VM_CONTEXT0_CNTL, 0);
        WREG32(VM_CONTEXT1_CNTL, 0);

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        if (rdev->gart.table.vram.robj) {
                /* best effort: skip the unpin if the reserve fails */
                r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
                if (likely(r == 0)) {
                        radeon_bo_kunmap(rdev->gart.table.vram.robj);
                        radeon_bo_unpin(rdev->gart.table.vram.robj);
                        radeon_bo_unreserve(rdev->gart.table.vram.robj);
                }
        }
}
385
/* Tear down the PCIE GART: disable the hardware translation paths,
 * then free the page-table VRAM object and the GART bookkeeping. */
void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
        evergreen_pcie_gart_disable(rdev);
        radeon_gart_table_vram_free(rdev);
        radeon_gart_fini(rdev);
}
392
393
/* Configure the VM hardware for AGP operation: enable the L2 cache and
 * L1 TLBs as for GART, but leave both VM contexts disabled so no
 * page-table translation takes place.
 */
void evergreen_agp_enable(struct radeon_device *rdev)
{
        u32 tmp;

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        /* the same L1 TLB setup is applied to every MC client block */
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        /* no VM contexts: AGP accesses bypass page-table translation */
        WREG32(VM_CONTEXT0_CNTL, 0);
        WREG32(VM_CONTEXT1_CNTL, 0);
}
419
/* Save the current VGA/CRTC state into @save and shut down all display
 * output so the memory controller can be reprogrammed safely.
 * Restored later by evergreen_mc_resume().
 */
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        /* snapshot the six VGA controls plus render/HDP controls */
        save->vga_control[0] = RREG32(D1VGA_CONTROL);
        save->vga_control[1] = RREG32(D2VGA_CONTROL);
        save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
        save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
        save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
        save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
        save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
        save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
        /* snapshot the six CRTC control registers */
        save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
        save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
        save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
        save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
        save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
        save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

        /* Stop all video */
        WREG32(VGA_RENDER_CONTROL, 0);
        /* lock CRTC updates while the controls are cleared */
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

        /* disable the VGA paths last */
        WREG32(D1VGA_CONTROL, 0);
        WREG32(D2VGA_CONTROL, 0);
        WREG32(EVERGREEN_D3VGA_CONTROL, 0);
        WREG32(EVERGREEN_D4VGA_CONTROL, 0);
        WREG32(EVERGREEN_D5VGA_CONTROL, 0);
        WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}
465
466 static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
467 {
468         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
469                upper_32_bits(rdev->mc.vram_start));
470         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
471                upper_32_bits(rdev->mc.vram_start));
472         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
473                (u32)rdev->mc.vram_start);
474         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
475                (u32)rdev->mc.vram_start);
476
477         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
478                upper_32_bits(rdev->mc.vram_start));
479         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
480                upper_32_bits(rdev->mc.vram_start));
481         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
482                (u32)rdev->mc.vram_start);
483         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
484                (u32)rdev->mc.vram_start);
485
486         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
487                upper_32_bits(rdev->mc.vram_start));
488         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
489                upper_32_bits(rdev->mc.vram_start));
490         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
491                (u32)rdev->mc.vram_start);
492         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
493                (u32)rdev->mc.vram_start);
494
495         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
496                upper_32_bits(rdev->mc.vram_start));
497         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
498                upper_32_bits(rdev->mc.vram_start));
499         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
500                (u32)rdev->mc.vram_start);
501         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
502                (u32)rdev->mc.vram_start);
503
504         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
505                upper_32_bits(rdev->mc.vram_start));
506         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
507                upper_32_bits(rdev->mc.vram_start));
508         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
509                (u32)rdev->mc.vram_start);
510         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
511                (u32)rdev->mc.vram_start);
512
513         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
514                upper_32_bits(rdev->mc.vram_start));
515         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
516                upper_32_bits(rdev->mc.vram_start));
517         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
518                (u32)rdev->mc.vram_start);
519         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
520                (u32)rdev->mc.vram_start);
521
522         WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
523         WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
524         /* Unlock host access */
525         WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
526         mdelay(1);
527         /* Restore video state */
528         WREG32(D1VGA_CONTROL, save->vga_control[0]);
529         WREG32(D2VGA_CONTROL, save->vga_control[1]);
530         WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
531         WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
532         WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
533         WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
534         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
535         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
536         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
537         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
538         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
539         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
540         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
541         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
542         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
543         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
544         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
545         WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
546         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
547         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
548         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
549         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
550         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
551         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
552         WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
553 }
554
/* Program the memory-controller apertures (FB location, system
 * aperture, AGP window) with the display stopped, then restore the
 * display state.  Must be called before anything scans out of VRAM.
 */
static void evergreen_mc_program(struct radeon_device *rdev)
{
        struct evergreen_mc_save save;
        u32 tmp;
        int i, j;

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }
        WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

        /* stop scanout so aperture changes can't race with display */
        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
        }
        /* Lockout access through VGA aperture*/
        WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
        /* Update configuration */
        if (rdev->flags & RADEON_IS_AGP) {
                /* the system aperture must span both VRAM and the AGP
                 * window, whichever order they sit in the address space */
                if (rdev->mc.vram_start < rdev->mc.gtt_start) {
                        /* VRAM before AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.vram_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.gtt_end >> 12);
                } else {
                        /* VRAM after AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.gtt_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.vram_end >> 12);
                }
        } else {
                WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                        rdev->mc.vram_start >> 12);
                WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                        rdev->mc.vram_end >> 12);
        }
        WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
        /* FB location: end in bits 31:16, start in bits 15:0, 16MB units */
        tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
        tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
        WREG32(MC_VM_FB_LOCATION, tmp);
        WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
        WREG32(HDP_NONSURFACE_INFO, (2 << 7));
        WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
        if (rdev->flags & RADEON_IS_AGP) {
                WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
                WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
                WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
        } else {
                /* BOT > TOP disables the AGP aperture */
                WREG32(MC_VM_AGP_BASE, 0);
                WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
                WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
        }
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
        }
        evergreen_mc_resume(rdev, &save);
        /* we need to own VRAM, so turn off the VGA renderer here
         * to stop it overwriting our objects */
        rv515_vga_render_disable(rdev);
}
622
623 /*
624  * CP.
625  */
626
/* Upload the PFP and ME (PM4) microcode images into the command
 * processor's ucode RAMs.  Stops the CP first.  Returns 0 on success,
 * -EINVAL if the firmware images have not been fetched yet.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
        const __be32 *fw_data;
        int i;

        if (!rdev->me_fw || !rdev->pfp_fw)
                return -EINVAL;

        r700_cp_stop(rdev);
        WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));

        /* firmware words are big-endian; swap while streaming them in */
        fw_data = (const __be32 *)rdev->pfp_fw->data;
        WREG32(CP_PFP_UCODE_ADDR, 0);
        for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
                WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
        WREG32(CP_PFP_UCODE_ADDR, 0);

        fw_data = (const __be32 *)rdev->me_fw->data;
        WREG32(CP_ME_RAM_WADDR, 0);
        for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
                WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

        /* reset the ucode RAM read/write addresses after the upload */
        WREG32(CP_PFP_UCODE_ADDR, 0);
        WREG32(CP_ME_RAM_WADDR, 0);
        WREG32(CP_ME_RAM_RADDR, 0);
        return 0;
}
654
/* Bring the command processor back up: soft-reset the CP and its
 * dependent blocks, program the ring buffer, start the CP and run a
 * ring test.  Returns 0 on success or the ring-test error code.
 */
int evergreen_cp_resume(struct radeon_device *rdev)
{
        u32 tmp;
        u32 rb_bufsz;
        int r;

        /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
        WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
                                 SOFT_RESET_PA |
                                 SOFT_RESET_SH |
                                 SOFT_RESET_VGT |
                                 SOFT_RESET_SX));
        RREG32(GRBM_SOFT_RESET);
        mdelay(15);
        WREG32(GRBM_SOFT_RESET, 0);
        RREG32(GRBM_SOFT_RESET);

        /* Set ring buffer size */
        rb_bufsz = drm_order(rdev->cp.ring_size / 8);
        tmp = RB_NO_UPDATE | (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
        tmp |= BUF_SWAP_32BIT;
#endif
        WREG32(CP_RB_CNTL, tmp);
        WREG32(CP_SEM_WAIT_TIMER, 0x4);

        /* Set the write pointer delay */
        WREG32(CP_RB_WPTR_DELAY, 0);

        /* Initialize the ring buffer's read and write pointers */
        WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
        WREG32(CP_RB_RPTR_WR, 0);
        WREG32(CP_RB_WPTR, 0);
        WREG32(CP_RB_RPTR_ADDR, rdev->cp.gpu_addr & 0xFFFFFFFF);
        WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->cp.gpu_addr));
        mdelay(1);
        /* drop RB_RPTR_WR_ENA now that the pointers are initialized */
        WREG32(CP_RB_CNTL, tmp);

        WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
        WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

        /* sync the software copies with the hardware pointers */
        rdev->cp.rptr = RREG32(CP_RB_RPTR);
        rdev->cp.wptr = RREG32(CP_RB_WPTR);

        r600_cp_start(rdev);
        rdev->cp.ready = true;
        r = radeon_ring_test(rdev);
        if (r) {
                rdev->cp.ready = false;
                return r;
        }
        return 0;
}
708
709 /*
710  * Core functions
711  */
/*
 * evergreen_get_tile_pipe_to_backend_map - compute the pipe-to-backend map
 *
 * Builds the GB_BACKEND_MAP value: a packed array of 4-bit fields, one
 * per tile pipe, each naming the render backend that pipe maps to.
 * Enabled backends are distributed round-robin over the (optionally
 * swizzled) pipe order.
 *
 * @num_tile_pipes:       requested pipe count (clamped to 1..EVERGREEN_MAX_PIPES)
 * @num_backends:         requested backend count (clamped to 1..EVERGREEN_MAX_BACKENDS)
 * @backend_disable_mask: bitmask of backends that must not be used
 *
 * Returns the packed backend map suitable for writing to GB_BACKEND_MAP.
 */
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
                                                  u32 num_tile_pipes,
                                                  u32 num_backends,
                                                  u32 backend_disable_mask)
{
	u32 backend_map = 0;
	u32 enabled_backends_mask = 0;
	u32 enabled_backends_count = 0;
	u32 cur_pipe;
	u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
	u32 cur_backend = 0;
	u32 i;
	bool force_no_swizzle;

	/* Clamp the requested counts to the hardware limits */
	if (num_tile_pipes > EVERGREEN_MAX_PIPES)
		num_tile_pipes = EVERGREEN_MAX_PIPES;
	if (num_tile_pipes < 1)
		num_tile_pipes = 1;
	if (num_backends > EVERGREEN_MAX_BACKENDS)
		num_backends = EVERGREEN_MAX_BACKENDS;
	if (num_backends < 1)
		num_backends = 1;

	/* Collect up to num_backends backends not covered by the disable mask */
	for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
		if (((backend_disable_mask >> i) & 1) == 0) {
			enabled_backends_mask |= (1 << i);
			++enabled_backends_count;
		}
		if (enabled_backends_count == num_backends)
			break;
	}

	/* If everything was disabled, fall back to backend 0 alone */
	if (enabled_backends_count == 0) {
		enabled_backends_mask = 1;
		enabled_backends_count = 1;
	}

	if (enabled_backends_count != num_backends)
		num_backends = enabled_backends_count;

	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
	/* Per-family choice of pipe swizzling; larger chips skip it */
	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_REDWOOD:
		force_no_swizzle = false;
		break;
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	default:
		force_no_swizzle = true;
		break;
	}
	/* Even on swizzle-capable chips, disable swizzling when two enabled
	 * backends are adjacent in the mask */
	if (force_no_swizzle) {
		bool last_backend_enabled = false;

		force_no_swizzle = false;
		for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
			if (((enabled_backends_mask >> i) & 1) == 1) {
				if (last_backend_enabled)
					force_no_swizzle = true;
				last_backend_enabled = true;
			} else
				last_backend_enabled = false;
		}
	}

	/* Fixed hardware swizzle tables; odd pipe counts are invalid */
	switch (num_tile_pipes) {
	case 1:
	case 3:
	case 5:
	case 7:
		DRM_ERROR("odd number of pipes!\n");
		break;
	case 2:
		swizzle_pipe[0] = 0;
		swizzle_pipe[1] = 1;
		break;
	case 4:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 1;
			swizzle_pipe[3] = 3;
		}
		break;
	case 6:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 1;
			swizzle_pipe[4] = 3;
			swizzle_pipe[5] = 5;
		}
		break;
	case 8:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
			swizzle_pipe[6] = 6;
			swizzle_pipe[7] = 7;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 6;
			swizzle_pipe[4] = 1;
			swizzle_pipe[5] = 3;
			swizzle_pipe[6] = 5;
			swizzle_pipe[7] = 7;
		}
		break;
	}

	/* Assign enabled backends to pipes round-robin, 4 bits per pipe */
	for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
		while (((1 << cur_backend) & enabled_backends_mask) == 0)
			cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;

		backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));

		cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
	}

	return backend_map;
}
854
855 static void evergreen_gpu_init(struct radeon_device *rdev)
856 {
857         u32 cc_rb_backend_disable = 0;
858         u32 cc_gc_shader_pipe_config;
859         u32 gb_addr_config = 0;
860         u32 mc_shared_chmap, mc_arb_ramcfg;
861         u32 gb_backend_map;
862         u32 grbm_gfx_index;
863         u32 sx_debug_1;
864         u32 smx_dc_ctl0;
865         u32 sq_config;
866         u32 sq_lds_resource_mgmt;
867         u32 sq_gpr_resource_mgmt_1;
868         u32 sq_gpr_resource_mgmt_2;
869         u32 sq_gpr_resource_mgmt_3;
870         u32 sq_thread_resource_mgmt;
871         u32 sq_thread_resource_mgmt_2;
872         u32 sq_stack_resource_mgmt_1;
873         u32 sq_stack_resource_mgmt_2;
874         u32 sq_stack_resource_mgmt_3;
875         u32 vgt_cache_invalidation;
876         u32 hdp_host_path_cntl;
877         int i, j, num_shader_engines, ps_thread_count;
878
879         switch (rdev->family) {
880         case CHIP_CYPRESS:
881         case CHIP_HEMLOCK:
882                 rdev->config.evergreen.num_ses = 2;
883                 rdev->config.evergreen.max_pipes = 4;
884                 rdev->config.evergreen.max_tile_pipes = 8;
885                 rdev->config.evergreen.max_simds = 10;
886                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
887                 rdev->config.evergreen.max_gprs = 256;
888                 rdev->config.evergreen.max_threads = 248;
889                 rdev->config.evergreen.max_gs_threads = 32;
890                 rdev->config.evergreen.max_stack_entries = 512;
891                 rdev->config.evergreen.sx_num_of_sets = 4;
892                 rdev->config.evergreen.sx_max_export_size = 256;
893                 rdev->config.evergreen.sx_max_export_pos_size = 64;
894                 rdev->config.evergreen.sx_max_export_smx_size = 192;
895                 rdev->config.evergreen.max_hw_contexts = 8;
896                 rdev->config.evergreen.sq_num_cf_insts = 2;
897
898                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
899                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
900                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
901                 break;
902         case CHIP_JUNIPER:
903                 rdev->config.evergreen.num_ses = 1;
904                 rdev->config.evergreen.max_pipes = 4;
905                 rdev->config.evergreen.max_tile_pipes = 4;
906                 rdev->config.evergreen.max_simds = 10;
907                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
908                 rdev->config.evergreen.max_gprs = 256;
909                 rdev->config.evergreen.max_threads = 248;
910                 rdev->config.evergreen.max_gs_threads = 32;
911                 rdev->config.evergreen.max_stack_entries = 512;
912                 rdev->config.evergreen.sx_num_of_sets = 4;
913                 rdev->config.evergreen.sx_max_export_size = 256;
914                 rdev->config.evergreen.sx_max_export_pos_size = 64;
915                 rdev->config.evergreen.sx_max_export_smx_size = 192;
916                 rdev->config.evergreen.max_hw_contexts = 8;
917                 rdev->config.evergreen.sq_num_cf_insts = 2;
918
919                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
920                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
921                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
922                 break;
923         case CHIP_REDWOOD:
924                 rdev->config.evergreen.num_ses = 1;
925                 rdev->config.evergreen.max_pipes = 4;
926                 rdev->config.evergreen.max_tile_pipes = 4;
927                 rdev->config.evergreen.max_simds = 5;
928                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
929                 rdev->config.evergreen.max_gprs = 256;
930                 rdev->config.evergreen.max_threads = 248;
931                 rdev->config.evergreen.max_gs_threads = 32;
932                 rdev->config.evergreen.max_stack_entries = 256;
933                 rdev->config.evergreen.sx_num_of_sets = 4;
934                 rdev->config.evergreen.sx_max_export_size = 256;
935                 rdev->config.evergreen.sx_max_export_pos_size = 64;
936                 rdev->config.evergreen.sx_max_export_smx_size = 192;
937                 rdev->config.evergreen.max_hw_contexts = 8;
938                 rdev->config.evergreen.sq_num_cf_insts = 2;
939
940                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
941                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
942                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
943                 break;
944         case CHIP_CEDAR:
945         default:
946                 rdev->config.evergreen.num_ses = 1;
947                 rdev->config.evergreen.max_pipes = 2;
948                 rdev->config.evergreen.max_tile_pipes = 2;
949                 rdev->config.evergreen.max_simds = 2;
950                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
951                 rdev->config.evergreen.max_gprs = 256;
952                 rdev->config.evergreen.max_threads = 192;
953                 rdev->config.evergreen.max_gs_threads = 16;
954                 rdev->config.evergreen.max_stack_entries = 256;
955                 rdev->config.evergreen.sx_num_of_sets = 4;
956                 rdev->config.evergreen.sx_max_export_size = 128;
957                 rdev->config.evergreen.sx_max_export_pos_size = 32;
958                 rdev->config.evergreen.sx_max_export_smx_size = 96;
959                 rdev->config.evergreen.max_hw_contexts = 4;
960                 rdev->config.evergreen.sq_num_cf_insts = 1;
961
962                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
963                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
964                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
965                 break;
966         }
967
968         /* Initialize HDP */
969         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
970                 WREG32((0x2c14 + j), 0x00000000);
971                 WREG32((0x2c18 + j), 0x00000000);
972                 WREG32((0x2c1c + j), 0x00000000);
973                 WREG32((0x2c20 + j), 0x00000000);
974                 WREG32((0x2c24 + j), 0x00000000);
975         }
976
977         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
978
979         cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
980
981         cc_gc_shader_pipe_config |=
982                 INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
983                                   & EVERGREEN_MAX_PIPES_MASK);
984         cc_gc_shader_pipe_config |=
985                 INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
986                                & EVERGREEN_MAX_SIMDS_MASK);
987
988         cc_rb_backend_disable =
989                 BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
990                                 & EVERGREEN_MAX_BACKENDS_MASK);
991
992
993         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
994         mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
995
996         switch (rdev->config.evergreen.max_tile_pipes) {
997         case 1:
998         default:
999                 gb_addr_config |= NUM_PIPES(0);
1000                 break;
1001         case 2:
1002                 gb_addr_config |= NUM_PIPES(1);
1003                 break;
1004         case 4:
1005                 gb_addr_config |= NUM_PIPES(2);
1006                 break;
1007         case 8:
1008                 gb_addr_config |= NUM_PIPES(3);
1009                 break;
1010         }
1011
1012         gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
1013         gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
1014         gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
1015         gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
1016         gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
1017         gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
1018
1019         if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
1020                 gb_addr_config |= ROW_SIZE(2);
1021         else
1022                 gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
1023
1024         if (rdev->ddev->pdev->device == 0x689e) {
1025                 u32 efuse_straps_4;
1026                 u32 efuse_straps_3;
1027                 u8 efuse_box_bit_131_124;
1028
1029                 WREG32(RCU_IND_INDEX, 0x204);
1030                 efuse_straps_4 = RREG32(RCU_IND_DATA);
1031                 WREG32(RCU_IND_INDEX, 0x203);
1032                 efuse_straps_3 = RREG32(RCU_IND_DATA);
1033                 efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
1034
1035                 switch(efuse_box_bit_131_124) {
1036                 case 0x00:
1037                         gb_backend_map = 0x76543210;
1038                         break;
1039                 case 0x55:
1040                         gb_backend_map = 0x77553311;
1041                         break;
1042                 case 0x56:
1043                         gb_backend_map = 0x77553300;
1044                         break;
1045                 case 0x59:
1046                         gb_backend_map = 0x77552211;
1047                         break;
1048                 case 0x66:
1049                         gb_backend_map = 0x77443300;
1050                         break;
1051                 case 0x99:
1052                         gb_backend_map = 0x66552211;
1053                         break;
1054                 case 0x5a:
1055                         gb_backend_map = 0x77552200;
1056                         break;
1057                 case 0xaa:
1058                         gb_backend_map = 0x66442200;
1059                         break;
1060                 case 0x95:
1061                         gb_backend_map = 0x66553311;
1062                         break;
1063                 default:
1064                         DRM_ERROR("bad backend map, using default\n");
1065                         gb_backend_map =
1066                                 evergreen_get_tile_pipe_to_backend_map(rdev,
1067                                                                        rdev->config.evergreen.max_tile_pipes,
1068                                                                        rdev->config.evergreen.max_backends,
1069                                                                        ((EVERGREEN_MAX_BACKENDS_MASK <<
1070                                                                    rdev->config.evergreen.max_backends) &
1071                                                                         EVERGREEN_MAX_BACKENDS_MASK));
1072                         break;
1073                 }
1074         } else if (rdev->ddev->pdev->device == 0x68b9) {
1075                 u32 efuse_straps_3;
1076                 u8 efuse_box_bit_127_124;
1077
1078                 WREG32(RCU_IND_INDEX, 0x203);
1079                 efuse_straps_3 = RREG32(RCU_IND_DATA);
1080                 efuse_box_bit_127_124 = (u8)(efuse_straps_3 & 0xF0000000) >> 28;
1081
1082                 switch(efuse_box_bit_127_124) {
1083                 case 0x0:
1084                         gb_backend_map = 0x00003210;
1085                         break;
1086                 case 0x5:
1087                 case 0x6:
1088                 case 0x9:
1089                 case 0xa:
1090                         gb_backend_map = 0x00003311;
1091                         break;
1092                 default:
1093                         DRM_ERROR("bad backend map, using default\n");
1094                         gb_backend_map =
1095                                 evergreen_get_tile_pipe_to_backend_map(rdev,
1096                                                                        rdev->config.evergreen.max_tile_pipes,
1097                                                                        rdev->config.evergreen.max_backends,
1098                                                                        ((EVERGREEN_MAX_BACKENDS_MASK <<
1099                                                                    rdev->config.evergreen.max_backends) &
1100                                                                         EVERGREEN_MAX_BACKENDS_MASK));
1101                         break;
1102                 }
1103         } else
1104                 gb_backend_map =
1105                         evergreen_get_tile_pipe_to_backend_map(rdev,
1106                                                                rdev->config.evergreen.max_tile_pipes,
1107                                                                rdev->config.evergreen.max_backends,
1108                                                                ((EVERGREEN_MAX_BACKENDS_MASK <<
1109                                                                  rdev->config.evergreen.max_backends) &
1110                                                                 EVERGREEN_MAX_BACKENDS_MASK));
1111
1112         WREG32(GB_BACKEND_MAP, gb_backend_map);
1113         WREG32(GB_ADDR_CONFIG, gb_addr_config);
1114         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
1115         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
1116
1117         num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
1118         grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
1119
1120         for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
1121                 u32 rb = cc_rb_backend_disable | (0xf0 << 16);
1122                 u32 sp = cc_gc_shader_pipe_config;
1123                 u32 gfx = grbm_gfx_index | SE_INDEX(i);
1124
1125                 if (i == num_shader_engines) {
1126                         rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
1127                         sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
1128                 }
1129
1130                 WREG32(GRBM_GFX_INDEX, gfx);
1131                 WREG32(RLC_GFX_INDEX, gfx);
1132
1133                 WREG32(CC_RB_BACKEND_DISABLE, rb);
1134                 WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
1135                 WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
1136                 WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
1137         }
1138
1139         grbm_gfx_index |= SE_BROADCAST_WRITES;
1140         WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
1141         WREG32(RLC_GFX_INDEX, grbm_gfx_index);
1142
1143         WREG32(CGTS_SYS_TCC_DISABLE, 0);
1144         WREG32(CGTS_TCC_DISABLE, 0);
1145         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
1146         WREG32(CGTS_USER_TCC_DISABLE, 0);
1147
1148         /* set HW defaults for 3D engine */
1149         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
1150                                      ROQ_IB2_START(0x2b)));
1151
1152         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
1153
1154         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
1155                              SYNC_GRADIENT |
1156                              SYNC_WALKER |
1157                              SYNC_ALIGNER));
1158
1159         sx_debug_1 = RREG32(SX_DEBUG_1);
1160         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
1161         WREG32(SX_DEBUG_1, sx_debug_1);
1162
1163
1164         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
1165         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
1166         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
1167         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
1168
1169         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
1170                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
1171                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
1172
1173         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
1174                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
1175                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
1176
1177         WREG32(VGT_NUM_INSTANCES, 1);
1178         WREG32(SPI_CONFIG_CNTL, 0);
1179         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
1180         WREG32(CP_PERFMON_CNTL, 0);
1181
1182         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
1183                                   FETCH_FIFO_HIWATER(0x4) |
1184                                   DONE_FIFO_HIWATER(0xe0) |
1185                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
1186
1187         sq_config = RREG32(SQ_CONFIG);
1188         sq_config &= ~(PS_PRIO(3) |
1189                        VS_PRIO(3) |
1190                        GS_PRIO(3) |
1191                        ES_PRIO(3));
1192         sq_config |= (VC_ENABLE |
1193                       EXPORT_SRC_C |
1194                       PS_PRIO(0) |
1195                       VS_PRIO(1) |
1196                       GS_PRIO(2) |
1197                       ES_PRIO(3));
1198
1199         if (rdev->family == CHIP_CEDAR)
1200                 /* no vertex cache */
1201                 sq_config &= ~VC_ENABLE;
1202
1203         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
1204
1205         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
1206         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
1207         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
1208         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1209         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1210         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1211         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1212
1213         if (rdev->family == CHIP_CEDAR)
1214                 ps_thread_count = 96;
1215         else
1216                 ps_thread_count = 128;
1217
1218         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
1219         sq_thread_resource_mgmt |= NUM_VS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1220         sq_thread_resource_mgmt |= NUM_GS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1221         sq_thread_resource_mgmt |= NUM_ES_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1222         sq_thread_resource_mgmt_2 = NUM_HS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1223         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
1224
1225         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1226         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1227         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1228         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1229         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1230         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1231
1232         WREG32(SQ_CONFIG, sq_config);
1233         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
1234         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
1235         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
1236         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
1237         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
1238         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
1239         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
1240         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
1241         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
1242         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
1243
1244         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
1245                                           FORCE_EOV_MAX_REZ_CNT(255)));
1246
1247         if (rdev->family == CHIP_CEDAR)
1248                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
1249         else
1250                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
1251         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
1252         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
1253
1254         WREG32(VGT_GS_VERTEX_REUSE, 16);
1255         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
1256
1257         WREG32(CB_PERF_CTR0_SEL_0, 0);
1258         WREG32(CB_PERF_CTR0_SEL_1, 0);
1259         WREG32(CB_PERF_CTR1_SEL_0, 0);
1260         WREG32(CB_PERF_CTR1_SEL_1, 0);
1261         WREG32(CB_PERF_CTR2_SEL_0, 0);
1262         WREG32(CB_PERF_CTR2_SEL_1, 0);
1263         WREG32(CB_PERF_CTR3_SEL_0, 0);
1264         WREG32(CB_PERF_CTR3_SEL_1, 0);
1265
1266         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
1267         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
1268
1269         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
1270
1271         udelay(50);
1272
1273 }
1274
/*
 * evergreen_mc_init - probe VRAM configuration and set up the memory map
 *
 * Reads the memory controller straps to determine channel width and
 * channel count, reads the VRAM size from CONFIG_MEMSIZE, records the
 * PCI aperture, and lets r600_vram_gtt_location() place VRAM/GTT in
 * the GPU address space.
 *
 * Always returns 0.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* Channel width (bits) from the RAM config straps */
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* Number of memory channels (1/2/4/8) */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	/* CPU can only see VRAM through the aperture */
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
1320
/*
 * evergreen_gpu_is_lockup - check whether the GPU has locked up
 *
 * Lockup detection is not implemented for evergreen yet, so this
 * always reports "not locked up".
 */
bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return false;
}
1326
/*
 * evergreen_gpu_soft_reset - soft-reset the GFX and system blocks
 *
 * Dumps the GRBM/SRBM status registers, stops the memory controller
 * and the CP, pulses GRBM_SOFT_RESET (all gfx blocks) and then
 * SRBM_SOFT_RESET (all system blocks), re-runs the ATOM ASIC init
 * tables (the GPU often comes out of reset in an incoherent state),
 * and restores the memory controller.
 *
 * Always returns 0.
 */
static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 srbm_reset = 0;
	u32 grbm_reset = 0;

	/* Log pre-reset engine status for debugging */
	dev_info(rdev->dev, "GPU softreset \n");
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		RREG32(SRBM_STATUS));
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	/* reset all the gfx blocks */
	grbm_reset = (SOFT_RESET_CP |
		      SOFT_RESET_CB |
		      SOFT_RESET_DB |
		      SOFT_RESET_PA |
		      SOFT_RESET_SC |
		      SOFT_RESET_SPI |
		      SOFT_RESET_SH |
		      SOFT_RESET_SX |
		      SOFT_RESET_TC |
		      SOFT_RESET_TA |
		      SOFT_RESET_VC |
		      SOFT_RESET_VGT);

	dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
	WREG32(GRBM_SOFT_RESET, grbm_reset);
	(void)RREG32(GRBM_SOFT_RESET);	/* read back to post the write */
	udelay(50);
	WREG32(GRBM_SOFT_RESET, 0);
	(void)RREG32(GRBM_SOFT_RESET);

	/* reset all the system blocks */
	srbm_reset = SRBM_SOFT_RESET_ALL_MASK;

	dev_info(rdev->dev, "  SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
	WREG32(SRBM_SOFT_RESET, srbm_reset);
	(void)RREG32(SRBM_SOFT_RESET);	/* read back to post the write */
	udelay(50);
	WREG32(SRBM_SOFT_RESET, 0);
	(void)RREG32(SRBM_SOFT_RESET);
	/* Wait a little for things to settle down */
	udelay(50);
	/* Log post-reset engine status */
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		RREG32(SRBM_STATUS));
	/* After reset we need to reinit the asic as GPU often endup in an
	 * incoherent state.
	 */
	atom_asic_init(rdev->mode_info.atom_context);
	evergreen_mc_resume(rdev, &save);
	return 0;
}
1396
/* Full ASIC reset entry point for evergreen parts.
 * Currently delegates everything to the GRBM/SRBM soft reset path.
 * Returns 0 on success or a negative error code.
 */
int evergreen_asic_reset(struct radeon_device *rdev)
{
        return evergreen_gpu_soft_reset(rdev);
}
1401
1402 /* Interrupts */
1403
1404 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
1405 {
1406         switch (crtc) {
1407         case 0:
1408                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
1409         case 1:
1410                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
1411         case 2:
1412                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
1413         case 3:
1414                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
1415         case 4:
1416                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
1417         case 5:
1418                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
1419         default:
1420                 return 0;
1421         }
1422 }
1423
/* Force every display/CP interrupt enable bit in the hardware to the
 * disabled state.  Used when the IH is down so no source can assert.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
        u32 tmp;

        /* mask off CP (ring/IB) and GRBM interrupt sources */
        WREG32(CP_INT_CNTL, 0);
        WREG32(GRBM_INT_CNTL, 0);
        /* disable vblank/vline interrupts on all six display controllers */
        WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

        /* disable the per-CRTC graphics (pageflip/surface) interrupts */
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

        /* DAC autodetect interrupts off */
        WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
        WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

        /* clear each HPD pin's interrupt enable while preserving the
         * programmed polarity bit (read-modify-write keeps only
         * DC_HPDx_INT_POLARITY) */
        tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD1_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD2_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD3_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD4_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD5_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD6_INT_CONTROL, tmp);

}
1461
/* Program the hardware interrupt enables from the state in rdev->irq.
 * Builds enable masks for CP software interrupts, vblank on each of the
 * six display controllers, the six hot-plug-detect pins and the GUI
 * idle interrupt, then writes them all out in one pass.
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
        u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
        u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
        u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
        u32 grbm_int_cntl = 0;

        if (!rdev->irq.installed) {
                WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
                return -EINVAL;
        }
        /* don't enable anything if the ih is disabled */
        if (!rdev->ih.enabled) {
                r600_disable_interrupts(rdev);
                /* force the active interrupt state to all disabled */
                evergreen_disable_interrupt_state(rdev);
                return 0;
        }

        /* start from the current HPD control values with only the enable
         * bit cleared, so the programmed polarity is preserved */
        hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;

        if (rdev->irq.sw_int) {
                DRM_DEBUG("evergreen_irq_set: sw int\n");
                cp_int_cntl |= RB_INT_ENABLE;
        }
        if (rdev->irq.crtc_vblank_int[0]) {
                DRM_DEBUG("evergreen_irq_set: vblank 0\n");
                crtc1 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[1]) {
                DRM_DEBUG("evergreen_irq_set: vblank 1\n");
                crtc2 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[2]) {
                DRM_DEBUG("evergreen_irq_set: vblank 2\n");
                crtc3 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[3]) {
                DRM_DEBUG("evergreen_irq_set: vblank 3\n");
                crtc4 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[4]) {
                DRM_DEBUG("evergreen_irq_set: vblank 4\n");
                crtc5 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[5]) {
                DRM_DEBUG("evergreen_irq_set: vblank 5\n");
                crtc6 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.hpd[0]) {
                DRM_DEBUG("evergreen_irq_set: hpd 1\n");
                hpd1 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[1]) {
                DRM_DEBUG("evergreen_irq_set: hpd 2\n");
                hpd2 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[2]) {
                DRM_DEBUG("evergreen_irq_set: hpd 3\n");
                hpd3 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[3]) {
                DRM_DEBUG("evergreen_irq_set: hpd 4\n");
                hpd4 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[4]) {
                DRM_DEBUG("evergreen_irq_set: hpd 5\n");
                hpd5 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[5]) {
                DRM_DEBUG("evergreen_irq_set: hpd 6\n");
                hpd6 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.gui_idle) {
                DRM_DEBUG("gui idle\n");
                grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
        }

        /* write the accumulated masks out to the hardware */
        WREG32(CP_INT_CNTL, cp_int_cntl);
        WREG32(GRBM_INT_CNTL, grbm_int_cntl);

        WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
        WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
        WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
        WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
        WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
        WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);

        WREG32(DC_HPD1_INT_CONTROL, hpd1);
        WREG32(DC_HPD2_INT_CONTROL, hpd2);
        WREG32(DC_HPD3_INT_CONTROL, hpd3);
        WREG32(DC_HPD4_INT_CONTROL, hpd4);
        WREG32(DC_HPD5_INT_CONTROL, hpd5);
        WREG32(DC_HPD6_INT_CONTROL, hpd6);

        return 0;
}
1564
1565 static inline void evergreen_irq_ack(struct radeon_device *rdev,
1566                                      u32 *disp_int,
1567                                      u32 *disp_int_cont,
1568                                      u32 *disp_int_cont2,
1569                                      u32 *disp_int_cont3,
1570                                      u32 *disp_int_cont4,
1571                                      u32 *disp_int_cont5)
1572 {
1573         u32 tmp;
1574
1575         *disp_int = RREG32(DISP_INTERRUPT_STATUS);
1576         *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
1577         *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
1578         *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
1579         *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
1580         *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
1581
1582         if (*disp_int & LB_D1_VBLANK_INTERRUPT)
1583                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
1584         if (*disp_int & LB_D1_VLINE_INTERRUPT)
1585                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
1586
1587         if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
1588                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
1589         if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
1590                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
1591
1592         if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
1593                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
1594         if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
1595                 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
1596
1597         if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
1598                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
1599         if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
1600                 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
1601
1602         if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
1603                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
1604         if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
1605                 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
1606
1607         if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
1608                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
1609         if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
1610                 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
1611
1612         if (*disp_int & DC_HPD1_INTERRUPT) {
1613                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1614                 tmp |= DC_HPDx_INT_ACK;
1615                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1616         }
1617         if (*disp_int_cont & DC_HPD2_INTERRUPT) {
1618                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1619                 tmp |= DC_HPDx_INT_ACK;
1620                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1621         }
1622         if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
1623                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1624                 tmp |= DC_HPDx_INT_ACK;
1625                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1626         }
1627         if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
1628                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1629                 tmp |= DC_HPDx_INT_ACK;
1630                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1631         }
1632         if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
1633                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1634                 tmp |= DC_HPDx_INT_ACK;
1635                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1636         }
1637         if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
1638                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1639                 tmp |= DC_HPDx_INT_ACK;
1640                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1641         }
1642 }
1643
/* Fully quiesce interrupts: stop the IH, wait 1 ms for in-flight
 * interrupts, ack anything still pending, then clear every enable bit
 * so no source can re-assert.
 */
void evergreen_irq_disable(struct radeon_device *rdev)
{
        /* throwaway status words for the ack pass */
        u32 disp_int, disp_int_cont, disp_int_cont2;
        u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;

        r600_disable_interrupts(rdev);
        /* Wait and acknowledge irq */
        mdelay(1);
        evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
                          &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
        evergreen_disable_interrupt_state(rdev);
}
1656
/* Suspend-time teardown: disable all interrupts, then stop the RLC
 * (the microcontroller backing the interrupt handler ring).
 */
static void evergreen_irq_suspend(struct radeon_device *rdev)
{
        evergreen_irq_disable(rdev);
        r600_rlc_stop(rdev);
}
1662
1663 static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
1664 {
1665         u32 wptr, tmp;
1666
1667         /* XXX use writeback */
1668         wptr = RREG32(IH_RB_WPTR);
1669
1670         if (wptr & RB_OVERFLOW) {
1671                 /* When a ring buffer overflow happen start parsing interrupt
1672                  * from the last not overwritten vector (wptr + 16). Hopefully
1673                  * this should allow us to catchup.
1674                  */
1675                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
1676                         wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
1677                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
1678                 tmp = RREG32(IH_RB_CNTL);
1679                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
1680                 WREG32(IH_RB_CNTL, tmp);
1681         }
1682         return (wptr & rdev->ih.ptr_mask);
1683 }
1684
1685 int evergreen_irq_process(struct radeon_device *rdev)
1686 {
1687         u32 wptr = evergreen_get_ih_wptr(rdev);
1688         u32 rptr = rdev->ih.rptr;
1689         u32 src_id, src_data;
1690         u32 ring_index;
1691         u32 disp_int, disp_int_cont, disp_int_cont2;
1692         u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1693         unsigned long flags;
1694         bool queue_hotplug = false;
1695
1696         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
1697         if (!rdev->ih.enabled)
1698                 return IRQ_NONE;
1699
1700         spin_lock_irqsave(&rdev->ih.lock, flags);
1701
1702         if (rptr == wptr) {
1703                 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1704                 return IRQ_NONE;
1705         }
1706         if (rdev->shutdown) {
1707                 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1708                 return IRQ_NONE;
1709         }
1710
1711 restart_ih:
1712         /* display interrupts */
1713         evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1714                           &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1715
1716         rdev->ih.wptr = wptr;
1717         while (rptr != wptr) {
1718                 /* wptr/rptr are in bytes! */
1719                 ring_index = rptr / 4;
1720                 src_id =  rdev->ih.ring[ring_index] & 0xff;
1721                 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
1722
1723                 switch (src_id) {
1724                 case 1: /* D1 vblank/vline */
1725                         switch (src_data) {
1726                         case 0: /* D1 vblank */
1727                                 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
1728                                         drm_handle_vblank(rdev->ddev, 0);
1729                                         wake_up(&rdev->irq.vblank_queue);
1730                                         disp_int &= ~LB_D1_VBLANK_INTERRUPT;
1731                                         DRM_DEBUG("IH: D1 vblank\n");
1732                                 }
1733                                 break;
1734                         case 1: /* D1 vline */
1735                                 if (disp_int & LB_D1_VLINE_INTERRUPT) {
1736                                         disp_int &= ~LB_D1_VLINE_INTERRUPT;
1737                                         DRM_DEBUG("IH: D1 vline\n");
1738                                 }
1739                                 break;
1740                         default:
1741                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1742                                 break;
1743                         }
1744                         break;
1745                 case 2: /* D2 vblank/vline */
1746                         switch (src_data) {
1747                         case 0: /* D2 vblank */
1748                                 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
1749                                         drm_handle_vblank(rdev->ddev, 1);
1750                                         wake_up(&rdev->irq.vblank_queue);
1751                                         disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
1752                                         DRM_DEBUG("IH: D2 vblank\n");
1753                                 }
1754                                 break;
1755                         case 1: /* D2 vline */
1756                                 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
1757                                         disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
1758                                         DRM_DEBUG("IH: D2 vline\n");
1759                                 }
1760                                 break;
1761                         default:
1762                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1763                                 break;
1764                         }
1765                         break;
1766                 case 3: /* D3 vblank/vline */
1767                         switch (src_data) {
1768                         case 0: /* D3 vblank */
1769                                 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
1770                                         drm_handle_vblank(rdev->ddev, 2);
1771                                         wake_up(&rdev->irq.vblank_queue);
1772                                         disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
1773                                         DRM_DEBUG("IH: D3 vblank\n");
1774                                 }
1775                                 break;
1776                         case 1: /* D3 vline */
1777                                 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
1778                                         disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
1779                                         DRM_DEBUG("IH: D3 vline\n");
1780                                 }
1781                                 break;
1782                         default:
1783                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1784                                 break;
1785                         }
1786                         break;
1787                 case 4: /* D4 vblank/vline */
1788                         switch (src_data) {
1789                         case 0: /* D4 vblank */
1790                                 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
1791                                         drm_handle_vblank(rdev->ddev, 3);
1792                                         wake_up(&rdev->irq.vblank_queue);
1793                                         disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
1794                                         DRM_DEBUG("IH: D4 vblank\n");
1795                                 }
1796                                 break;
1797                         case 1: /* D4 vline */
1798                                 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
1799                                         disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
1800                                         DRM_DEBUG("IH: D4 vline\n");
1801                                 }
1802                                 break;
1803                         default:
1804                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1805                                 break;
1806                         }
1807                         break;
1808                 case 5: /* D5 vblank/vline */
1809                         switch (src_data) {
1810                         case 0: /* D5 vblank */
1811                                 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
1812                                         drm_handle_vblank(rdev->ddev, 4);
1813                                         wake_up(&rdev->irq.vblank_queue);
1814                                         disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
1815                                         DRM_DEBUG("IH: D5 vblank\n");
1816                                 }
1817                                 break;
1818                         case 1: /* D5 vline */
1819                                 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
1820                                         disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
1821                                         DRM_DEBUG("IH: D5 vline\n");
1822                                 }
1823                                 break;
1824                         default:
1825                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1826                                 break;
1827                         }
1828                         break;
1829                 case 6: /* D6 vblank/vline */
1830                         switch (src_data) {
1831                         case 0: /* D6 vblank */
1832                                 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
1833                                         drm_handle_vblank(rdev->ddev, 5);
1834                                         wake_up(&rdev->irq.vblank_queue);
1835                                         disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
1836                                         DRM_DEBUG("IH: D6 vblank\n");
1837                                 }
1838                                 break;
1839                         case 1: /* D6 vline */
1840                                 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
1841                                         disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
1842                                         DRM_DEBUG("IH: D6 vline\n");
1843                                 }
1844                                 break;
1845                         default:
1846                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1847                                 break;
1848                         }
1849                         break;
1850                 case 42: /* HPD hotplug */
1851                         switch (src_data) {
1852                         case 0:
1853                                 if (disp_int & DC_HPD1_INTERRUPT) {
1854                                         disp_int &= ~DC_HPD1_INTERRUPT;
1855                                         queue_hotplug = true;
1856                                         DRM_DEBUG("IH: HPD1\n");
1857                                 }
1858                                 break;
1859                         case 1:
1860                                 if (disp_int_cont & DC_HPD2_INTERRUPT) {
1861                                         disp_int_cont &= ~DC_HPD2_INTERRUPT;
1862                                         queue_hotplug = true;
1863                                         DRM_DEBUG("IH: HPD2\n");
1864                                 }
1865                                 break;
1866                         case 2:
1867                                 if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
1868                                         disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
1869                                         queue_hotplug = true;
1870                                         DRM_DEBUG("IH: HPD3\n");
1871                                 }
1872                                 break;
1873                         case 3:
1874                                 if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
1875                                         disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
1876                                         queue_hotplug = true;
1877                                         DRM_DEBUG("IH: HPD4\n");
1878                                 }
1879                                 break;
1880                         case 4:
1881                                 if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
1882                                         disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
1883                                         queue_hotplug = true;
1884                                         DRM_DEBUG("IH: HPD5\n");
1885                                 }
1886                                 break;
1887                         case 5:
1888                                 if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
1889                                         disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
1890                                         queue_hotplug = true;
1891                                         DRM_DEBUG("IH: HPD6\n");
1892                                 }
1893                                 break;
1894                         default:
1895                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1896                                 break;
1897                         }
1898                         break;
1899                 case 176: /* CP_INT in ring buffer */
1900                 case 177: /* CP_INT in IB1 */
1901                 case 178: /* CP_INT in IB2 */
1902                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
1903                         radeon_fence_process(rdev);
1904                         break;
1905                 case 181: /* CP EOP event */
1906                         DRM_DEBUG("IH: CP EOP\n");
1907                         break;
1908                 case 233: /* GUI IDLE */
1909                         DRM_DEBUG("IH: CP EOP\n");
1910                         rdev->pm.gui_idle = true;
1911                         wake_up(&rdev->irq.idle_queue);
1912                         break;
1913                 default:
1914                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1915                         break;
1916                 }
1917
1918                 /* wptr/rptr are in bytes! */
1919                 rptr += 16;
1920                 rptr &= rdev->ih.ptr_mask;
1921         }
1922         /* make sure wptr hasn't changed while processing */
1923         wptr = evergreen_get_ih_wptr(rdev);
1924         if (wptr != rdev->ih.wptr)
1925                 goto restart_ih;
1926         if (queue_hotplug)
1927                 queue_work(rdev->wq, &rdev->hotplug_work);
1928         rdev->ih.rptr = rptr;
1929         WREG32(IH_RB_RPTR, rdev->ih.rptr);
1930         spin_unlock_irqrestore(&rdev->ih.lock, flags);
1931         return IRQ_HANDLED;
1932 }
1933
/* Bring the ASIC up: load microcode if needed, program the MC, enable
 * AGP or the PCIE GART, init the gfx engine, IH and CP, and enable the
 * write-back buffer.  Shared by init and resume paths.
 * Returns 0 on success or a negative error code.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
        int r;

        /* lazily load PFP/ME/RLC microcode on first startup */
        if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
                r = r600_init_microcode(rdev);
                if (r) {
                        DRM_ERROR("Failed to load firmware!\n");
                        return r;
                }
        }

        evergreen_mc_program(rdev);
        if (rdev->flags & RADEON_IS_AGP) {
                evergreen_agp_enable(rdev);
        } else {
                r = evergreen_pcie_gart_enable(rdev);
                if (r)
                        return r;
        }
        evergreen_gpu_init(rdev);
/* blit support is not wired up for evergreen yet */
#if 0
        if (!rdev->r600_blit.shader_obj) {
                r = r600_blit_init(rdev);
                if (r) {
                        DRM_ERROR("radeon: failed blitter (%d).\n", r);
                        return r;
                }
        }

        r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
        if (unlikely(r != 0))
                return r;
        r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
                        &rdev->r600_blit.shader_gpu_addr);
        radeon_bo_unreserve(rdev->r600_blit.shader_obj);
        if (r) {
                DRM_ERROR("failed to pin blit object %d\n", r);
                return r;
        }
#endif

        /* Enable IRQ */
        r = r600_irq_init(rdev);
        if (r) {
                DRM_ERROR("radeon: IH init failed (%d).\n", r);
                radeon_irq_kms_fini(rdev);
                return r;
        }
        evergreen_irq_set(rdev);

        /* bring up the command processor ring */
        r = radeon_ring_init(rdev, rdev->cp.ring_size);
        if (r)
                return r;
        r = evergreen_cp_load_microcode(rdev);
        if (r)
                return r;
        r = evergreen_cp_resume(rdev);
        if (r)
                return r;
        /* write back buffer are not vital so don't worry about failure */
        r600_wb_enable(rdev);

        return 0;
}
1999
2000 int evergreen_resume(struct radeon_device *rdev)
2001 {
2002         int r;
2003
2004         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
2005          * posting will perform necessary task to bring back GPU into good
2006          * shape.
2007          */
2008         /* post card */
2009         atom_asic_init(rdev->mode_info.atom_context);
2010         /* Initialize clocks */
2011         r = radeon_clocks_init(rdev);
2012         if (r) {
2013                 return r;
2014         }
2015
2016         r = evergreen_startup(rdev);
2017         if (r) {
2018                 DRM_ERROR("r600 startup failed on resume\n");
2019                 return r;
2020         }
2021
2022         r = r600_ib_test(rdev);
2023         if (r) {
2024                 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
2025                 return r;
2026         }
2027
2028         return r;
2029
2030 }
2031
/* Suspend the ASIC: stop the CP, tear down interrupts, disable the
 * write-back buffer and the PCIE GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
        int r;
#endif
        /* FIXME: we should wait for ring to be empty */
        r700_cp_stop(rdev);
        rdev->cp.ready = false;
        evergreen_irq_suspend(rdev);
        r600_wb_disable(rdev);
        evergreen_pcie_gart_disable(rdev);
/* blit support is not wired up for evergreen yet */
#if 0
        /* unpin shaders bo */
        r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
        if (likely(r == 0)) {
                radeon_bo_unpin(rdev->r600_blit.shader_obj);
                radeon_bo_unreserve(rdev->r600_blit.shader_obj);
        }
#endif
        return 0;
}
2053
2054 static bool evergreen_card_posted(struct radeon_device *rdev)
2055 {
2056         u32 reg;
2057
2058         /* first check CRTCs */
2059         reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2060                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
2061                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
2062                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
2063                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
2064                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
2065         if (reg & EVERGREEN_CRTC_MASTER_EN)
2066                 return true;
2067
2068         /* then check MEM_SIZE, in case the crtcs are off */
2069         if (RREG32(CONFIG_MEMSIZE))
2070                 return true;
2071
2072         return false;
2073 }
2074
2075 /* Plan is to move initialization in that function and use
2076  * helper function so that radeon_device_init pretty much
2077  * do nothing more than calling asic specific function. This
2078  * should also allow to remove a bunch of callback function
2079  * like vram_info.
2080  */
2081 int evergreen_init(struct radeon_device *rdev)
2082 {
2083         int r;
2084
2085         r = radeon_dummy_page_init(rdev);
2086         if (r)
2087                 return r;
2088         /* This don't do much */
2089         r = radeon_gem_init(rdev);
2090         if (r)
2091                 return r;
2092         /* Read BIOS */
2093         if (!radeon_get_bios(rdev)) {
2094                 if (ASIC_IS_AVIVO(rdev))
2095                         return -EINVAL;
2096         }
2097         /* Must be an ATOMBIOS */
2098         if (!rdev->is_atom_bios) {
2099                 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
2100                 return -EINVAL;
2101         }
2102         r = radeon_atombios_init(rdev);
2103         if (r)
2104                 return r;
2105         /* Post card if necessary */
2106         if (!evergreen_card_posted(rdev)) {
2107                 if (!rdev->bios) {
2108                         dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2109                         return -EINVAL;
2110                 }
2111                 DRM_INFO("GPU not posted. posting now...\n");
2112                 atom_asic_init(rdev->mode_info.atom_context);
2113         }
2114         /* Initialize scratch registers */
2115         r600_scratch_init(rdev);
2116         /* Initialize surface registers */
2117         radeon_surface_init(rdev);
2118         /* Initialize clocks */
2119         radeon_get_clock_info(rdev->ddev);
2120         r = radeon_clocks_init(rdev);
2121         if (r)
2122                 return r;
2123         /* Fence driver */
2124         r = radeon_fence_driver_init(rdev);
2125         if (r)
2126                 return r;
2127         /* initialize AGP */
2128         if (rdev->flags & RADEON_IS_AGP) {
2129                 r = radeon_agp_init(rdev);
2130                 if (r)
2131                         radeon_agp_disable(rdev);
2132         }
2133         /* initialize memory controller */
2134         r = evergreen_mc_init(rdev);
2135         if (r)
2136                 return r;
2137         /* Memory manager */
2138         r = radeon_bo_init(rdev);
2139         if (r)
2140                 return r;
2141
2142         r = radeon_irq_kms_init(rdev);
2143         if (r)
2144                 return r;
2145
2146         rdev->cp.ring_obj = NULL;
2147         r600_ring_init(rdev, 1024 * 1024);
2148
2149         rdev->ih.ring_obj = NULL;
2150         r600_ih_ring_init(rdev, 64 * 1024);
2151
2152         r = r600_pcie_gart_init(rdev);
2153         if (r)
2154                 return r;
2155
2156         rdev->accel_working = false;
2157         r = evergreen_startup(rdev);
2158         if (r) {
2159                 dev_err(rdev->dev, "disabling GPU acceleration\n");
2160                 r700_cp_fini(rdev);
2161                 r600_wb_fini(rdev);
2162                 r600_irq_fini(rdev);
2163                 radeon_irq_kms_fini(rdev);
2164                 evergreen_pcie_gart_fini(rdev);
2165                 rdev->accel_working = false;
2166         }
2167         if (rdev->accel_working) {
2168                 r = radeon_ib_pool_init(rdev);
2169                 if (r) {
2170                         DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
2171                         rdev->accel_working = false;
2172                 }
2173                 r = r600_ib_test(rdev);
2174                 if (r) {
2175                         DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2176                         rdev->accel_working = false;
2177                 }
2178         }
2179         return 0;
2180 }
2181
/* Full driver teardown, roughly reversing evergreen_init(): stop engines
 * and IRQs first, then GART, then GEM/fence/clock/AGP/object managers,
 * and release atombios state and the BIOS copy last.
 */
void evergreen_fini(struct radeon_device *rdev)
{
        /*r600_blit_fini(rdev);*/ /* blit support not wired up yet */
        r700_cp_fini(rdev);
        r600_wb_fini(rdev);
        r600_irq_fini(rdev);
        radeon_irq_kms_fini(rdev);
        evergreen_pcie_gart_fini(rdev);
        radeon_gem_fini(rdev);
        radeon_fence_driver_fini(rdev);
        radeon_clocks_fini(rdev);
        radeon_agp_fini(rdev);
        radeon_bo_fini(rdev);
        radeon_atombios_fini(rdev);
        /* rdev->bios was allocated by radeon_get_bios(); drop our copy */
        kfree(rdev->bios);
        rdev->bios = NULL;
        radeon_dummy_page_fini(rdev);
}