26,6 → 26,7 |
#include <drm/drmP.h> |
#include "radeon.h" |
#include "radeon_asic.h" |
#include "radeon_audio.h" |
#include <drm/radeon_drm.h> |
#include "evergreend.h" |
#include "atom.h" |
34,6 → 35,75 |
#include "evergreen_blit_shaders.h" |
#include "radeon_ucode.h" |
|
/* |
* Indirect registers accessor |
*/ |
/* Read an indirect CG register.  The index/data cycle is serialized with
 * cg_idx_lock so a concurrent accessor cannot retarget the index register
 * between our address write and data read.
 */
u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long irq_flags;
	u32 val;

	spin_lock_irqsave(&rdev->cg_idx_lock, irq_flags);
	WREG32(EVERGREEN_CG_IND_ADDR, reg & 0xffff);
	val = RREG32(EVERGREEN_CG_IND_DATA);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, irq_flags);

	return val;
}
|
/* Write @v to an indirect CG register, holding cg_idx_lock across the
 * index/data pair so the two MMIO cycles cannot be interleaved.
 */
void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long irq_flags;

	spin_lock_irqsave(&rdev->cg_idx_lock, irq_flags);
	WREG32(EVERGREEN_CG_IND_ADDR, reg & 0xffff);
	WREG32(EVERGREEN_CG_IND_DATA, v);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, irq_flags);
}
|
/* Read an indirect PIF PHY0 register; pif_idx_lock guards the shared
 * index/data register pair.
 */
u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long irq_flags;
	u32 val;

	spin_lock_irqsave(&rdev->pif_idx_lock, irq_flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, reg & 0xffff);
	val = RREG32(EVERGREEN_PIF_PHY0_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, irq_flags);

	return val;
}
|
/* Write @v to an indirect PIF PHY0 register under pif_idx_lock. */
void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long irq_flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, irq_flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, reg & 0xffff);
	WREG32(EVERGREEN_PIF_PHY0_DATA, v);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, irq_flags);
}
|
/* Read an indirect PIF PHY1 register; shares pif_idx_lock with the PHY0
 * accessors since both use the same style of index/data pair.
 */
u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long irq_flags;
	u32 val;

	spin_lock_irqsave(&rdev->pif_idx_lock, irq_flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, reg & 0xffff);
	val = RREG32(EVERGREEN_PIF_PHY1_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, irq_flags);

	return val;
}
|
/* Write @v to an indirect PIF PHY1 register under pif_idx_lock. */
void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long irq_flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, irq_flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, reg & 0xffff);
	WREG32(EVERGREEN_PIF_PHY1_DATA, v);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, irq_flags);
}
|
static const u32 crtc_offsets[6] = |
{ |
EVERGREEN_CRTC0_REGISTER_OFFSET, |
1005,6 → 1075,34 |
} |
} |
|
/** |
* evergreen_get_allowed_info_register - fetch the register for the info ioctl |
* |
* @rdev: radeon_device pointer |
* @reg: register offset in bytes |
* @val: register value |
* |
* Returns 0 for success or -EINVAL for an invalid register |
* |
*/ |
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val)
{
	/* whitelist of status registers userspace may query */
	static const u32 allowed_regs[] = {
		GRBM_STATUS,
		GRBM_STATUS_SE0,
		GRBM_STATUS_SE1,
		SRBM_STATUS,
		SRBM_STATUS2,
		DMA_STATUS_REG,
		UVD_STATUS,
	};
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(allowed_regs); i++) {
		if (reg == allowed_regs[i]) {
			*val = RREG32(reg);
			return 0;
		}
	}

	/* anything not on the whitelist is rejected */
	return -EINVAL;
}
|
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw, |
unsigned *bankh, unsigned *mtaspect, |
unsigned *tile_split) |
1103,9 → 1201,9 |
return 0; |
} |
|
// r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000, |
// 16384, 0x03FFFFFF, 0, 128, 5, |
// &fb_div, &vclk_div, &dclk_div); |
r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000, |
16384, 0x03FFFFFF, 0, 128, 5, |
&fb_div, &vclk_div, &dclk_div); |
if (r) |
return r; |
|
1121,9 → 1219,9 |
|
mdelay(1); |
|
// r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); |
// if (r) |
// return r; |
r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); |
if (r) |
return r; |
|
/* assert UPLL_RESET again */ |
WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK); |
1158,9 → 1256,9 |
/* switch from bypass mode to normal mode */ |
WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK); |
|
// r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); |
// if (r) |
// return r; |
r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); |
if (r) |
return r; |
|
/* switch VCLK and DCLK selection */ |
WREG32_P(CG_UPLL_FUNC_CNTL_2, |
1306,46 → 1404,22 |
* @crtc_id: crtc to cleanup pageflip on |
* @crtc_base: new address of the crtc (GPU MC address) |
* |
* Does the actual pageflip (evergreen+). |
* During vblank we take the crtc lock and wait for the update_pending |
* bit to go high, when it does, we release the lock, and allow the |
* double buffered update to take place. |
* Returns the current update pending status. |
* Triggers the actual pageflip by updating the primary |
* surface base address (evergreen+). |
*/ |
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	/* current GRPH_UPDATE value; the lock bit is ORed in below and
	 * cleared again at the end */
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high. */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
		/* post the write */
		/* NOTE(review): this posting read sits inside the poll loop,
		 * while the surrounding doc comment describes the slimmer
		 * "just trigger the flip" variant; this looks like a merge of
		 * two versions of the function - confirm against upstream. */
		RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
}
|
/** |
* evergreen_page_flip_pending - check if page flip is still pending |
* |
2298,6 → 2372,9 |
c.full = dfixed_div(c, a); |
priority_b_mark = dfixed_trunc(c); |
priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK; |
|
/* Save number of lines the linebuffer leads before the scanout */ |
radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay); |
} |
|
/* select wm A */ |
3252,6 → 3329,8 |
} |
|
WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff)); |
WREG32(SRBM_INT_CNTL, 0x1); |
WREG32(SRBM_INT_ACK, 0x1); |
|
evergreen_fix_pci_max_read_req_size(rdev); |
|
4323,6 → 4402,7 |
tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE; |
WREG32(DMA_CNTL, tmp); |
WREG32(GRBM_INT_CNTL, 0); |
WREG32(SRBM_INT_CNTL, 0); |
WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0); |
WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0); |
if (rdev->num_crtc >= 4) { |
4388,12 → 4468,12 |
return 0; |
} |
|
hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN; |
hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN; |
hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN; |
hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN; |
hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN; |
hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN; |
hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN); |
hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN); |
hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN); |
hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN); |
hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN); |
hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN); |
if (rdev->family == CHIP_ARUBA) |
thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) & |
~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW); |
4482,27 → 4562,27 |
} |
if (rdev->irq.hpd[0]) { |
DRM_DEBUG("evergreen_irq_set: hpd 1\n"); |
hpd1 |= DC_HPDx_INT_EN; |
hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN; |
} |
if (rdev->irq.hpd[1]) { |
DRM_DEBUG("evergreen_irq_set: hpd 2\n"); |
hpd2 |= DC_HPDx_INT_EN; |
hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN; |
} |
if (rdev->irq.hpd[2]) { |
DRM_DEBUG("evergreen_irq_set: hpd 3\n"); |
hpd3 |= DC_HPDx_INT_EN; |
hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN; |
} |
if (rdev->irq.hpd[3]) { |
DRM_DEBUG("evergreen_irq_set: hpd 4\n"); |
hpd4 |= DC_HPDx_INT_EN; |
hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN; |
} |
if (rdev->irq.hpd[4]) { |
DRM_DEBUG("evergreen_irq_set: hpd 5\n"); |
hpd5 |= DC_HPDx_INT_EN; |
hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN; |
} |
if (rdev->irq.hpd[5]) { |
DRM_DEBUG("evergreen_irq_set: hpd 6\n"); |
hpd6 |= DC_HPDx_INT_EN; |
hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN; |
} |
if (rdev->irq.afmt[0]) { |
DRM_DEBUG("evergreen_irq_set: hdmi 0\n"); |
4589,6 → 4669,9 |
WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5); |
WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6); |
|
/* posting read */ |
RREG32(SRBM_STATUS); |
|
return 0; |
} |
|
4693,6 → 4776,38 |
tmp |= DC_HPDx_INT_ACK; |
WREG32(DC_HPD6_INT_CONTROL, tmp); |
} |
|
if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) { |
tmp = RREG32(DC_HPD1_INT_CONTROL); |
tmp |= DC_HPDx_RX_INT_ACK; |
WREG32(DC_HPD1_INT_CONTROL, tmp); |
} |
if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) { |
tmp = RREG32(DC_HPD2_INT_CONTROL); |
tmp |= DC_HPDx_RX_INT_ACK; |
WREG32(DC_HPD2_INT_CONTROL, tmp); |
} |
if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) { |
tmp = RREG32(DC_HPD3_INT_CONTROL); |
tmp |= DC_HPDx_RX_INT_ACK; |
WREG32(DC_HPD3_INT_CONTROL, tmp); |
} |
if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) { |
tmp = RREG32(DC_HPD4_INT_CONTROL); |
tmp |= DC_HPDx_RX_INT_ACK; |
WREG32(DC_HPD4_INT_CONTROL, tmp); |
} |
if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) { |
tmp = RREG32(DC_HPD5_INT_CONTROL); |
tmp |= DC_HPDx_RX_INT_ACK; |
WREG32(DC_HPD5_INT_CONTROL, tmp); |
} |
if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) { |
tmp = RREG32(DC_HPD5_INT_CONTROL); |
tmp |= DC_HPDx_RX_INT_ACK; |
WREG32(DC_HPD6_INT_CONTROL, tmp); |
} |
|
if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) { |
tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET); |
tmp |= AFMT_AZ_FORMAT_WTRIG_ACK; |
4773,6 → 4888,7 |
u32 ring_index; |
bool queue_hotplug = false; |
bool queue_hdmi = false; |
bool queue_dp = false; |
bool queue_thermal = false; |
u32 status, addr; |
|
4787,7 → 4903,7 |
return IRQ_NONE; |
|
rptr = rdev->ih.rptr; |
DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr); |
DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr); |
|
/* Order reading of wptr vs. reading of IH ring data */ |
rmb(); |
4805,23 → 4921,27 |
case 1: /* D1 vblank/vline */ |
switch (src_data) { |
case 0: /* D1 vblank */ |
if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)) |
DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n"); |
|
if (rdev->irq.crtc_vblank_int[0]) { |
// drm_handle_vblank(rdev->ddev, 0); |
drm_handle_vblank(rdev->ddev, 0); |
rdev->pm.vblank_sync = true; |
// wake_up(&rdev->irq.vblank_queue); |
wake_up(&rdev->irq.vblank_queue); |
} |
// if (rdev->irq.pflip[0]) |
// radeon_crtc_handle_flip(rdev, 0); |
if (atomic_read(&rdev->irq.pflip[0])) |
radeon_crtc_handle_vblank(rdev, 0); |
rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT; |
DRM_DEBUG("IH: D1 vblank\n"); |
} |
|
break; |
case 1: /* D1 vline */ |
if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)) |
DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT; |
DRM_DEBUG("IH: D1 vline\n"); |
} |
|
break; |
default: |
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data); |
4831,23 → 4951,27 |
case 2: /* D2 vblank/vline */ |
switch (src_data) { |
case 0: /* D2 vblank */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)) |
DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n"); |
|
if (rdev->irq.crtc_vblank_int[1]) { |
// drm_handle_vblank(rdev->ddev, 1); |
drm_handle_vblank(rdev->ddev, 1); |
rdev->pm.vblank_sync = true; |
// wake_up(&rdev->irq.vblank_queue); |
wake_up(&rdev->irq.vblank_queue); |
} |
// if (rdev->irq.pflip[1]) |
// radeon_crtc_handle_flip(rdev, 1); |
if (atomic_read(&rdev->irq.pflip[1])) |
radeon_crtc_handle_vblank(rdev, 1); |
rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT; |
DRM_DEBUG("IH: D2 vblank\n"); |
} |
|
break; |
case 1: /* D2 vline */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)) |
DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT; |
DRM_DEBUG("IH: D2 vline\n"); |
} |
|
break; |
default: |
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data); |
4857,23 → 4981,27 |
case 3: /* D3 vblank/vline */ |
switch (src_data) { |
case 0: /* D3 vblank */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)) |
DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n"); |
|
if (rdev->irq.crtc_vblank_int[2]) { |
// drm_handle_vblank(rdev->ddev, 2); |
drm_handle_vblank(rdev->ddev, 2); |
rdev->pm.vblank_sync = true; |
// wake_up(&rdev->irq.vblank_queue); |
wake_up(&rdev->irq.vblank_queue); |
} |
// if (rdev->irq.pflip[2]) |
// radeon_crtc_handle_flip(rdev, 2); |
if (atomic_read(&rdev->irq.pflip[2])) |
radeon_crtc_handle_vblank(rdev, 2); |
rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT; |
DRM_DEBUG("IH: D3 vblank\n"); |
} |
|
break; |
case 1: /* D3 vline */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)) |
DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT; |
DRM_DEBUG("IH: D3 vline\n"); |
} |
|
break; |
default: |
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data); |
4883,23 → 5011,27 |
case 4: /* D4 vblank/vline */ |
switch (src_data) { |
case 0: /* D4 vblank */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)) |
DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n"); |
|
if (rdev->irq.crtc_vblank_int[3]) { |
// drm_handle_vblank(rdev->ddev, 3); |
drm_handle_vblank(rdev->ddev, 3); |
rdev->pm.vblank_sync = true; |
// wake_up(&rdev->irq.vblank_queue); |
wake_up(&rdev->irq.vblank_queue); |
} |
// if (rdev->irq.pflip[3]) |
// radeon_crtc_handle_flip(rdev, 3); |
if (atomic_read(&rdev->irq.pflip[3])) |
radeon_crtc_handle_vblank(rdev, 3); |
rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT; |
DRM_DEBUG("IH: D4 vblank\n"); |
} |
|
break; |
case 1: /* D4 vline */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)) |
DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT; |
DRM_DEBUG("IH: D4 vline\n"); |
} |
|
break; |
default: |
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data); |
4909,23 → 5041,27 |
case 5: /* D5 vblank/vline */ |
switch (src_data) { |
case 0: /* D5 vblank */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)) |
DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n"); |
|
if (rdev->irq.crtc_vblank_int[4]) { |
// drm_handle_vblank(rdev->ddev, 4); |
drm_handle_vblank(rdev->ddev, 4); |
rdev->pm.vblank_sync = true; |
// wake_up(&rdev->irq.vblank_queue); |
wake_up(&rdev->irq.vblank_queue); |
} |
// if (rdev->irq.pflip[4]) |
// radeon_crtc_handle_flip(rdev, 4); |
if (atomic_read(&rdev->irq.pflip[4])) |
radeon_crtc_handle_vblank(rdev, 4); |
rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT; |
DRM_DEBUG("IH: D5 vblank\n"); |
} |
|
break; |
case 1: /* D5 vline */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)) |
DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT; |
DRM_DEBUG("IH: D5 vline\n"); |
} |
|
break; |
default: |
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data); |
4935,23 → 5071,27 |
case 6: /* D6 vblank/vline */ |
switch (src_data) { |
case 0: /* D6 vblank */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)) |
DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n"); |
|
if (rdev->irq.crtc_vblank_int[5]) { |
// drm_handle_vblank(rdev->ddev, 5); |
drm_handle_vblank(rdev->ddev, 5); |
rdev->pm.vblank_sync = true; |
// wake_up(&rdev->irq.vblank_queue); |
wake_up(&rdev->irq.vblank_queue); |
} |
// if (rdev->irq.pflip[5]) |
// radeon_crtc_handle_flip(rdev, 5); |
if (atomic_read(&rdev->irq.pflip[5])) |
radeon_crtc_handle_vblank(rdev, 5); |
rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT; |
DRM_DEBUG("IH: D6 vblank\n"); |
} |
|
break; |
case 1: /* D6 vline */ |
if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)) |
DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT; |
DRM_DEBUG("IH: D6 vline\n"); |
} |
|
break; |
default: |
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data); |
4969,47 → 5109,101 |
case 42: /* HPD hotplug */ |
switch (src_data) { |
case 0: |
if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT; |
queue_hotplug = true; |
DRM_DEBUG("IH: HPD1\n"); |
} |
break; |
case 1: |
if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT; |
queue_hotplug = true; |
DRM_DEBUG("IH: HPD2\n"); |
} |
break; |
case 2: |
if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT; |
queue_hotplug = true; |
DRM_DEBUG("IH: HPD3\n"); |
} |
break; |
case 3: |
if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT; |
queue_hotplug = true; |
DRM_DEBUG("IH: HPD4\n"); |
} |
break; |
case 4: |
if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT; |
queue_hotplug = true; |
DRM_DEBUG("IH: HPD5\n"); |
} |
break; |
case 5: |
if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) { |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT; |
queue_hotplug = true; |
DRM_DEBUG("IH: HPD6\n"); |
} |
break; |
case 6: |
if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT; |
queue_dp = true; |
DRM_DEBUG("IH: HPD_RX 1\n"); |
break; |
case 7: |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT; |
queue_dp = true; |
DRM_DEBUG("IH: HPD_RX 2\n"); |
break; |
case 8: |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT; |
queue_dp = true; |
DRM_DEBUG("IH: HPD_RX 3\n"); |
break; |
case 9: |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT; |
queue_dp = true; |
DRM_DEBUG("IH: HPD_RX 4\n"); |
break; |
case 10: |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT; |
queue_dp = true; |
DRM_DEBUG("IH: HPD_RX 5\n"); |
break; |
case 11: |
if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT; |
queue_dp = true; |
DRM_DEBUG("IH: HPD_RX 6\n"); |
break; |
default: |
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data); |
break; |
5018,51 → 5212,61 |
case 44: /* hdmi */ |
switch (src_data) { |
case 0: |
if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) { |
if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG; |
queue_hdmi = true; |
DRM_DEBUG("IH: HDMI0\n"); |
} |
break; |
case 1: |
if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) { |
if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG; |
queue_hdmi = true; |
DRM_DEBUG("IH: HDMI1\n"); |
} |
break; |
case 2: |
if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) { |
if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG; |
queue_hdmi = true; |
DRM_DEBUG("IH: HDMI2\n"); |
} |
break; |
case 3: |
if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) { |
if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG; |
queue_hdmi = true; |
DRM_DEBUG("IH: HDMI3\n"); |
} |
break; |
case 4: |
if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) { |
if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG; |
queue_hdmi = true; |
DRM_DEBUG("IH: HDMI4\n"); |
} |
break; |
case 5: |
if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) { |
if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG)) |
DRM_DEBUG("IH: IH event w/o asserted irq bit?\n"); |
|
rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG; |
queue_hdmi = true; |
DRM_DEBUG("IH: HDMI5\n"); |
} |
break; |
default: |
DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data); |
break; |
} |
case 96: |
DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR)); |
WREG32(SRBM_INT_ACK, 0x1); |
break; |
case 124: /* UVD */ |
DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data); |
radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX); |
5213,13 → 5417,13 |
return r; |
} |
|
// r = rv770_uvd_resume(rdev); |
// if (!r) { |
// r = radeon_fence_driver_start_ring(rdev, |
// R600_RING_TYPE_UVD_INDEX); |
// if (r) |
// dev_err(rdev->dev, "UVD fences init error (%d).\n", r); |
// } |
r = uvd_v2_2_resume(rdev); |
if (!r) { |
r = radeon_fence_driver_start_ring(rdev, |
R600_RING_TYPE_UVD_INDEX); |
if (r) |
dev_err(rdev->dev, "UVD fences init error (%d).\n", r); |
} |
|
if (r) |
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; |
5234,7 → 5438,7 |
r = r600_irq_init(rdev); |
if (r) { |
DRM_ERROR("radeon: IH init failed (%d).\n", r); |
// radeon_irq_kms_fini(rdev); |
radeon_irq_kms_fini(rdev); |
return r; |
} |
evergreen_irq_set(rdev); |
5261,7 → 5465,17 |
if (r) |
return r; |
|
ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; |
if (ring->ring_size) { |
r = radeon_ring_init(rdev, ring, ring->ring_size, 0, |
RADEON_CP_PACKET2); |
if (!r) |
r = uvd_v1_0_init(rdev); |
|
if (r) |
DRM_ERROR("radeon: error initializing UVD (%d).\n", r); |
} |
|
r = radeon_ib_pool_init(rdev); |
if (r) { |
dev_err(rdev->dev, "IB initialization failed (%d).\n", r); |
5365,12 → 5579,12 |
rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL; |
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024); |
|
// r = radeon_uvd_init(rdev); |
// if (!r) { |
// rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; |
// r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], |
// 4096); |
// } |
r = radeon_uvd_init(rdev); |
if (!r) { |
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; |
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], |
4096); |
} |
|
rdev->ih.ring_obj = NULL; |
r600_ih_ring_init(rdev, 64 * 1024); |
5383,6 → 5597,15 |
r = evergreen_startup(rdev); |
if (r) { |
dev_err(rdev->dev, "disabling GPU acceleration\n"); |
r700_cp_fini(rdev); |
r600_dma_fini(rdev); |
r600_irq_fini(rdev); |
if (rdev->flags & RADEON_IS_IGP) |
sumo_rlc_fini(rdev); |
radeon_wb_fini(rdev); |
radeon_ib_pool_fini(rdev); |
radeon_irq_kms_fini(rdev); |
evergreen_pcie_gart_fini(rdev); |
rdev->accel_working = false; |
} |
|
5400,6 → 5623,30 |
return 0; |
} |
|
/* Full teardown for evergreen ASICs: stop engines and interrupt handling,
 * then release memory-management, display and BIOS state, roughly in the
 * reverse order of init/startup.  Call order below is significant. */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	/* stop the CP and DMA engines before tearing down IRQs */
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	/* RLC buffers are only allocated on IGP parts */
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	/* UVD ring and firmware/bo teardown */
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	/* free the cached copy of the video BIOS */
	kfree(rdev->bios);
	rdev->bios = NULL;
}
|
void evergreen_pcie_gen2_enable(struct radeon_device *rdev) |
{ |