1 #ifndef _INTEL_RINGBUFFER_H_
2 #define _INTEL_RINGBUFFER_H_
4 struct intel_hw_status_page {
5 u32 __iomem *page_addr;
7 struct drm_gem_object *obj;
/*
 * MMIO accessors for per-ring registers.  Each ring's registers sit at
 * fixed offsets from ring->mmio_base; RING_TAIL()/RING_START()/
 * RING_HEAD()/RING_CTL() (declared elsewhere in the driver) form the
 * absolute register offset.  Reads are routed through i915_safe_read();
 * writes use plain I915_WRITE.
 *
 * NOTE(review): these macros expect `dev_priv` to be in scope at every
 * call site (implicit-capture idiom used throughout this driver).
 * All macro arguments are parenthesized so expressions such as
 * I915_READ_TAIL(a ? b : c) expand correctly (CERT PRE01-C); the stray
 * pasted line numbers that broke compilation have been removed.
 */
#define I915_RING_READ(reg) i915_safe_read(dev_priv, (reg))

#define I915_READ_TAIL(ring) I915_RING_READ(RING_TAIL((ring)->mmio_base))
#define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL((ring)->mmio_base), (val))

#define I915_READ_START(ring) I915_RING_READ(RING_START((ring)->mmio_base))
#define I915_WRITE_START(ring, val) I915_WRITE(RING_START((ring)->mmio_base), (val))

#define I915_READ_HEAD(ring) I915_RING_READ(RING_HEAD((ring)->mmio_base))
#define I915_WRITE_HEAD(ring, val) I915_WRITE(RING_HEAD((ring)->mmio_base), (val))

#define I915_READ_CTL(ring) I915_RING_READ(RING_CTL((ring)->mmio_base))
#define I915_WRITE_CTL(ring, val) I915_WRITE(RING_CTL((ring)->mmio_base), (val))
24 struct drm_i915_gem_execbuffer2;
/*
 * Per-engine ring buffer state plus a vtable of engine-specific
 * operations (the render / BSD / BLT init functions below each fill
 * these hooks in differently).
 *
 * NOTE(review): this paste dropped many lines of the original header --
 * stray line numbers remain at the start of each line, several members
 * and the struct's closing brace are missing, and the block-comment
 * delimiters were stripped (re-added below).  Diff against the upstream
 * i915 header before compiling.
 */
25 struct intel_ring_buffer {
34 struct drm_device *dev;
35 struct drm_gem_object *gem_object;
/* Hardware status page (seqno breadcrumbs etc.) owned by this ring. */
41 struct intel_hw_status_page status_page;
43 u32 irq_seqno; /* last seqno seen at irq time */
/* User-interrupt refcount and the enable/disable hooks it drives. */
45 int user_irq_refcount;
46 void (*user_irq_get)(struct intel_ring_buffer *ring);
47 void (*user_irq_put)(struct intel_ring_buffer *ring);
/* One-time hardware initialization for this ring. */
49 int (*init)(struct intel_ring_buffer *ring);
/*
 * NOTE(review): the next three function-pointer declarations are each
 * missing their trailing parameter line(s) in this paste -- write_tail
 * lacks its value argument, flush its flush_domains, add_request its
 * out-parameter.  Restore from the upstream header.
 */
51 void (*write_tail)(struct intel_ring_buffer *ring,
53 void (*flush)(struct intel_ring_buffer *ring,
54 u32 invalidate_domains,
56 int (*add_request)(struct intel_ring_buffer *ring,
/* Return the most recently completed seqno for this ring. */
58 u32 (*get_seqno)(struct intel_ring_buffer *ring);
/* Submit an execbuffer batch (with cliprects) to this ring. */
59 int (*dispatch_execbuffer)(struct intel_ring_buffer *ring,
60 struct drm_i915_gem_execbuffer2 *exec,
61 struct drm_clip_rect *cliprects,
62 uint64_t exec_offset);
63 void (*cleanup)(struct intel_ring_buffer *ring);
/*
66 * List of objects currently involved in rendering from the
69 * Includes buffers having the contents of their GPU caches
70 * flushed, not necessarily primitives. last_rendering_seqno
71 * represents when the rendering involved will be completed.
73 * A reference is held on the buffer while on this list.
 */
75 struct list_head active_list;
/*
78 * List of breadcrumbs associated with GPU requests currently
 * outstanding.  (NOTE(review): tail of this comment lost in paste.)
 */
81 struct list_head request_list;
/*
84 * List of objects currently pending a GPU write flush.
86 * All elements on this list will belong to either the
87 * active_list or flushing_list, last_rendering_seqno can
88 * be used to differentiate between the two elements.
 */
90 struct list_head gpu_write_list;
/*
93 * Do we have some not yet emitted requests outstanding?
 */
95 u32 outstanding_lazy_request;
/* Wait queue; presumably woken from this ring's irq handler -- verify. */
97 wait_queue_head_t irq_queue;
/* NOTE(review): closing brace of struct intel_ring_buffer missing here. */
/*
 * Read one u32 from the ring's hardware status page at dword index
 * `reg`; goes through ioread32() because page_addr is an __iomem
 * mapping.
 *
 * NOTE(review): this paste is missing the function's storage class and
 * return type (upstream: `static inline u32`), its second parameter
 * (`int reg`), and the surrounding braces -- restore before compiling.
 */
104 intel_read_status_page(struct intel_ring_buffer *ring,
107 return ioread32(ring->status_page.page_addr + reg);
/*
 * Ring teardown and space reservation.
 *
 * intel_wait_ring_buffer(): block until at least n units of ring space
 * are free; __must_check, so callers must handle the error return.
 * intel_ring_begin(): reserve room for n ring entries before emitting.
 * NOTE(review): whether `n` counts bytes or dwords is not visible in
 * this header -- confirm against the implementing .c file.
 */
110 void intel_cleanup_ring_buffer(struct intel_ring_buffer *ring);
111 int __must_check intel_wait_ring_buffer(struct intel_ring_buffer *ring, int n);
112 int __must_check intel_ring_begin(struct intel_ring_buffer *ring, int n);
/*
 * Emit one dword into the ring at the current software tail, via
 * iowrite32() since virtual_start is an I/O mapping.
 *
 * NOTE(review): this paste is missing the second parameter line
 * (`u32 data)`), the braces, and the tail-advance statement that
 * follows the iowrite32() upstream -- restore before compiling.
 */
114 static inline void intel_ring_emit(struct intel_ring_buffer *ring,
117 iowrite32(data, ring->virtual_start + ring->tail);
/* Publish emitted commands by advancing the ring's tail register. */
121 void intel_ring_advance(struct intel_ring_buffer *ring);
/* Next seqno to be used for a request on this ring. */
123 u32 intel_ring_get_seqno(struct intel_ring_buffer *ring);
/* Per-engine constructors: allocate and initialize each ring type. */
125 int intel_init_render_ring_buffer(struct drm_device *dev);
126 int intel_init_bsd_ring_buffer(struct drm_device *dev);
127 int intel_init_blt_ring_buffer(struct drm_device *dev);
/* Read the ring's active-head register (debug/hang diagnosis helper). */
129 u32 intel_ring_get_active_head(struct intel_ring_buffer *ring);
/* Point the hardware at this ring's status page. */
130 void intel_ring_setup_status_page(struct intel_ring_buffer *ring);
132 #endif /* _INTEL_RINGBUFFER_H_ */