Merge branch 'drm-fixes' of /home/airlied/kernel/linux-2.6 into drm-core-next
[pandora-kernel.git] / drivers / gpu / drm / i915 / intel_ringbuffer.h
1 #ifndef _INTEL_RINGBUFFER_H_
2 #define _INTEL_RINGBUFFER_H_
3
/*
 * Per-ring hardware status page: a page of memory the GPU writes status
 * dwords into and the CPU reads back via intel_read_status_page().
 */
struct  intel_hw_status_page {
	void		*page_addr;	/* kernel virtual address of the page */
	unsigned int	gfx_addr;	/* presumably the GPU/GTT address — confirm against setup code */
	struct		drm_gem_object *obj;	/* GEM object backing the page */
};
9
/*
 * Accessors for a ring's TAIL/START/HEAD/CTL registers, which sit at
 * fixed offsets (RING_*()) from the ring's mmio_base.
 *
 * Macro arguments are fully parenthesized so that expressions such as
 * I915_READ_TAIL(&dev_priv->ring[i]) expand correctly.
 */
#define I915_READ_TAIL(ring) I915_READ(RING_TAIL((ring)->mmio_base))
#define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL((ring)->mmio_base), (val))
#define I915_READ_START(ring) I915_READ(RING_START((ring)->mmio_base))
#define I915_WRITE_START(ring, val) I915_WRITE(RING_START((ring)->mmio_base), (val))
#define I915_READ_HEAD(ring) I915_READ(RING_HEAD((ring)->mmio_base))
#define I915_WRITE_HEAD(ring, val) I915_WRITE(RING_HEAD((ring)->mmio_base), (val))
#define I915_READ_CTL(ring) I915_READ(RING_CTL((ring)->mmio_base))
#define I915_WRITE_CTL(ring, val) I915_WRITE(RING_CTL((ring)->mmio_base), (val))
18
struct drm_i915_gem_execbuffer2;

/*
 * State for one hardware command ring (render or BSD): the CPU mapping
 * of the ring buffer, software head/tail tracking, the hardware status
 * page, per-ring interrupt bookkeeping, a vtable of ring-specific
 * operations, and the active/request lists used by the GEM core.
 */
struct  intel_ring_buffer {
	const char	*name;
	enum intel_ring_id {
		RING_RENDER = 0x1,
		RING_BSD = 0x2,
	} id;
	u32		mmio_base;	/* base offset of this ring's registers; see I915_READ/WRITE_* above */
	unsigned long	size;		/* size of the ring buffer mapping */
	void		*virtual_start;	/* CPU address commands are emitted through (intel_ring_emit) */
	struct		drm_device *dev;
	struct		drm_gem_object *gem_object;	/* GEM object backing the ring */

	/* Software copies of the ring position; tail advances as commands are emitted. */
	unsigned int	head;
	unsigned int	tail;
	int		space;		/* bytes believed free between head and tail */
	struct intel_hw_status_page status_page;

	u32		irq_gem_seqno;		/* last seqno seen at irq time */
	u32		waiting_gem_seqno;	/* seqno currently being waited for, if any */
	int		user_irq_refcount;	/* nesting count for user_irq_get/put */
	/* Enable/disable user interrupts for this ring (refcounted by caller). */
	void		(*user_irq_get)(struct drm_device *dev,
			struct intel_ring_buffer *ring);
	void		(*user_irq_put)(struct drm_device *dev,
			struct intel_ring_buffer *ring);

	/* One-time hardware initialization of this ring. */
	int		(*init)(struct drm_device *dev,
			struct intel_ring_buffer *ring);

	/* Push a new tail value to the hardware. */
	void		(*set_tail)(struct drm_device *dev,
				    struct intel_ring_buffer *ring,
				    u32 value);
	/* Emit a flush for the given GPU cache domains. */
	void		(*flush)(struct drm_device *dev,
			struct intel_ring_buffer *ring,
			u32	invalidate_domains,
			u32	flush_domains);
	/* Emit a request breadcrumb; returns the seqno associated with it. */
	u32		(*add_request)(struct drm_device *dev,
			struct intel_ring_buffer *ring,
			u32 flush_domains);
	/* Read back the last seqno the GPU has completed. */
	u32		(*get_seqno)(struct drm_device *dev,
				     struct intel_ring_buffer *ring);
	/* Emit a batchbuffer dispatch for an execbuffer2 call. */
	int		(*dispatch_gem_execbuffer)(struct drm_device *dev,
			struct intel_ring_buffer *ring,
			struct drm_i915_gem_execbuffer2 *exec,
			struct drm_clip_rect *cliprects,
			uint64_t exec_offset);

	/**
	 * List of objects currently involved in rendering from the
	 * ringbuffer.
	 *
	 * Includes buffers having the contents of their GPU caches
	 * flushed, not necessarily primitives.  last_rendering_seqno
	 * represents when the rendering involved will be completed.
	 *
	 * A reference is held on the buffer while on this list.
	 */
	struct list_head active_list;

	/**
	 * List of breadcrumbs associated with GPU requests currently
	 * outstanding.
	 */
	struct list_head request_list;

	/**
	 * Do we have some not yet emitted requests outstanding?
	 */
	bool outstanding_lazy_request;

	wait_queue_head_t irq_queue;	/* waitqueue for seqno waiters — presumably woken from the irq handler */
	drm_local_map_t map;		/* legacy DRM mapping of the ring */
};
92
93 static inline u32
94 intel_read_status_page(struct intel_ring_buffer *ring,
95                 int reg)
96 {
97         u32 *regs = ring->status_page.page_addr;
98         return regs[reg];
99 }
100
/* Allocate, map and initialize @ring; returns 0 or a negative errno. */
int intel_init_ring_buffer(struct drm_device *dev,
                           struct intel_ring_buffer *ring);
/* Tear down everything intel_init_ring_buffer() set up. */
void intel_cleanup_ring_buffer(struct drm_device *dev,
                               struct intel_ring_buffer *ring);
/* Wait until at least @n bytes of ring space are free. */
int intel_wait_ring_buffer(struct drm_device *dev,
                           struct intel_ring_buffer *ring, int n);
/* Prepare to emit @n bytes of commands (pairs with intel_ring_advance). */
void intel_ring_begin(struct drm_device *dev,
                      struct intel_ring_buffer *ring, int n);
109
110 static inline void intel_ring_emit(struct drm_device *dev,
111                                    struct intel_ring_buffer *ring,
112                                    unsigned int data)
113 {
114         unsigned int *virt = ring->virtual_start + ring->tail;
115         *virt = data;
116         ring->tail += 4;
117 }
118
/* Copy @len bytes of raw command data into the ring at the current tail. */
void intel_fill_struct(struct drm_device *dev,
                struct intel_ring_buffer *ring,
                void *data,
                unsigned int len);
/* Flush emitted commands to the hardware by writing the new tail. */
void intel_ring_advance(struct drm_device *dev,
                struct intel_ring_buffer *ring);

/* Reserve and return the next seqno for @ring. */
u32 intel_ring_get_seqno(struct drm_device *dev,
                struct intel_ring_buffer *ring);

/* Per-ring constructors; return 0 or a negative errno. */
int intel_init_render_ring_buffer(struct drm_device *dev);
int intel_init_bsd_ring_buffer(struct drm_device *dev);

/* Read the ring's active head pointer from the hardware. */
u32 intel_ring_get_active_head(struct drm_device *dev,
                               struct intel_ring_buffer *ring);
/* Point the hardware at the ring's status page. */
void intel_ring_setup_status_page(struct drm_device *dev,
                                  struct intel_ring_buffer *ring);
136
137 #endif /* _INTEL_RINGBUFFER_H_ */