Add indirect dispatching for OpenGL.
[cairo/gpu.git] / src / cairo-gpu-impl-space-gallium.h
blob ecc3fe62789135c34fcaa896b7a3dfaaff364bfa
1 static void
2 _cairo_gpu_space_fini_entry(void* abstract_entry, void* closure)
4 cairo_gpu_context_t* ctx = closure;
5 cairo_gpu_ptr_entry_t* ptr_entry = (cairo_gpu_ptr_entry_t*)abstract_entry;
6 switch(ptr_entry->base.hash & TABLE_MASK)
8 case TABLE_VERT:
9 ctx->pipe->delete_vs_state(ctx->pipe, ptr_entry->v);
10 break;
11 case TABLE_FRAG:
12 ctx->pipe->delete_fs_state(ctx->pipe, ptr_entry->v);
13 break;
14 case TABLE_BLEND:
15 ctx->pipe->delete_blend_state(ctx->pipe, ptr_entry->v);
16 break;
17 case TABLE_SAMPLER:
18 ctx->pipe->delete_sampler_state(ctx->pipe, ptr_entry->v);
19 break;
20 default:
21 break;
24 _cairo_hash_table_remove(ctx->space->table, (cairo_hash_entry_t*)abstract_entry);
25 free(abstract_entry);
/* Global GPU lock, acquire side: a no-op in the Gallium backend
 * (kept so backend-independent code can call it unconditionally). */
static inline void
_cairo_gpu_enter(void)
{
}
/* Global GPU lock, release side: a no-op in the Gallium backend,
 * paired with _cairo_gpu_enter(). */
static inline void
_cairo_gpu_exit(void)
{
}
38 static void
39 _cairo_gpu_context_init(cairo_gpu_context_t* ctx);
41 static cairo_gpu_space_tls_t*
42 _cairo_gpu_space_alloc_tls(cairo_gpu_space_t* space)
44 cairo_gpu_space_tls_t* tls = (cairo_gpu_space_tls_t*)calloc(sizeof(cairo_gpu_space_tls_t), 1);
45 tls->context.space = space;
46 tls->context.tls = tls;
48 _cairo_gpu_context_init(&tls->context);
49 return tls;
52 static cairo_font_options_t *
53 _cairo_gpu_get_font_options (cairo_gpu_space_t* space)
55 if (space->has_font_options)
56 return &space->font_options;
58 CAIRO_MUTEX_LOCK(space->mutex);
59 if (! space->has_font_options) {
60 _cairo_font_options_init_default (&space->font_options);
62 space->font_options.antialias = CAIRO_ANTIALIAS_SUBPIXEL;
64 space->has_font_options = TRUE;
66 CAIRO_MUTEX_UNLOCK(space->mutex);
68 return &space->font_options;
71 static void
72 _cairo_gpu_context__destroy(cairo_gpu_context_t * ctx)
74 ctx->pipe->destroy(ctx->pipe);
75 ctx->pipe = 0;
78 static inline void
79 _cairo_gpu_space_tls_destroy_contexts(cairo_gpu_space_tls_t* tls)
81 _cairo_gpu_context__destroy(&tls->context);
84 static inline cairo_gpu_context_t *
85 _cairo_gpu_space_tls_lookup_context(cairo_gpu_space_tls_t* tls)
87 return &tls->context;
90 static inline cairo_gpu_context_t *
91 _cairo_gpu_space_lookup_context(cairo_gpu_space_t* space)
93 cairo_gpu_space_tls_t* tls = _cairo_gpu_space_get_tls(space);
94 return &tls->context;
97 static void
98 _cairo_gpu_drm_space_destroy(cairo_gpu_space_t* space);
100 static void
101 _cairo_gpu_space_destroy(void* abstract_space)
103 unsigned i;
104 cairo_gpu_space_t * space = abstract_space;
105 cairo_gpu_space_tls_t* tls;
106 cairo_gpu_space_tls_t* tls_next;
107 cairo_gpu_context_t* ctx;
109 pthread_key_delete(space->tls);
111 for(tls = (cairo_gpu_space_tls_t*)space->tls_list.next; (list_node_t*)tls != &space->tls_list; tls = tls_next)
113 tls_next = (cairo_gpu_space_tls_t*)tls->node.next;
114 ctx = &tls->context;
116 ctx->pipe->bind_blend_state(ctx->pipe, 0);
117 ctx->pipe->bind_fs_state(ctx->pipe, 0);
118 ctx->pipe->bind_vs_state(ctx->pipe, 0);
119 ctx->pipe->bind_rasterizer_state(ctx->pipe, 0);
120 ctx->pipe->bind_sampler_states(ctx->pipe, 0, 0);
121 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, 0);
124 ctx = _cairo_gpu_space_get_bind_context(space);
125 _cairo_hash_table_foreach(space->table, _cairo_gpu_space_fini_entry, ctx);
126 _cairo_hash_table_destroy(space->table);
128 for(i = 0; i < sizeof(space->rasterizer) / sizeof(space->rasterizer[0]); ++i)
130 if(space->rasterizer[i])
131 ctx->pipe->delete_rasterizer_state(ctx->pipe, space->rasterizer[i]);
134 if(space->zsa)
135 ctx->pipe->delete_depth_stencil_alpha_state(ctx->pipe, space->zsa);
137 _cairo_gpu_texture_fini(0, &space->dummy_texture);
139 for(tls = (cairo_gpu_space_tls_t*)space->tls_list.next; (list_node_t*)tls != &space->tls_list; tls = tls_next)
141 tls_next = (cairo_gpu_space_tls_t*)tls->node.next;
143 _cairo_gpu_space_tls_destroy(tls);
146 if(space->owns_screen)
147 space->screen->destroy(space->screen);
149 space->screen = 0;
151 if(space->api == API_DRM)
152 _cairo_gpu_drm_space_destroy(space);
155 /* only current thread */
156 static cairo_status_t
157 _cairo_gpu_space_sync(void* abstract_space)
159 cairo_gpu_space_t* space = abstract_space;
160 cairo_gpu_context_t* ctx = _cairo_gpu_space_get_bind_context(space);
162 struct pipe_fence_handle *fence = NULL;
164 ctx->pipe->flush(ctx->pipe, PIPE_FLUSH_RENDER_CACHE | PIPE_FLUSH_FRAME, &fence);
166 if(fence)
168 ctx->space->screen->fence_finish(ctx->space->screen, fence, 0);
169 ctx->space->screen->fence_reference(ctx->space->screen, &fence, NULL);
172 return CAIRO_STATUS_SUCCESS;
175 static void
176 _cairo_gpu_space_finish_create(cairo_gpu_space_t* space, unsigned flags)
178 const char* env = getenv("__CAIRO_GPU_GALLIUM_FLAGS");
179 if(env)
180 flags |= atoi(env);
182 space->base.is_software = !strcmp(space->screen->get_name(space->screen), "softpipe");
184 if(flags & CAIRO_GPU_GALLIUM_TRACE)
186 space->real_screen = space->screen;
187 space->screen = trace_screen_create(space->screen);
190 space->use_fbo = 1;
191 space->tex_npot = !(flags & CAIRO_GPU_GALLIUM_DISABLE_TEXTURE_NON_POWER_OF_TWO)
192 && space->screen->get_param(space->screen, PIPE_CAP_NPOT_TEXTURES);
193 space->tex_rectangle = 0;
195 space->extend_mask = 1 << CAIRO_EXTEND_REPEAT;
196 space->extend_mask |= 1 << CAIRO_EXTEND_NONE;
197 space->extend_mask |= 1 << CAIRO_EXTEND_PAD;
199 if(space->screen->get_param(space->screen, PIPE_CAP_TEXTURE_MIRROR_REPEAT))
200 space->extend_mask |= 1 << CAIRO_EXTEND_REFLECT;
202 space->vert_op = space->vert_passthru = 1;
203 space->per_component = 1;
204 space->radial = space->frag_div_alpha = space->discontinuous = 1;
205 space->frag_passthru = 1;
206 space->blend_func_separate = 1;
207 space->blend_color = 1;
208 space->blend_subtract = 1;
210 space->msaa_samples = 16; /* optimistic value, will be downgraded if necessary */
212 // TODO: this is true on nVidia G70, which does the equivalent of 2x2 MSAA
213 space->fastest_polygon_smooth_samples = 4;
214 space->nicest_polygon_smooth_samples = 4;
216 space->max_anisotropy = space->screen->get_paramf(space->screen, PIPE_CAP_MAX_TEXTURE_ANISOTROPY);
219 static cairo_bool_t
220 _cairo_gpu_hash_keys_equal(const void *key_a, const void *key_b)
222 return ((cairo_hash_entry_t*)key_a)->hash == ((cairo_hash_entry_t*)key_b)->hash;
225 static cairo_gpu_space_t *
226 _cairo_gpu_space_begin_create(void)
228 cairo_gpu_space_t *space;
230 space = calloc(1, sizeof(cairo_gpu_space_t));
231 if(!space)
232 return 0;
234 CAIRO_REFERENCE_COUNT_INIT(&space->base.ref_count, 1);
235 CAIRO_MUTEX_INIT(space->mutex);
236 CAIRO_MUTEX_INIT(space->cached_mask_surface_mutex);
237 space->base.backend = &_cairo_gpu_space_backend;
238 space->tls_list.prev = &space->tls_list;
239 space->tls_list.next = &space->tls_list;
240 pthread_key_create(&space->tls, _cairo_gpu_space_tls_dtor);
241 space->table = _cairo_hash_table_create(_cairo_gpu_hash_keys_equal);
243 return space;
246 #include "cairo-gpu-impl-space-gallium-softpipe.h"
247 #include "cairo-gpu-impl-space-gallium-drm.h"
250 cairo_space_t*
251 cairo_gallium_hardpipe_space_create (unsigned flags)
253 cairo_space_t* space;
255 space = cairo_gallium_drm_space_create(0, flags);
256 if(space)
258 if(space->is_software)
259 cairo_space_destroy(space);
260 else
261 return space;
264 space = cairo_gallium_x11_space_create(0, flags);
265 if(space)
267 if(space->is_software)
268 cairo_space_destroy(space);
269 else
270 return space;
273 return 0;
276 cairo_space_t*
277 cairo_gallium_space_create (unsigned flags)
279 cairo_space_t* space = cairo_gallium_hardpipe_space_create(flags);
280 if(space)
281 return space;
283 return cairo_gallium_softpipe_space_create(flags);
286 cairo_space_t *
287 cairo_gallium_space_wrap(struct pipe_screen* screen, struct pipe_context* (*context_create)(void*, struct pipe_screen*), void* cookie, unsigned flags)
289 cairo_gpu_space_t *space;
290 space = _cairo_gpu_space_begin_create();
291 if(!space)
292 return 0;
294 space->api = API_USER;
295 space->screen = (struct pipe_screen*)screen;
296 space->user.context_create = context_create;
297 space->user.cookie = cookie;
299 _cairo_gpu_space_finish_create(space, flags);
300 return &space->base;
303 struct pipe_screen*
304 cairo_gallium_space_get_screen(cairo_space_t* abstract_space)
306 cairo_gpu_space_t* space = (cairo_gpu_space_t*)abstract_space;
307 if(abstract_space->backend != &_cairo_gpu_space_backend)
308 return 0;
310 return space->screen;
313 static struct pipe_context*
314 _cairo_gallium_space_create_context(cairo_gpu_space_t* space)
316 struct pipe_screen* screen;
318 screen = space->real_screen ? space->real_screen : space->screen;
320 if(space->api == API_SOFTPIPE)
321 return softpipe_create(screen);
322 else if(space->api == API_USER)
323 return (struct pipe_context*)space->user.context_create(space->user.cookie, screen);
324 else if(space->api == API_DRM)
325 return space->drm.api->create_context(space->drm.api, screen);
326 else
327 abort();
330 struct pipe_context*
331 cairo_gallium_space_create_context(cairo_space_t* abstract_space)
333 cairo_gpu_space_t* space = (cairo_gpu_space_t*)abstract_space;
335 if(abstract_space->backend != &_cairo_gpu_space_backend)
336 return 0;
338 return _cairo_gallium_space_create_context(space);