/* cairo/gpu.git: src/gpu/cairo-gpu-impl-space-gallium.h */
static void
_cairo_gpu_space__fini_entry(void* abstract_entry, void* closure)
{
    cairo_gpu_context_t* ctx = closure;
    cairo_gpu_ptr_entry_t* ptr_entry = (cairo_gpu_ptr_entry_t*)abstract_entry;

    /* The hash bits selected by TABLE_MASK encode which kind of CSO the entry holds. */
    switch(ptr_entry->base.hash & TABLE_MASK)
    {
    case TABLE_VERT:
        ctx->pipe->delete_vs_state(ctx->pipe, ptr_entry->v);
        break;
    case TABLE_FRAG:
        ctx->pipe->delete_fs_state(ctx->pipe, ptr_entry->v);
        break;
    case TABLE_BLEND:
        ctx->pipe->delete_blend_state(ctx->pipe, ptr_entry->v);
        break;
    case TABLE_SAMPLER:
        ctx->pipe->delete_sampler_state(ctx->pipe, ptr_entry->v);
        break;
    default:
        break;
    }

    _cairo_hash_table_remove(ctx->space->table, (cairo_hash_entry_t*)abstract_entry);
    free(abstract_entry);
}
static inline void
_cairo_gpu_enter(void)
{
}

static inline void
_cairo_gpu_exit(void)
{
}

static void
_cairo_gpu_context_init(cairo_gpu_context_t* ctx);
static cairo_gpu_space_tls_t*
_cairo_gpu_space_alloc_tls(cairo_gpu_space_t* space)
{
    cairo_gpu_space_tls_t* tls = (cairo_gpu_space_tls_t*)calloc(1, sizeof(cairo_gpu_space_tls_t));
    tls->context.space = space;
    tls->context.tls = tls;

    _cairo_gpu_context_init(&tls->context);
    return tls;
}
static cairo_font_options_t *
_cairo_gpu_get_font_options (cairo_gpu_space_t* space)
{
    if (space->has_font_options)
        return &space->font_options;

    /* Double-checked initialization: the flag is set only after the options are filled in. */
    CAIRO_MUTEX_LOCK(space->mutex);
    if (! space->has_font_options) {
        _cairo_font_options_init_default (&space->font_options);

        space->font_options.antialias = CAIRO_ANTIALIAS_SUBPIXEL;

        space->has_font_options = TRUE;
    }
    CAIRO_MUTEX_UNLOCK(space->mutex);

    return &space->font_options;
}
static void
_cairo_gpu_context__destroy(cairo_gpu_context_t * ctx)
{
    ctx->pipe->destroy(ctx->pipe);
    ctx->pipe = 0;
}

static inline void
_cairo_gpu_space_tls_destroy_contexts(cairo_gpu_space_tls_t* tls)
{
    _cairo_gpu_context__destroy(&tls->context);
}

static inline cairo_gpu_context_t *
_cairo_gpu_space_tls_lookup_context(cairo_gpu_space_tls_t* tls)
{
    return &tls->context;
}

static void
_cairo_gpu_drm_space_destroy(cairo_gpu_space_t* space);
static void
_cairo_gpu_space_destroy(void* abstract_space)
{
    unsigned i;
    cairo_gpu_space_t * space = abstract_space;
    cairo_gpu_space_tls_t* tls;
    cairo_gpu_space_tls_t* tls_next;
    cairo_gpu_context_t* ctx;

    pthread_key_delete(space->tls);

    for(tls = (cairo_gpu_space_tls_t*)space->tls_list.next; (list_node_t*)tls != &space->tls_list; tls = tls_next)
    {
        tls_next = (cairo_gpu_space_tls_t*)tls->node.next;
        ctx = &tls->context;

        /* unbind all CSOs from every per-thread context before they are deleted below */
        ctx->pipe->bind_blend_state(ctx->pipe, 0);
        ctx->pipe->bind_fs_state(ctx->pipe, 0);
        ctx->pipe->bind_vs_state(ctx->pipe, 0);
        ctx->pipe->bind_rasterizer_state(ctx->pipe, 0);
        ctx->pipe->bind_sampler_states(ctx->pipe, 0, 0);
        ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, 0);
    }

    ctx = _cairo_gpu_space_bind(space);
    _cairo_hash_table_foreach(space->table, _cairo_gpu_space__fini_entry, ctx);
    _cairo_hash_table_destroy(space->table);

    for(i = 0; i < sizeof(space->rasterizer) / sizeof(space->rasterizer[0]); ++i)
    {
        if(space->rasterizer[i])
            ctx->pipe->delete_rasterizer_state(ctx->pipe, space->rasterizer[i]);
    }

    if(space->zsa)
        ctx->pipe->delete_depth_stencil_alpha_state(ctx->pipe, space->zsa);

    _cairo_gpu_texture_fini(0, &space->dummy_texture);

    for(tls = (cairo_gpu_space_tls_t*)space->tls_list.next; (list_node_t*)tls != &space->tls_list; tls = tls_next)
    {
        tls_next = (cairo_gpu_space_tls_t*)tls->node.next;

        _cairo_gpu_space_tls_destroy(tls);
    }

    if(space->owns_screen)
        space->screen->destroy(space->screen);

    space->screen = 0;

    if(space->api == API_DRM)
        _cairo_gpu_drm_space_destroy(space);
}
/* Note: this only flushes and fences the calling thread's context. */
static cairo_status_t
_cairo_gpu_space_sync(void* abstract_space)
{
    cairo_gpu_space_t* space = abstract_space;
    cairo_gpu_context_t* ctx = _cairo_gpu_space_bind(space);

    struct pipe_fence_handle *fence = NULL;

    ctx->pipe->flush(ctx->pipe, PIPE_FLUSH_RENDER_CACHE | PIPE_FLUSH_FRAME, &fence);

    if(fence)
    {
        ctx->space->screen->fence_finish(ctx->space->screen, fence, 0);
        ctx->space->screen->fence_reference(ctx->space->screen, &fence, NULL);
    }

    return CAIRO_STATUS_SUCCESS;
}
static void
_cairo_gpu_space__finish_create(cairo_gpu_space_t* space, unsigned flags)
{
    const char* env = getenv("__CAIRO_GPU_GALLIUM_FLAGS");
    if(env)
        flags |= atoi(env);

    space->base.is_software = !strcmp(space->screen->get_name(space->screen), "softpipe");

    if(flags & CAIRO_GPU_GALLIUM_TRACE)
    {
        space->real_screen = space->screen;
        space->screen = trace_screen_create(space->screen);
    }

    space->use_fbo = 1;
    space->tex_npot = !(flags & CAIRO_GPU_GALLIUM_DISABLE_TEXTURE_NON_POWER_OF_TWO)
        && space->screen->get_param(space->screen, PIPE_CAP_NPOT_TEXTURES);
    space->tex_rectangle = 0;

    space->extend_mask = 1 << CAIRO_EXTEND_REPEAT;
    space->extend_mask |= 1 << CAIRO_EXTEND_NONE;
    space->extend_mask |= 1 << CAIRO_EXTEND_PAD;

    if(space->screen->get_param(space->screen, PIPE_CAP_TEXTURE_MIRROR_REPEAT))
        space->extend_mask |= 1 << CAIRO_EXTEND_REFLECT;

    space->vert_op = space->vert_passthru = 1;
    space->per_component = 1;
    space->radial = space->frag_div_alpha = space->discontinuous = 1;
    space->frag_mul_alpha = 1;
    space->tex_aaaa_111a = 1;
    space->frag_passthru = 1;
    space->blend_func_separate = 1;
    space->blend_color = 1;
    space->blend_subtract = 1;

    space->msaa_samples = 16; /* optimistic value, will be downgraded if necessary */

    /* TODO: this is true on nVidia G70, which does the equivalent of 2x2 MSAA */
    space->fastest_polygon_smooth_samples = 4;
    space->nicest_polygon_smooth_samples = 4;

    space->max_anisotropy = space->screen->get_paramf(space->screen, PIPE_CAP_MAX_TEXTURE_ANISOTROPY);
}
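/*
 * Note on _cairo_gpu_space__finish_create() above: the __CAIRO_GPU_GALLIUM_FLAGS
 * environment variable is parsed with atoi() and OR'ed into the caller's flags,
 * so extra CAIRO_GPU_GALLIUM_* bits (e.g. CAIRO_GPU_GALLIUM_TRACE) can be turned
 * on at runtime without a rebuild. The numeric value is whatever bitmask those
 * flag constants expand to, which is not spelled out in this file; the shell
 * line below is only an illustrative sketch:
 *
 *     $ __CAIRO_GPU_GALLIUM_FLAGS=<bitmask> ./some-cairo-gpu-app
 */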
static inline cairo_bool_t
_cairo_gpu_space_is_frag_supported(cairo_gpu_space_t* space, unsigned frag)
{
    /* every fragment program variant is reported as supported on gallium */
    return 1;
}
static cairo_bool_t
_cairo_gpu_hash_keys_equal(const void *key_a, const void *key_b)
{
    return ((cairo_hash_entry_t*)key_a)->hash == ((cairo_hash_entry_t*)key_b)->hash;
}
static cairo_gpu_space_t *
_cairo_gpu_space__begin_create(void)
{
    cairo_gpu_space_t *space;

    space = calloc(1, sizeof(cairo_gpu_space_t));
    if(!space)
        return 0;

    CAIRO_REFERENCE_COUNT_INIT(&space->base.ref_count, 1);
    CAIRO_MUTEX_INIT(space->mutex);
    CAIRO_MUTEX_INIT(space->cached_mask_surface_mutex);
    space->base.backend = &_cairo_gpu_space_backend;
    space->tls_list.prev = &space->tls_list;
    space->tls_list.next = &space->tls_list;
    pthread_key_create(&space->tls, _cairo_gpu_space_tls_dtor);
    space->table = _cairo_hash_table_create(_cairo_gpu_hash_keys_equal);

    return space;
}
#include "cairo-gpu-impl-space-gallium-softpipe.h"
#include "cairo-gpu-impl-space-gallium-drm.h"
cairo_space_t*
cairo_gallium_hardpipe_space_create (unsigned flags)
{
    cairo_space_t* space;

    space = cairo_gallium_drm_space_create(0, flags);
    if(space)
    {
        if(space->is_software)
            cairo_space_destroy(space);
        else
            return space;
    }

    space = cairo_gallium_x11_space_create(0, flags);
    if(space)
    {
        if(space->is_software)
            cairo_space_destroy(space);
        else
            return space;
    }

    return 0;
}
cairo_space_t*
cairo_gallium_space_create (unsigned flags)
{
    cairo_space_t* space = cairo_gallium_hardpipe_space_create(flags);
    if(space)
        return space;

    return cairo_gallium_softpipe_space_create(flags);
}
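/*
 * Usage sketch for cairo_gallium_space_create(): it tries DRM and X11 hardware
 * screens first and falls back to softpipe, so a caller only has to handle the
 * NULL case (no usable gallium screen at all). The surface-creation API that
 * would consume the space lives elsewhere in cairo-gpu and is not shown:
 *
 *     cairo_space_t* space = cairo_gallium_space_create(0);
 *     if(space)
 *     {
 *         ... create and draw to cairo-gpu surfaces against space ...
 *         cairo_space_destroy(space);
 *     }
 */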
cairo_space_t *
cairo_gallium_space_wrap(struct pipe_screen* screen, struct pipe_context* (*context_create)(void*, struct pipe_screen*), void* cookie, unsigned flags)
{
    cairo_gpu_space_t *space;
    space = _cairo_gpu_space__begin_create();
    if(!space)
        return 0;

    space->api = API_USER;
    space->screen = (struct pipe_screen*)screen;
    space->user.context_create = context_create;
    space->user.cookie = cookie;

    _cairo_gpu_space__finish_create(space, flags);
    return &space->base;
}
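/*
 * Wrapping sketch for cairo_gallium_space_wrap(): an application that already
 * owns a pipe_screen hands it over together with a context-creation callback
 * and an opaque cookie. The callback body and the my_app names below are
 * hypothetical stand-ins; the fixed parts are the callback signature and the
 * fact that _cairo_gpu_space_destroy() leaves a wrapped screen alone, since
 * this path never sets space->owns_screen:
 *
 *     static struct pipe_context*
 *     my_context_create(void* cookie, struct pipe_screen* screen)
 *     {
 *         struct my_app* app = cookie;
 *         return my_app_create_pipe_context(app, screen);
 *     }
 *
 *     cairo_space_t* space =
 *         cairo_gallium_space_wrap(app->screen, my_context_create, app, 0);
 */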
struct pipe_screen*
cairo_gallium_space_get_screen(cairo_space_t* abstract_space)
{
    cairo_gpu_space_t* space = (cairo_gpu_space_t*)abstract_space;
    if(abstract_space->backend != &_cairo_gpu_space_backend)
        return 0;

    return space->screen;
}
static struct pipe_context*
_cairo_gallium_space_create_context(cairo_gpu_space_t* space)
{
    struct pipe_screen* screen;

    screen = space->real_screen ? space->real_screen : space->screen;

    if(space->api == API_SOFTPIPE)
        return softpipe_create(screen);
    else if(space->api == API_USER)
        return (struct pipe_context*)space->user.context_create(space->user.cookie, screen);
    else if(space->api == API_DRM)
        return space->drm.api->create_context(space->drm.api, screen);
    else
        abort();
}
struct pipe_context*
cairo_gallium_space_create_context(cairo_space_t* abstract_space)
{
    cairo_gpu_space_t* space = (cairo_gpu_space_t*)abstract_space;

    if(abstract_space->backend != &_cairo_gpu_space_backend)
        return 0;

    return _cairo_gallium_space_create_context(space);
}
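/*
 * Interop sketch: the two public helpers above let an application pull the
 * underlying gallium objects back out of a space and drive the pipe directly
 * alongside cairo. Both return 0 for a space that does not use this backend,
 * so the checks below are the expected error handling; what the application
 * renders with the context is its own concern:
 *
 *     struct pipe_screen* screen = cairo_gallium_space_get_screen(space);
 *     struct pipe_context* pipe = cairo_gallium_space_create_context(space);
 *     if(screen && pipe)
 *     {
 *         ... issue application-side gallium commands ...
 *         pipe->destroy(pipe);
 *     }
 */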