/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#ifndef U_INLINES_H
#define U_INLINES_H

#include "pipe/p_context.h"
#include "pipe/p_defines.h"
#include "pipe/p_state.h"
#include "pipe/p_screen.h"
#include "util/u_debug.h"
#include "util/u_debug_describe.h"
#include "util/u_debug_refcnt.h"
#include "util/u_atomic.h"
#include "util/u_box.h"
#include "util/u_math.h"

/*
 * Reference counting helper functions.
 */

static INLINE void
pipe_reference_init(struct pipe_reference *reference, unsigned count)
{
   p_atomic_set(&reference->count, count);
}

static INLINE boolean
pipe_is_referenced(struct pipe_reference *reference)
{
   return p_atomic_read(&reference->count) != 0;
}

/**
 * Update reference counting.
 * The old thing pointed to, if any, will be unreferenced.
 * Both 'ptr' and 'reference' may be NULL.
 * \return TRUE if the object's refcount hits zero and should be destroyed.
 */
static INLINE boolean
pipe_reference_described(struct pipe_reference *ptr,
                         struct pipe_reference *reference,
                         debug_reference_descriptor get_desc)
{
   boolean destroy = FALSE;

   if (ptr != reference) {
      /* bump the reference.count first */
      if (reference) {
         assert(pipe_is_referenced(reference));
         p_atomic_inc(&reference->count);
         debug_reference(reference, get_desc, 1);
      }

      if (ptr) {
         assert(pipe_is_referenced(ptr));
         if (p_atomic_dec_zero(&ptr->count)) {
            destroy = TRUE;
         }
         debug_reference(ptr, get_desc, -1);
      }
   }

   return destroy;
}

static INLINE boolean
pipe_reference(struct pipe_reference *ptr, struct pipe_reference *reference)
{
   return pipe_reference_described(ptr, reference,
                                   (debug_reference_descriptor)debug_describe_reference);
}

static INLINE void
pipe_surface_reference(struct pipe_surface **ptr, struct pipe_surface *surf)
{
   struct pipe_surface *old_surf = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &surf->reference,
                                (debug_reference_descriptor)debug_describe_surface))
      old_surf->context->surface_destroy(old_surf->context, old_surf);
   *ptr = surf;
}

static INLINE void
pipe_resource_reference(struct pipe_resource **ptr, struct pipe_resource *tex)
{
   struct pipe_resource *old_tex = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &tex->reference,
                                (debug_reference_descriptor)debug_describe_resource))
      old_tex->screen->resource_destroy(old_tex->screen, old_tex);
   *ptr = tex;
}

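/*
 * Usage sketch (illustrative, not part of this header): passing NULL as the
 * new pointer turns these helpers into plain "unreference" calls, which is
 * the usual way state-tracker code drops a resource it no longer needs.
 * 'res' below is a hypothetical pointer obtained elsewhere.
 *
 *    struct pipe_resource *res = ...some previously referenced resource...;
 *    pipe_resource_reference(&res, NULL);   drops the reference, res == NULL
 */
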
static INLINE void
pipe_sampler_view_reference(struct pipe_sampler_view **ptr, struct pipe_sampler_view *view)
{
   struct pipe_sampler_view *old_view = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &view->reference,
                                (debug_reference_descriptor)debug_describe_sampler_view))
      old_view->context->sampler_view_destroy(old_view->context, old_view);
   *ptr = view;
}

static INLINE void
pipe_surface_reset(struct pipe_context *ctx, struct pipe_surface* ps,
                   struct pipe_resource *pt, unsigned level, unsigned layer,
                   unsigned flags)
{
   pipe_resource_reference(&ps->texture, pt);
   ps->format = pt->format;
   ps->width = u_minify(pt->width0, level);
   ps->height = u_minify(pt->height0, level);
   ps->usage = flags;
   ps->u.tex.level = level;
   ps->u.tex.first_layer = ps->u.tex.last_layer = layer;
   ps->context = ctx;
}

static INLINE void
pipe_surface_init(struct pipe_context *ctx, struct pipe_surface* ps,
                  struct pipe_resource *pt, unsigned level, unsigned layer,
                  unsigned flags)
{
   ps->texture = 0;   /* start with no texture so the reset's reference is clean */
   pipe_reference_init(&ps->reference, 1);
   pipe_surface_reset(ctx, ps, pt, level, layer, flags);
}

/* Return true if the surfaces are equal. */
static INLINE boolean
pipe_surface_equal(struct pipe_surface *s1, struct pipe_surface *s2)
{
   return s1->texture == s2->texture &&
          s1->format == s2->format &&
          (s1->texture->target != PIPE_BUFFER ||
           (s1->u.buf.first_element == s2->u.buf.first_element &&
            s1->u.buf.last_element == s2->u.buf.last_element)) &&
          (s1->texture->target == PIPE_BUFFER ||
           (s1->u.tex.level == s2->u.tex.level &&
            s1->u.tex.first_layer == s2->u.tex.first_layer &&
            s1->u.tex.last_layer == s2->u.tex.last_layer));
}

/*
 * Convenience wrappers for screen buffer functions.
 */

static INLINE struct pipe_resource *
pipe_buffer_create( struct pipe_screen *screen,
                    unsigned bind,
                    unsigned usage,
                    unsigned size )
{
   struct pipe_resource buffer;
   memset(&buffer, 0, sizeof buffer);
   buffer.target = PIPE_BUFFER;
   buffer.format = PIPE_FORMAT_R8_UNORM; /* want TYPELESS or similar */
   buffer.bind = bind;
   buffer.usage = usage;
   buffer.flags = 0;
   buffer.width0 = size;
   buffer.height0 = 1;
   buffer.depth0 = 1;
   buffer.array_size = 1;
   return screen->resource_create(screen, &buffer);
}

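/*
 * Usage sketch (illustrative, not part of this header); assumes a valid
 * pipe_screen and the PIPE_BIND_* / PIPE_USAGE_* flags from p_defines.h:
 *
 *    struct pipe_resource *vbuf =
 *       pipe_buffer_create(screen, PIPE_BIND_VERTEX_BUFFER,
 *                          PIPE_USAGE_STATIC, 4 * 1024);
 *    ...
 *    pipe_resource_reference(&vbuf, NULL);   release it when done
 */
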
static INLINE struct pipe_resource *
pipe_user_buffer_create( struct pipe_screen *screen, void *ptr, unsigned size,
                         unsigned usage )
{
   return screen->user_buffer_create(screen, ptr, size, usage);
}

static INLINE void *
pipe_buffer_map_range(struct pipe_context *pipe,
                      struct pipe_resource *buffer,
                      unsigned offset,
                      unsigned length,
                      unsigned usage,
                      struct pipe_transfer **transfer)
{
   struct pipe_box box;
   void *map;

   assert(offset < buffer->width0);
   assert(offset + length <= buffer->width0);

   u_box_1d(offset, length, &box);

   *transfer = pipe->get_transfer(pipe, buffer, 0, usage, &box);

   if (*transfer == NULL)
      return NULL;

   map = pipe->transfer_map(pipe, *transfer);
   if (map == NULL) {
      pipe->transfer_destroy(pipe, *transfer);
      *transfer = NULL;
      return NULL;
   }

   /* Match old screen->buffer_map_range() behaviour, return pointer
    * to where the beginning of the buffer would be:
    */
   return (void *)((char *)map - offset);
}

static INLINE void *
pipe_buffer_map(struct pipe_context *pipe,
                struct pipe_resource *buffer,
                unsigned usage,
                struct pipe_transfer **transfer)
{
   return pipe_buffer_map_range(pipe, buffer, 0, buffer->width0, usage, transfer);
}

static INLINE void
pipe_buffer_unmap(struct pipe_context *pipe,
                  struct pipe_transfer *transfer)
{
   if (transfer) {
      pipe->transfer_unmap(pipe, transfer);
      pipe->transfer_destroy(pipe, transfer);
   }
}

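/*
 * Usage sketch (illustrative): map a sub-range for writing, fill it, then
 * unmap.  Assumes 'pipe' and a buffer 'buf' of at least 192 bytes; note that
 * the pointer returned by pipe_buffer_map_range() is relative to the start
 * of the buffer, so it is still indexed with the absolute offset.
 *
 *    struct pipe_transfer *t;
 *    char *map = (char *) pipe_buffer_map_range(pipe, buf, 128, 64,
 *                                               PIPE_TRANSFER_WRITE, &t);
 *    if (map) {
 *       memset(map + 128, 0, 64);
 *       pipe_buffer_unmap(pipe, t);
 *    }
 */
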
static INLINE void
pipe_buffer_flush_mapped_range(struct pipe_context *pipe,
                               struct pipe_transfer *transfer,
                               unsigned offset,
                               unsigned length)
{
   struct pipe_box box;
   int transfer_offset;

   assert(transfer->box.x <= offset);
   assert(offset + length <= transfer->box.x + transfer->box.width);

   /* Match old screen->buffer_flush_mapped_range() behaviour, where the
    * offset parameter is relative to the start of the buffer, not the
    * mapped range.
    */
   transfer_offset = offset - transfer->box.x;

   u_box_1d(transfer_offset, length, &box);

   pipe->transfer_flush_region(pipe, transfer, &box);
}

static INLINE void
pipe_buffer_write(struct pipe_context *pipe,
                  struct pipe_resource *buf,
                  unsigned offset,
                  unsigned size,
                  const void *data)
{
   struct pipe_box box;
   unsigned usage = PIPE_TRANSFER_WRITE;

   if (offset == 0 && size == buf->width0) {
      usage |= PIPE_TRANSFER_DISCARD_WHOLE_RESOURCE;
   } else {
      usage |= PIPE_TRANSFER_DISCARD_RANGE;
   }

   u_box_1d(offset, size, &box);

   pipe->transfer_inline_write(pipe, buf, 0, usage, &box, data, size, 0);
}

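/*
 * Usage sketch (illustrative): upload a small block of constants.  Assumes
 * 'pipe' and a buffer 'buf' at least sizeof(consts) bytes wide:
 *
 *    float consts[4] = { 0.0f, 0.5f, 1.0f, 2.0f };
 *    pipe_buffer_write(pipe, buf, 0, sizeof(consts), consts);
 */
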
/**
 * Special case for writing non-overlapping ranges.
 *
 * We can avoid GPU/CPU synchronization when writing a range that has never
 * been written before.
 */
static INLINE void
pipe_buffer_write_nooverlap(struct pipe_context *pipe,
                            struct pipe_resource *buf,
                            unsigned offset, unsigned size,
                            const void *data)
{
   struct pipe_box box;

   u_box_1d(offset, size, &box);

   pipe->transfer_inline_write(pipe, buf, 0,
                               (PIPE_TRANSFER_WRITE |
                                PIPE_TRANSFER_NOOVERWRITE),
                               &box, data, 0, 0);
}

static INLINE void
pipe_buffer_read(struct pipe_context *pipe,
                 struct pipe_resource *buf,
                 unsigned offset,
                 unsigned size,
                 void *data)
{
   struct pipe_transfer *src_transfer;
   ubyte *map;

   map = (ubyte *) pipe_buffer_map_range(pipe,
                                         buf,
                                         offset, size,
                                         PIPE_TRANSFER_READ,
                                         &src_transfer);

   if (map)
      memcpy(data, map + offset, size);

   pipe_buffer_unmap(pipe, src_transfer);
}

static INLINE struct pipe_transfer *
pipe_get_transfer( struct pipe_context *context,
                   struct pipe_resource *resource,
                   unsigned level, unsigned layer,
                   enum pipe_transfer_usage usage,
                   unsigned x, unsigned y,
                   unsigned w, unsigned h)
{
   struct pipe_box box;
   u_box_2d_zslice( x, y, layer, w, h, &box );
   return context->get_transfer( context,
                                 resource,
                                 level,
                                 usage,
                                 &box );
}

static INLINE void *
pipe_transfer_map( struct pipe_context *context,
                   struct pipe_transfer *transfer )
{
   return context->transfer_map( context, transfer );
}

static INLINE void
pipe_transfer_unmap( struct pipe_context *context,
                     struct pipe_transfer *transfer )
{
   context->transfer_unmap( context, transfer );
}

static INLINE void
pipe_transfer_destroy( struct pipe_context *context,
                       struct pipe_transfer *transfer )
{
   context->transfer_destroy(context, transfer);
}

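/*
 * Usage sketch (illustrative): read back a 4x4 block from level 0, layer 0
 * of a 2D texture with the wrappers above.  Assumes 'pipe' and 'tex' are
 * valid; error handling is abbreviated.
 *
 *    struct pipe_transfer *t =
 *       pipe_get_transfer(pipe, tex, 0, 0, PIPE_TRANSFER_READ, 0, 0, 4, 4);
 *    if (t) {
 *       void *map = pipe_transfer_map(pipe, t);
 *       if (map) {
 *          ...rows of texels are t->stride bytes apart...
 *          pipe_transfer_unmap(pipe, t);
 *       }
 *       pipe_transfer_destroy(pipe, t);
 *    }
 */
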
static INLINE boolean
util_get_offset(const struct pipe_rasterizer_state *templ,
                unsigned fill_mode)
{
   switch (fill_mode) {
   case PIPE_POLYGON_MODE_POINT:
      return templ->offset_point;
   case PIPE_POLYGON_MODE_LINE:
      return templ->offset_line;
   case PIPE_POLYGON_MODE_FILL:
      return templ->offset_tri;
   default:
      assert(0);
      return FALSE;
   }
}

/**
 * This function is used to copy an array of pipe_vertex_buffer structures,
 * while properly referencing the pipe_vertex_buffer::buffer member.
 *
 * \sa util_copy_framebuffer_state
 */
static INLINE void util_copy_vertex_buffers(struct pipe_vertex_buffer *dst,
                                            unsigned *dst_count,
                                            const struct pipe_vertex_buffer *src,
                                            unsigned src_count)
{
   unsigned i;

   /* Reference the buffers of 'src' in 'dst'. */
   for (i = 0; i < src_count; i++) {
      pipe_resource_reference(&dst[i].buffer, src[i].buffer);
   }
   /* Unreference the rest of the buffers in 'dst'. */
   for (; i < *dst_count; i++) {
      pipe_resource_reference(&dst[i].buffer, NULL);
   }

   /* Update the size of 'dst' and copy over the other members
    * of pipe_vertex_buffer. */
   *dst_count = src_count;
   memcpy(dst, src, src_count * sizeof(struct pipe_vertex_buffer));
}

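/*
 * Usage sketch (illustrative): mirror a caller-provided vertex-buffer array
 * into driver-side state.  'MY_MAX_VBUFS', 'state', 'buffers' and 'count'
 * are hypothetical names, not part of this header.
 *
 *    struct {
 *       struct pipe_vertex_buffer vbuf[MY_MAX_VBUFS];
 *       unsigned num_vbufs;
 *    } state;
 *    ...
 *    util_copy_vertex_buffers(state.vbuf, &state.num_vbufs, buffers, count);
 */
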
#endif /* U_INLINES_H */