/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "util/u_sampler.h"

#include "vl_compositor_gfx.h"
#include "vl_compositor_cs.h"

33 static bool
init_shaders(struct vl_compositor * c)34 init_shaders(struct vl_compositor *c)
35 {
36 assert(c);
37
38 if (c->shaders_initialized)
39 return true;
40
41 if (c->pipe_cs_composit_supported) {
42 if (!vl_compositor_cs_init_shaders(c))
43 return false;
44
45 } else if (c->pipe_gfx_supported) {
46 c->fs_video_buffer = create_frag_shader_video_buffer(c);
47 if (!c->fs_video_buffer) {
48 debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
49 return false;
50 }
51
52 c->fs_weave_rgb = create_frag_shader_weave_rgb(c);
53 if (!c->fs_weave_rgb) {
54 debug_printf("Unable to create YCbCr-to-RGB weave fragment shader.\n");
55 return false;
56 }
57
58 c->fs_yuv.weave.y = create_frag_shader_deint_yuv(c, true, true);
59 c->fs_yuv.weave.uv = create_frag_shader_deint_yuv(c, false, true);
60 c->fs_yuv.bob.y = create_frag_shader_deint_yuv(c, true, false);
61 c->fs_yuv.bob.uv = create_frag_shader_deint_yuv(c, false, false);
62 if (!c->fs_yuv.weave.y || !c->fs_yuv.weave.uv ||
63 !c->fs_yuv.bob.y || !c->fs_yuv.bob.uv) {
64 debug_printf("Unable to create YCbCr i-to-YCbCr p deint fragment shader.\n");
65 return false;
66 }
67
68 c->fs_rgb_yuv.y = create_frag_shader_rgb_yuv(c, true);
69 c->fs_rgb_yuv.uv = create_frag_shader_rgb_yuv(c, false);
70 if (!c->fs_rgb_yuv.y || !c->fs_rgb_yuv.uv) {
71 debug_printf("Unable to create RGB-to-YUV fragment shader.\n");
72 return false;
73 }
74 }
75
76 if (c->pipe_gfx_supported) {
77 c->vs = create_vert_shader(c);
78 if (!c->vs) {
79 debug_printf("Unable to create vertex shader.\n");
80 return false;
81 }
82
83 c->fs_palette.yuv = create_frag_shader_palette(c, true);
84 if (!c->fs_palette.yuv) {
85 debug_printf("Unable to create YUV-Palette-to-RGB fragment shader.\n");
86 return false;
87 }
88
89 c->fs_palette.rgb = create_frag_shader_palette(c, false);
90 if (!c->fs_palette.rgb) {
91 debug_printf("Unable to create RGB-Palette-to-RGB fragment shader.\n");
92 return false;
93 }
94
95 c->fs_rgba = create_frag_shader_rgba(c);
96 if (!c->fs_rgba) {
97 debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
98 return false;
99 }
100 }
101
102 c->shaders_initialized = true;
103
104 return true;
105 }
106
cleanup_shaders(struct vl_compositor * c)107 static void cleanup_shaders(struct vl_compositor *c)
108 {
109 assert(c);
110
111 if (!c->shaders_initialized)
112 return;
113
114 if (c->pipe_cs_composit_supported) {
115 vl_compositor_cs_cleanup_shaders(c);
116 } else if (c->pipe_gfx_supported) {
117 c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
118 c->pipe->delete_fs_state(c->pipe, c->fs_weave_rgb);
119 c->pipe->delete_fs_state(c->pipe, c->fs_yuv.weave.y);
120 c->pipe->delete_fs_state(c->pipe, c->fs_yuv.weave.uv);
121 c->pipe->delete_fs_state(c->pipe, c->fs_yuv.bob.y);
122 c->pipe->delete_fs_state(c->pipe, c->fs_yuv.bob.uv);
123 c->pipe->delete_fs_state(c->pipe, c->fs_rgb_yuv.y);
124 c->pipe->delete_fs_state(c->pipe, c->fs_rgb_yuv.uv);
125 }
126
127 if (c->pipe_gfx_supported) {
128 c->pipe->delete_vs_state(c->pipe, c->vs);
129 c->pipe->delete_fs_state(c->pipe, c->fs_palette.yuv);
130 c->pipe->delete_fs_state(c->pipe, c->fs_palette.rgb);
131 c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
132 }
133 }
134
135 static bool
init_pipe_state(struct vl_compositor * c)136 init_pipe_state(struct vl_compositor *c)
137 {
138 struct pipe_rasterizer_state rast;
139 struct pipe_sampler_state sampler;
140 struct pipe_blend_state blend;
141 struct pipe_depth_stencil_alpha_state dsa;
142 unsigned i;
143
144 assert(c);
145
146 c->fb_state.nr_cbufs = 1;
147 c->fb_state.zsbuf = NULL;
148
149 memset(&sampler, 0, sizeof(sampler));
150 sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
151 sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
152 sampler.wrap_r = PIPE_TEX_WRAP_REPEAT;
153 sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
154 sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
155 sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
156 sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
157 sampler.compare_func = PIPE_FUNC_ALWAYS;
158
159 if (c->pipe_gfx_supported) {
160 c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);
161
162 sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
163 sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
164 c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);
165
166 memset(&blend, 0, sizeof blend);
167 blend.independent_blend_enable = 0;
168 blend.rt[0].blend_enable = 0;
169 blend.logicop_enable = 0;
170 blend.logicop_func = PIPE_LOGICOP_CLEAR;
171 blend.rt[0].colormask = PIPE_MASK_RGBA;
172 blend.dither = 0;
173 c->blend_clear = c->pipe->create_blend_state(c->pipe, &blend);
174
175 blend.rt[0].blend_enable = 1;
176 blend.rt[0].rgb_func = PIPE_BLEND_ADD;
177 blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
178 blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
179 blend.rt[0].alpha_func = PIPE_BLEND_ADD;
180 blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
181 blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
182 c->blend_add = c->pipe->create_blend_state(c->pipe, &blend);
183
184 memset(&rast, 0, sizeof rast);
185 rast.flatshade = 0;
186 rast.front_ccw = 1;
187 rast.cull_face = PIPE_FACE_NONE;
188 rast.fill_back = PIPE_POLYGON_MODE_FILL;
189 rast.fill_front = PIPE_POLYGON_MODE_FILL;
190 rast.scissor = 1;
191 rast.line_width = 1;
192 rast.point_size_per_vertex = 1;
193 rast.offset_units = 1;
194 rast.offset_scale = 1;
195 rast.half_pixel_center = 1;
196 rast.bottom_edge_rule = 1;
197 rast.depth_clip_near = 1;
198 rast.depth_clip_far = 1;
199
200 c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);
201
202 memset(&dsa, 0, sizeof dsa);
203 dsa.depth_enabled = 0;
204 dsa.depth_writemask = 0;
205 dsa.depth_func = PIPE_FUNC_ALWAYS;
206 for (i = 0; i < 2; ++i) {
207 dsa.stencil[i].enabled = 0;
208 dsa.stencil[i].func = PIPE_FUNC_ALWAYS;
209 dsa.stencil[i].fail_op = PIPE_STENCIL_OP_KEEP;
210 dsa.stencil[i].zpass_op = PIPE_STENCIL_OP_KEEP;
211 dsa.stencil[i].zfail_op = PIPE_STENCIL_OP_KEEP;
212 dsa.stencil[i].valuemask = 0;
213 dsa.stencil[i].writemask = 0;
214 }
215 dsa.alpha_enabled = 0;
216 dsa.alpha_func = PIPE_FUNC_ALWAYS;
217 dsa.alpha_ref_value = 0;
218 c->dsa = c->pipe->create_depth_stencil_alpha_state(c->pipe, &dsa);
219 c->pipe->bind_depth_stencil_alpha_state(c->pipe, c->dsa);
220 }
221
222 return true;
223 }
224
cleanup_pipe_state(struct vl_compositor * c)225 static void cleanup_pipe_state(struct vl_compositor *c)
226 {
227 assert(c);
228
229 if (c->pipe_gfx_supported) {
230 /* Asserted in softpipe_delete_fs_state() for some reason */
231 c->pipe->bind_vs_state(c->pipe, NULL);
232 c->pipe->bind_fs_state(c->pipe, NULL);
233
234 c->pipe->delete_depth_stencil_alpha_state(c->pipe, c->dsa);
235 c->pipe->delete_blend_state(c->pipe, c->blend_clear);
236 c->pipe->delete_blend_state(c->pipe, c->blend_add);
237 c->pipe->delete_rasterizer_state(c->pipe, c->rast);
238 }
239 if (c->sampler_linear)
240 c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
241 if (c->sampler_nearest)
242 c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
243 }
244
245 static bool
init_buffers(struct vl_compositor * c)246 init_buffers(struct vl_compositor *c)
247 {
248 struct pipe_vertex_element vertex_elems[3];
249 memset(vertex_elems, 0, sizeof(vertex_elems));
250
251 assert(c);
252
253 /*
254 * Create our vertex buffer and vertex buffer elements
255 */
256 c->vertex_buf.buffer_offset = 0;
257 c->vertex_buf.buffer.resource = NULL;
258 c->vertex_buf.is_user_buffer = false;
259
260 if (c->pipe_gfx_supported) {
261 vertex_elems[0].src_offset = 0;
262 vertex_elems[0].src_stride = VL_COMPOSITOR_VB_STRIDE;
263 vertex_elems[0].instance_divisor = 0;
264 vertex_elems[0].vertex_buffer_index = 0;
265 vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
266 vertex_elems[1].src_offset = sizeof(struct vertex2f);
267 vertex_elems[1].src_stride = VL_COMPOSITOR_VB_STRIDE;
268 vertex_elems[1].instance_divisor = 0;
269 vertex_elems[1].vertex_buffer_index = 0;
270 vertex_elems[1].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
271 vertex_elems[2].src_offset = sizeof(struct vertex2f) + sizeof(struct vertex4f);
272 vertex_elems[2].src_stride = VL_COMPOSITOR_VB_STRIDE;
273 vertex_elems[2].instance_divisor = 0;
274 vertex_elems[2].vertex_buffer_index = 0;
275 vertex_elems[2].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
276 c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 3, vertex_elems);
277 }
278
279 return true;
280 }
281
282 static void
cleanup_buffers(struct vl_compositor * c)283 cleanup_buffers(struct vl_compositor *c)
284 {
285 assert(c);
286
287 if (c->pipe_gfx_supported) {
288 c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
289 }
290 pipe_resource_reference(&c->vertex_buf.buffer.resource, NULL);
291 }
292
293 static inline struct u_rect
default_rect(struct vl_compositor_layer * layer)294 default_rect(struct vl_compositor_layer *layer)
295 {
296 struct pipe_resource *res = layer->sampler_views[0]->texture;
297 struct u_rect rect = { 0, res->width0, 0, res->height0 * res->array_size };
298 return rect;
299 }
300
301 static inline struct vertex2f
calc_topleft(struct vertex2f size,struct u_rect rect)302 calc_topleft(struct vertex2f size, struct u_rect rect)
303 {
304 struct vertex2f res = { rect.x0 / size.x, rect.y0 / size.y };
305 return res;
306 }
307
308 static inline struct vertex2f
calc_bottomright(struct vertex2f size,struct u_rect rect)309 calc_bottomright(struct vertex2f size, struct u_rect rect)
310 {
311 struct vertex2f res = { rect.x1 / size.x, rect.y1 / size.y };
312 return res;
313 }
314
315 static inline void
calc_src_and_dst(struct vl_compositor_layer * layer,unsigned width,unsigned height,struct u_rect src,struct u_rect dst)316 calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
317 struct u_rect src, struct u_rect dst)
318 {
319 struct vertex2f size = { width, height };
320
321 layer->src.tl = calc_topleft(size, src);
322 layer->src.br = calc_bottomright(size, src);
323 layer->dst.tl = calc_topleft(size, dst);
324 layer->dst.br = calc_bottomright(size, dst);
325 layer->zw.x = 0.0f;
326 layer->zw.y = size.y;
327 }
328
329 static void
set_yuv_layer(struct vl_compositor_state * s,struct vl_compositor * c,unsigned layer,struct pipe_video_buffer * buffer,struct u_rect * src_rect,struct u_rect * dst_rect,bool y,enum vl_compositor_deinterlace deinterlace)330 set_yuv_layer(struct vl_compositor_state *s, struct vl_compositor *c,
331 unsigned layer, struct pipe_video_buffer *buffer,
332 struct u_rect *src_rect, struct u_rect *dst_rect,
333 bool y, enum vl_compositor_deinterlace deinterlace)
334 {
335 struct pipe_sampler_view **sampler_views;
336 float half_a_line;
337 unsigned i;
338
339 assert(s && c && buffer);
340
341 assert(layer < VL_COMPOSITOR_MAX_LAYERS);
342
343 if (!init_shaders(c))
344 return;
345
346 s->used_layers |= 1 << layer;
347 sampler_views = buffer->get_sampler_view_components(buffer);
348 for (i = 0; i < 3; ++i) {
349 s->layers[layer].samplers[i] = c->sampler_linear;
350 pipe_sampler_view_reference(&s->layers[layer].sampler_views[i], sampler_views[i]);
351 }
352
353 calc_src_and_dst(&s->layers[layer], buffer->width, buffer->height,
354 src_rect ? *src_rect : default_rect(&s->layers[layer]),
355 dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
356
357 half_a_line = 0.5f / s->layers[layer].zw.y;
358
359 switch(deinterlace) {
360 case VL_COMPOSITOR_BOB_TOP:
361 s->layers[layer].zw.x = 0.0f;
362 s->layers[layer].src.tl.y += half_a_line;
363 s->layers[layer].src.br.y += half_a_line;
364 if (c->pipe_gfx_supported)
365 s->layers[layer].fs = (y) ? c->fs_yuv.bob.y : c->fs_yuv.bob.uv;
366 if (c->pipe_cs_composit_supported)
367 s->layers[layer].cs = (y) ? c->cs_yuv.progressive.y : c->cs_yuv.progressive.uv;
368 break;
369
370 case VL_COMPOSITOR_BOB_BOTTOM:
371 s->layers[layer].zw.x = 1.0f;
372 s->layers[layer].src.tl.y -= half_a_line;
373 s->layers[layer].src.br.y -= half_a_line;
374 if (c->pipe_gfx_supported)
375 s->layers[layer].fs = (y) ? c->fs_yuv.bob.y : c->fs_yuv.bob.uv;
376 if (c->pipe_cs_composit_supported)
377 s->layers[layer].cs = (y) ? c->cs_yuv.progressive.y : c->cs_yuv.progressive.uv;
378 break;
379
380 case VL_COMPOSITOR_NONE:
381 if (c->pipe_cs_composit_supported) {
382 s->layers[layer].cs = (y) ? c->cs_yuv.progressive.y : c->cs_yuv.progressive.uv;
383 break;
384 }
385 FALLTHROUGH;
386
387 default:
388 if (c->pipe_gfx_supported)
389 s->layers[layer].fs = (y) ? c->fs_yuv.weave.y : c->fs_yuv.weave.uv;
390 if (c->pipe_cs_composit_supported)
391 s->layers[layer].cs = (y) ? c->cs_yuv.weave.y : c->cs_yuv.weave.uv;
392 break;
393 }
394 }
395
396 static void
set_rgb_to_yuv_layer(struct vl_compositor_state * s,struct vl_compositor * c,unsigned layer,struct pipe_sampler_view * v,struct u_rect * src_rect,struct u_rect * dst_rect,bool y)397 set_rgb_to_yuv_layer(struct vl_compositor_state *s, struct vl_compositor *c,
398 unsigned layer, struct pipe_sampler_view *v,
399 struct u_rect *src_rect, struct u_rect *dst_rect, bool y)
400 {
401 assert(s && c && v);
402
403 assert(layer < VL_COMPOSITOR_MAX_LAYERS);
404
405 if (!init_shaders(c))
406 return;
407
408 s->used_layers |= 1 << layer;
409
410 if (c->pipe_cs_composit_supported)
411 s->layers[layer].cs = y ? c->cs_rgb_yuv.y : c->cs_rgb_yuv.uv;
412 else if (c->pipe_gfx_supported)
413 s->layers[layer].fs = y ? c->fs_rgb_yuv.y : c->fs_rgb_yuv.uv;
414
415 s->layers[layer].samplers[0] = c->sampler_linear;
416 s->layers[layer].samplers[1] = NULL;
417 s->layers[layer].samplers[2] = NULL;
418
419 pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], v);
420 pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], NULL);
421 pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);
422
423 calc_src_and_dst(&s->layers[layer], v->texture->width0, v->texture->height0,
424 src_rect ? *src_rect : default_rect(&s->layers[layer]),
425 dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
426 }
427
428 void
vl_compositor_reset_dirty_area(struct u_rect * dirty)429 vl_compositor_reset_dirty_area(struct u_rect *dirty)
430 {
431 assert(dirty);
432
433 dirty->x0 = dirty->y0 = VL_COMPOSITOR_MIN_DIRTY;
434 dirty->x1 = dirty->y1 = VL_COMPOSITOR_MAX_DIRTY;
435 }
436
437 void
vl_compositor_set_clear_color(struct vl_compositor_state * s,union pipe_color_union * color)438 vl_compositor_set_clear_color(struct vl_compositor_state *s, union pipe_color_union *color)
439 {
440 assert(s);
441 assert(color);
442
443 s->clear_color = *color;
444 }
445
446 void
vl_compositor_get_clear_color(struct vl_compositor_state * s,union pipe_color_union * color)447 vl_compositor_get_clear_color(struct vl_compositor_state *s, union pipe_color_union *color)
448 {
449 assert(s);
450 assert(color);
451
452 *color = s->clear_color;
453 }
454
455 void
vl_compositor_clear_layers(struct vl_compositor_state * s)456 vl_compositor_clear_layers(struct vl_compositor_state *s)
457 {
458 unsigned i, j;
459
460 assert(s);
461 s->used_layers = 0;
462 for ( i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
463 struct vertex4f v_one = { 1.0f, 1.0f, 1.0f, 1.0f };
464 s->layers[i].clearing = i ? false : true;
465 s->layers[i].blend = NULL;
466 s->layers[i].fs = NULL;
467 s->layers[i].cs = NULL;
468 s->layers[i].viewport.scale[2] = 1;
469 s->layers[i].viewport.translate[2] = 0;
470 s->layers[i].viewport.swizzle_x = PIPE_VIEWPORT_SWIZZLE_POSITIVE_X;
471 s->layers[i].viewport.swizzle_y = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Y;
472 s->layers[i].viewport.swizzle_z = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Z;
473 s->layers[i].viewport.swizzle_w = PIPE_VIEWPORT_SWIZZLE_POSITIVE_W;
474 s->layers[i].rotate = VL_COMPOSITOR_ROTATE_0;
475
476 for ( j = 0; j < 3; j++)
477 pipe_sampler_view_reference(&s->layers[i].sampler_views[j], NULL);
478 for ( j = 0; j < 4; ++j)
479 s->layers[i].colors[j] = v_one;
480 }
481 }
482
483 void
vl_compositor_cleanup(struct vl_compositor * c)484 vl_compositor_cleanup(struct vl_compositor *c)
485 {
486 assert(c);
487
488 cleanup_buffers(c);
489 cleanup_shaders(c);
490 cleanup_pipe_state(c);
491 }
492
493 bool
vl_compositor_set_csc_matrix(struct vl_compositor_state * s,vl_csc_matrix const * matrix,float luma_min,float luma_max)494 vl_compositor_set_csc_matrix(struct vl_compositor_state *s,
495 vl_csc_matrix const *matrix,
496 float luma_min, float luma_max)
497 {
498 assert(s);
499
500 memcpy(&s->csc_matrix, matrix, sizeof(vl_csc_matrix));
501 s->luma_min = luma_min;
502 s->luma_max = luma_max;
503
504 return true;
505 }
506
507 void
vl_compositor_set_dst_clip(struct vl_compositor_state * s,struct u_rect * dst_clip)508 vl_compositor_set_dst_clip(struct vl_compositor_state *s, struct u_rect *dst_clip)
509 {
510 assert(s);
511
512 s->scissor_valid = dst_clip != NULL;
513 if (dst_clip) {
514 s->scissor.minx = dst_clip->x0;
515 s->scissor.miny = dst_clip->y0;
516 s->scissor.maxx = dst_clip->x1;
517 s->scissor.maxy = dst_clip->y1;
518 }
519 }
520
521 void
vl_compositor_set_layer_blend(struct vl_compositor_state * s,unsigned layer,void * blend,bool is_clearing)522 vl_compositor_set_layer_blend(struct vl_compositor_state *s,
523 unsigned layer, void *blend,
524 bool is_clearing)
525 {
526 assert(s && blend);
527
528 assert(layer < VL_COMPOSITOR_MAX_LAYERS);
529
530 s->layers[layer].clearing = is_clearing;
531 s->layers[layer].blend = blend;
532 }
533
534 void
vl_compositor_set_layer_dst_area(struct vl_compositor_state * s,unsigned layer,struct u_rect * dst_area)535 vl_compositor_set_layer_dst_area(struct vl_compositor_state *s,
536 unsigned layer, struct u_rect *dst_area)
537 {
538 assert(s);
539
540 assert(layer < VL_COMPOSITOR_MAX_LAYERS);
541
542 s->layers[layer].viewport_valid = dst_area != NULL;
543 if (dst_area) {
544 s->layers[layer].viewport.scale[0] = dst_area->x1 - dst_area->x0;
545 s->layers[layer].viewport.scale[1] = dst_area->y1 - dst_area->y0;
546 s->layers[layer].viewport.translate[0] = dst_area->x0;
547 s->layers[layer].viewport.translate[1] = dst_area->y0;
548 }
549 }
550
551 void
vl_compositor_set_buffer_layer(struct vl_compositor_state * s,struct vl_compositor * c,unsigned layer,struct pipe_video_buffer * buffer,struct u_rect * src_rect,struct u_rect * dst_rect,enum vl_compositor_deinterlace deinterlace)552 vl_compositor_set_buffer_layer(struct vl_compositor_state *s,
553 struct vl_compositor *c,
554 unsigned layer,
555 struct pipe_video_buffer *buffer,
556 struct u_rect *src_rect,
557 struct u_rect *dst_rect,
558 enum vl_compositor_deinterlace deinterlace)
559 {
560 struct pipe_sampler_view **sampler_views;
561 unsigned i;
562
563 assert(s && c && buffer);
564
565 assert(layer < VL_COMPOSITOR_MAX_LAYERS);
566
567 if (!init_shaders(c))
568 return;
569
570 s->used_layers |= 1 << layer;
571 sampler_views = buffer->get_sampler_view_components(buffer);
572 for (i = 0; i < 3; ++i) {
573 s->layers[layer].samplers[i] = c->sampler_linear;
574 pipe_sampler_view_reference(&s->layers[layer].sampler_views[i], sampler_views[i]);
575 }
576
577 calc_src_and_dst(&s->layers[layer], buffer->width, buffer->height,
578 src_rect ? *src_rect : default_rect(&s->layers[layer]),
579 dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
580
581 if (buffer->interlaced) {
582 float half_a_line = 0.5f / s->layers[layer].zw.y;
583 switch(deinterlace) {
584 case VL_COMPOSITOR_NONE:
585 case VL_COMPOSITOR_MOTION_ADAPTIVE:
586 case VL_COMPOSITOR_WEAVE:
587 if (c->pipe_cs_composit_supported)
588 s->layers[layer].cs = c->cs_weave_rgb;
589 else if (c->pipe_gfx_supported)
590 s->layers[layer].fs = c->fs_weave_rgb;
591 break;
592
593 case VL_COMPOSITOR_BOB_TOP:
594 s->layers[layer].zw.x = 0.0f;
595 s->layers[layer].src.tl.y += half_a_line;
596 s->layers[layer].src.br.y += half_a_line;
597 if (c->pipe_cs_composit_supported)
598 s->layers[layer].cs = c->cs_video_buffer;
599 else if (c->pipe_gfx_supported)
600 s->layers[layer].fs = c->fs_video_buffer;
601 break;
602
603 case VL_COMPOSITOR_BOB_BOTTOM:
604 s->layers[layer].zw.x = 1.0f;
605 s->layers[layer].src.tl.y -= half_a_line;
606 s->layers[layer].src.br.y -= half_a_line;
607 if (c->pipe_cs_composit_supported)
608 s->layers[layer].cs = c->cs_video_buffer;
609 else if (c->pipe_gfx_supported)
610 s->layers[layer].fs = c->fs_video_buffer;
611 break;
612 }
613
614 } else {
615 if (c->pipe_cs_composit_supported)
616 s->layers[layer].cs = c->cs_video_buffer;
617 else if (c->pipe_gfx_supported)
618 s->layers[layer].fs = c->fs_video_buffer;
619 }
620 }
621
622 void
vl_compositor_set_palette_layer(struct vl_compositor_state * s,struct vl_compositor * c,unsigned layer,struct pipe_sampler_view * indexes,struct pipe_sampler_view * palette,struct u_rect * src_rect,struct u_rect * dst_rect,bool include_color_conversion)623 vl_compositor_set_palette_layer(struct vl_compositor_state *s,
624 struct vl_compositor *c,
625 unsigned layer,
626 struct pipe_sampler_view *indexes,
627 struct pipe_sampler_view *palette,
628 struct u_rect *src_rect,
629 struct u_rect *dst_rect,
630 bool include_color_conversion)
631 {
632 assert(s && c && indexes && palette);
633
634 assert(layer < VL_COMPOSITOR_MAX_LAYERS);
635
636 if (!init_shaders(c))
637 return;
638
639 s->used_layers |= 1 << layer;
640
641 s->layers[layer].fs = include_color_conversion ?
642 c->fs_palette.yuv : c->fs_palette.rgb;
643
644 s->layers[layer].samplers[0] = c->sampler_linear;
645 s->layers[layer].samplers[1] = c->sampler_nearest;
646 s->layers[layer].samplers[2] = NULL;
647 pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], indexes);
648 pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], palette);
649 pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);
650 calc_src_and_dst(&s->layers[layer], indexes->texture->width0, indexes->texture->height0,
651 src_rect ? *src_rect : default_rect(&s->layers[layer]),
652 dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
653 }
654
655 void
vl_compositor_set_rgba_layer(struct vl_compositor_state * s,struct vl_compositor * c,unsigned layer,struct pipe_sampler_view * rgba,struct u_rect * src_rect,struct u_rect * dst_rect,struct vertex4f * colors)656 vl_compositor_set_rgba_layer(struct vl_compositor_state *s,
657 struct vl_compositor *c,
658 unsigned layer,
659 struct pipe_sampler_view *rgba,
660 struct u_rect *src_rect,
661 struct u_rect *dst_rect,
662 struct vertex4f *colors)
663 {
664 unsigned i;
665
666 assert(s && c && rgba);
667
668 assert(layer < VL_COMPOSITOR_MAX_LAYERS);
669
670 if (!init_shaders(c))
671 return;
672
673 s->used_layers |= 1 << layer;
674 s->layers[layer].fs = c->fs_rgba;
675 s->layers[layer].samplers[0] = c->sampler_linear;
676 s->layers[layer].samplers[1] = NULL;
677 s->layers[layer].samplers[2] = NULL;
678 pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], rgba);
679 pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], NULL);
680 pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);
681 calc_src_and_dst(&s->layers[layer], rgba->texture->width0, rgba->texture->height0,
682 src_rect ? *src_rect : default_rect(&s->layers[layer]),
683 dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
684
685 if (colors)
686 for (i = 0; i < 4; ++i)
687 s->layers[layer].colors[i] = colors[i];
688 }
689
690 void
vl_compositor_set_layer_rotation(struct vl_compositor_state * s,unsigned layer,enum vl_compositor_rotation rotate)691 vl_compositor_set_layer_rotation(struct vl_compositor_state *s,
692 unsigned layer,
693 enum vl_compositor_rotation rotate)
694 {
695 assert(s);
696 assert(layer < VL_COMPOSITOR_MAX_LAYERS);
697 s->layers[layer].rotate = rotate;
698 }
699
700 void
vl_compositor_yuv_deint_full(struct vl_compositor_state * s,struct vl_compositor * c,struct pipe_video_buffer * src,struct pipe_video_buffer * dst,struct u_rect * src_rect,struct u_rect * dst_rect,enum vl_compositor_deinterlace deinterlace)701 vl_compositor_yuv_deint_full(struct vl_compositor_state *s,
702 struct vl_compositor *c,
703 struct pipe_video_buffer *src,
704 struct pipe_video_buffer *dst,
705 struct u_rect *src_rect,
706 struct u_rect *dst_rect,
707 enum vl_compositor_deinterlace deinterlace)
708 {
709 struct pipe_surface **dst_surfaces;
710
711 dst_surfaces = dst->get_surfaces(dst);
712 vl_compositor_clear_layers(s);
713
714 set_yuv_layer(s, c, 0, src, src_rect, NULL, true, deinterlace);
715 vl_compositor_set_layer_dst_area(s, 0, dst_rect);
716 vl_compositor_render(s, c, dst_surfaces[0], NULL, false);
717
718 if (dst_rect) {
719 dst_rect->x0 /= 2;
720 dst_rect->y0 /= 2;
721 dst_rect->x1 /= 2;
722 dst_rect->y1 /= 2;
723 }
724
725 set_yuv_layer(s, c, 0, src, src_rect, NULL, false, deinterlace);
726 vl_compositor_set_layer_dst_area(s, 0, dst_rect);
727 vl_compositor_render(s, c, dst_surfaces[1], NULL, false);
728
729 s->pipe->flush(s->pipe, NULL, 0);
730 }
731
732 void
vl_compositor_convert_rgb_to_yuv(struct vl_compositor_state * s,struct vl_compositor * c,unsigned layer,struct pipe_resource * src_res,struct pipe_video_buffer * dst,struct u_rect * src_rect,struct u_rect * dst_rect)733 vl_compositor_convert_rgb_to_yuv(struct vl_compositor_state *s,
734 struct vl_compositor *c,
735 unsigned layer,
736 struct pipe_resource *src_res,
737 struct pipe_video_buffer *dst,
738 struct u_rect *src_rect,
739 struct u_rect *dst_rect)
740 {
741 struct pipe_sampler_view *sv, sv_templ;
742 struct pipe_surface **dst_surfaces;
743
744 dst_surfaces = dst->get_surfaces(dst);
745
746 memset(&sv_templ, 0, sizeof(sv_templ));
747 u_sampler_view_default_template(&sv_templ, src_res, src_res->format);
748 sv = s->pipe->create_sampler_view(s->pipe, src_res, &sv_templ);
749
750 vl_compositor_clear_layers(s);
751
752 set_rgb_to_yuv_layer(s, c, 0, sv, src_rect, NULL, true);
753 vl_compositor_set_layer_dst_area(s, 0, dst_rect);
754 vl_compositor_render(s, c, dst_surfaces[0], NULL, false);
755
756 if (dst_rect) {
757 dst_rect->x0 /= 2;
758 dst_rect->y0 /= 2;
759 dst_rect->x1 /= 2;
760 dst_rect->y1 /= 2;
761 }
762
763 set_rgb_to_yuv_layer(s, c, 0, sv, src_rect, NULL, false);
764 vl_compositor_set_layer_dst_area(s, 0, dst_rect);
765 vl_compositor_render(s, c, dst_surfaces[1], NULL, false);
766 pipe_sampler_view_reference(&sv, NULL);
767
768 s->pipe->flush(s->pipe, NULL, 0);
769 }
770
771 void
vl_compositor_render(struct vl_compositor_state * s,struct vl_compositor * c,struct pipe_surface * dst_surface,struct u_rect * dirty_area,bool clear_dirty)772 vl_compositor_render(struct vl_compositor_state *s,
773 struct vl_compositor *c,
774 struct pipe_surface *dst_surface,
775 struct u_rect *dirty_area,
776 bool clear_dirty)
777 {
778 assert(s);
779
780 if (s->layers->cs)
781 vl_compositor_cs_render(s, c, dst_surface, dirty_area, clear_dirty);
782 else if (s->layers->fs)
783 vl_compositor_gfx_render(s, c, dst_surface, dirty_area, clear_dirty);
784 else
785 debug_warning("Hardware don't support.\n");;
786 }
787
788 bool
vl_compositor_init(struct vl_compositor * c,struct pipe_context * pipe)789 vl_compositor_init(struct vl_compositor *c, struct pipe_context *pipe)
790 {
791 assert(c);
792
793 memset(c, 0, sizeof(*c));
794
795 c->pipe_cs_composit_supported = pipe->screen->get_param(pipe->screen, PIPE_CAP_PREFER_COMPUTE_FOR_MULTIMEDIA) &&
796 pipe->screen->get_param(pipe->screen, PIPE_CAP_TGSI_TEX_TXF_LZ) &&
797 pipe->screen->get_param(pipe->screen, PIPE_CAP_TGSI_DIV);
798
799 c->pipe_gfx_supported = pipe->screen->get_param(pipe->screen, PIPE_CAP_GRAPHICS);
800 c->pipe = pipe;
801
802 c->deinterlace = VL_COMPOSITOR_NONE;
803
804 if (!init_pipe_state(c)) {
805 return false;
806 }
807
808 if (!init_buffers(c)) {
809 cleanup_shaders(c);
810 cleanup_pipe_state(c);
811 return false;
812 }
813
814 return true;
815 }
816
817 bool
vl_compositor_init_state(struct vl_compositor_state * s,struct pipe_context * pipe)818 vl_compositor_init_state(struct vl_compositor_state *s, struct pipe_context *pipe)
819 {
820 vl_csc_matrix csc_matrix;
821
822 assert(s);
823
824 memset(s, 0, sizeof(*s));
825
826 s->pipe = pipe;
827
828 s->clear_color.f[0] = s->clear_color.f[1] = 0.0f;
829 s->clear_color.f[2] = s->clear_color.f[3] = 0.0f;
830
831 /*
832 * Create our fragment shader's constant buffer
833 * Const buffer contains the color conversion matrix and bias vectors
834 */
835 /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
836 s->shader_params = pipe_buffer_create_const0
837 (
838 pipe->screen,
839 PIPE_BIND_CONSTANT_BUFFER,
840 PIPE_USAGE_DEFAULT,
841 sizeof(csc_matrix) + 16*sizeof(float) + 2*sizeof(int)
842 );
843
844 if (!s->shader_params)
845 return false;
846
847 vl_compositor_clear_layers(s);
848
849 vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, &csc_matrix);
850 if (!vl_compositor_set_csc_matrix(s, (const vl_csc_matrix *)&csc_matrix, 1.0f, 0.0f))
851 return false;
852
853 return true;
854 }
855
856 void
vl_compositor_cleanup_state(struct vl_compositor_state * s)857 vl_compositor_cleanup_state(struct vl_compositor_state *s)
858 {
859 assert(s);
860
861 vl_compositor_clear_layers(s);
862 pipe_resource_reference(&s->shader_params, NULL);
863 }
864