xref: /aosp_15_r20/external/virglrenderer/src/venus/vkr_cs.c (revision bbecb9d118dfdb95f99bd754f8fa9be01f189df3)
1 /*
2  * Copyright 2021 Google LLC
3  * SPDX-License-Identifier: MIT
4  */
5 
6 #include "vkr_cs.h"
7 
8 #include "vrend_iov.h"
9 
10 #include "vkr_context.h"
11 
12 void
vkr_cs_encoder_set_stream(struct vkr_cs_encoder * enc,const struct vkr_resource_attachment * att,size_t offset,size_t size)13 vkr_cs_encoder_set_stream(struct vkr_cs_encoder *enc,
14                           const struct vkr_resource_attachment *att,
15                           size_t offset,
16                           size_t size)
17 {
18    if (!att) {
19       memset(&enc->stream, 0, sizeof(enc->stream));
20       enc->remaining_size = 0;
21       enc->next_iov = 0;
22       enc->cur = NULL;
23       enc->end = NULL;
24       return;
25    }
26 
27    enc->stream.attachment = att;
28    enc->stream.iov = att->iov;
29    enc->stream.iov_count = att->iov_count;
30    enc->stream.offset = offset;
31    enc->stream.size = size;
32    /* clear cache */
33    enc->stream.cached_index = 0;
34    enc->stream.cached_offset = 0;
35 
36    vkr_cs_encoder_seek_stream(enc, 0);
37 }
38 
39 static bool
vkr_cs_encoder_translate_stream_offset(struct vkr_cs_encoder * enc,size_t offset,int * iov_index,size_t * iov_offset)40 vkr_cs_encoder_translate_stream_offset(struct vkr_cs_encoder *enc,
41                                        size_t offset,
42                                        int *iov_index,
43                                        size_t *iov_offset)
44 {
45    int idx = 0;
46 
47    /* use or clear cache */
48    if (offset >= enc->stream.cached_offset) {
49       offset -= enc->stream.cached_offset;
50       idx = enc->stream.cached_index;
51    } else {
52       enc->stream.cached_index = 0;
53       enc->stream.cached_offset = 0;
54    }
55 
56    while (true) {
57       if (idx >= enc->stream.iov_count)
58          return false;
59 
60       const struct iovec *iov = &enc->stream.iov[idx];
61       if (offset < iov->iov_len)
62          break;
63 
64       idx++;
65       offset -= iov->iov_len;
66 
67       /* update cache */
68       enc->stream.cached_index++;
69       enc->stream.cached_offset += iov->iov_len;
70    }
71 
72    *iov_index = idx;
73    *iov_offset = offset;
74 
75    return true;
76 }
77 
78 static void
vkr_cs_encoder_update_end(struct vkr_cs_encoder * enc)79 vkr_cs_encoder_update_end(struct vkr_cs_encoder *enc)
80 {
81    const struct iovec *iov = &enc->stream.iov[enc->next_iov - 1];
82    const size_t iov_offset = enc->cur - (uint8_t *)iov->iov_base;
83    const size_t iov_remain = iov->iov_len - iov_offset;
84 
85    if (enc->remaining_size >= iov_remain) {
86       enc->end = enc->cur + iov_remain;
87       enc->remaining_size -= iov_remain;
88    } else {
89       enc->end = enc->cur + enc->remaining_size;
90       enc->remaining_size = 0;
91    }
92 }
93 
94 void
vkr_cs_encoder_seek_stream(struct vkr_cs_encoder * enc,size_t pos)95 vkr_cs_encoder_seek_stream(struct vkr_cs_encoder *enc, size_t pos)
96 {
97    const size_t offset = enc->stream.offset + pos;
98    int iov_index;
99    size_t iov_offset;
100    if (pos > enc->stream.size ||
101        !vkr_cs_encoder_translate_stream_offset(enc, offset, &iov_index, &iov_offset)) {
102       vkr_log("failed to seek the reply stream to %zu", pos);
103       vkr_cs_encoder_set_fatal(enc);
104       return;
105    }
106 
107    enc->remaining_size = enc->stream.size - pos;
108    enc->next_iov = iov_index + 1;
109 
110    const struct iovec *iov = &enc->stream.iov[iov_index];
111    enc->cur = iov->iov_base;
112    enc->cur += iov_offset;
113 
114    vkr_cs_encoder_update_end(enc);
115 }
116 
117 static bool
vkr_cs_encoder_next_iov(struct vkr_cs_encoder * enc)118 vkr_cs_encoder_next_iov(struct vkr_cs_encoder *enc)
119 {
120    if (enc->next_iov >= enc->stream.iov_count)
121       return false;
122 
123    const struct iovec *iov = &enc->stream.iov[enc->next_iov++];
124    enc->cur = iov->iov_base;
125    vkr_cs_encoder_update_end(enc);
126 
127    return true;
128 }
129 
130 static uint8_t *
vkr_cs_encoder_get_ptr(struct vkr_cs_encoder * enc,size_t size,size_t * ptr_size)131 vkr_cs_encoder_get_ptr(struct vkr_cs_encoder *enc, size_t size, size_t *ptr_size)
132 {
133    while (true) {
134       uint8_t *ptr = enc->cur;
135       const size_t avail = enc->end - enc->cur;
136 
137       if (avail) {
138          *ptr_size = MIN2(size, avail);
139          enc->cur += *ptr_size;
140          return ptr;
141       }
142 
143       if (!vkr_cs_encoder_next_iov(enc)) {
144          *ptr_size = 0;
145          return size ? NULL : ptr;
146       }
147    }
148 }
149 
void
vkr_cs_encoder_write_internal(struct vkr_cs_encoder *enc,
                              size_t size,
                              const void *val,
                              size_t val_size)
{
   /* the encoded object occupies `size` bytes: val_size bytes of data
    * followed by `size - val_size` bytes of padding
    */
   size_t pad_size = size - val_size;
   const uint8_t *src = val;

   /* copy the value, which may straddle multiple iovs; the do-while also
    * handles val_size == 0 (the zero-sized get_ptr still succeeds)
    */
   do {
      size_t chunk;
      uint8_t *dst = vkr_cs_encoder_get_ptr(enc, val_size, &chunk);
      if (unlikely(!dst)) {
         vkr_log("failed to write value to the reply stream");
         vkr_cs_encoder_set_fatal(enc);
         return;
      }

      memcpy(dst, src, chunk);
      src += chunk;
      val_size -= chunk;
   } while (val_size);

   /* skip over the padding without writing it */
   while (pad_size) {
      size_t chunk;
      const void *dst = vkr_cs_encoder_get_ptr(enc, pad_size, &chunk);
      if (unlikely(!dst)) {
         vkr_log("failed to write padding to the reply stream");
         vkr_cs_encoder_set_fatal(enc);
         return;
      }
      pad_size -= chunk;
   }
}
183 
/* Initialize a decoder: zero all state (temp pool, saved-state stack,
 * cursors, fatal flag) and bind it to the context's object table, which is
 * borrowed, not owned.
 */
void
vkr_cs_decoder_init(struct vkr_cs_decoder *dec, const struct hash_table *object_table)
{
   memset(dec, 0, sizeof(*dec));
   dec->object_table = object_table;
}
190 
191 void
vkr_cs_decoder_fini(struct vkr_cs_decoder * dec)192 vkr_cs_decoder_fini(struct vkr_cs_decoder *dec)
193 {
194    struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
195    for (uint32_t i = 0; i < pool->buffer_count; i++)
196       free(pool->buffers[i]);
197    if (pool->buffers)
198       free(pool->buffers);
199 }
200 
/* Assert the internal invariants of the decoder and its temp pool. */
static void
vkr_cs_decoder_sanity_check(const struct vkr_cs_decoder *dec)
{
   const struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   /* the buffer array never exceeds its allocated capacity */
   assert(pool->buffer_count <= pool->buffer_max);
   if (pool->buffer_count) {
      /* reset_to and cur point into the last (current) buffer, in order */
      assert(pool->buffers[pool->buffer_count - 1] <= pool->reset_to);
      assert(pool->reset_to <= pool->cur);
      assert(pool->cur <= pool->end);
   }

   /* the decode cursor never runs past the end of the command stream */
   assert(dec->cur <= dec->end);
}
214 
215 static void
vkr_cs_decoder_gc_temp_pool(struct vkr_cs_decoder * dec)216 vkr_cs_decoder_gc_temp_pool(struct vkr_cs_decoder *dec)
217 {
218    struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
219    if (!pool->buffer_count)
220       return;
221 
222    /* free all but the last buffer */
223    if (pool->buffer_count > 1) {
224       for (uint32_t i = 0; i < pool->buffer_count - 1; i++)
225          free(pool->buffers[i]);
226 
227       pool->buffers[0] = pool->buffers[pool->buffer_count - 1];
228       pool->buffer_count = 1;
229    }
230 
231    pool->reset_to = pool->buffers[0];
232    pool->cur = pool->buffers[0];
233 
234    pool->total_size = pool->end - pool->cur;
235 
236    vkr_cs_decoder_sanity_check(dec);
237 }
238 
239 /**
240  * Reset a decoder for reuse.
241  */
242 void
vkr_cs_decoder_reset(struct vkr_cs_decoder * dec)243 vkr_cs_decoder_reset(struct vkr_cs_decoder *dec)
244 {
245    /* dec->fatal_error is sticky */
246 
247    vkr_cs_decoder_gc_temp_pool(dec);
248 
249    dec->saved_state_count = 0;
250    dec->cur = NULL;
251    dec->end = NULL;
252 }
253 
254 bool
vkr_cs_decoder_push_state(struct vkr_cs_decoder * dec)255 vkr_cs_decoder_push_state(struct vkr_cs_decoder *dec)
256 {
257    struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
258    struct vkr_cs_decoder_saved_state *saved;
259 
260    if (dec->saved_state_count >= ARRAY_SIZE(dec->saved_states))
261       return false;
262 
263    saved = &dec->saved_states[dec->saved_state_count++];
264    saved->cur = dec->cur;
265    saved->end = dec->end;
266 
267    saved->pool_buffer_count = pool->buffer_count;
268    saved->pool_reset_to = pool->reset_to;
269    /* avoid temp data corruption */
270    pool->reset_to = pool->cur;
271 
272    vkr_cs_decoder_sanity_check(dec);
273 
274    return true;
275 }
276 
277 void
vkr_cs_decoder_pop_state(struct vkr_cs_decoder * dec)278 vkr_cs_decoder_pop_state(struct vkr_cs_decoder *dec)
279 {
280    struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
281    const struct vkr_cs_decoder_saved_state *saved;
282 
283    assert(dec->saved_state_count);
284    saved = &dec->saved_states[--dec->saved_state_count];
285    dec->cur = saved->cur;
286    dec->end = saved->end;
287 
288    /* restore only if pool->reset_to points to the same buffer */
289    if (pool->buffer_count == saved->pool_buffer_count)
290       pool->reset_to = saved->pool_reset_to;
291 
292    vkr_cs_decoder_sanity_check(dec);
293 }
294 
/* Return the next capacity for a growing array: min_size when empty,
 * otherwise double the current size.  Returns 0 on uint32_t overflow.
 */
static uint32_t
next_array_size(uint32_t cur_size, uint32_t min_size)
{
   if (!cur_size)
      return min_size;

   const uint32_t doubled = cur_size * 2;
   /* unsigned wrap-around means the array cannot grow any further */
   return doubled > cur_size ? doubled : 0;
}
301 
/* Return the next buffer size: grow geometrically from min_size (or double
 * cur_size) until `need` fits.  Returns 0 on size_t overflow.
 */
static size_t
next_buffer_size(size_t cur_size, size_t min_size, size_t need)
{
   size_t size = cur_size ? cur_size * 2 : min_size;
   /* keep doubling until need fits; a wrap to 0 terminates the loop */
   while (size && size < need)
      size *= 2;
   return size < need ? 0 : size;
}
313 
314 static bool
vkr_cs_decoder_grow_temp_pool(struct vkr_cs_decoder * dec)315 vkr_cs_decoder_grow_temp_pool(struct vkr_cs_decoder *dec)
316 {
317    struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
318    const uint32_t buf_max = next_array_size(pool->buffer_max, 4);
319    if (!buf_max)
320       return false;
321 
322    uint8_t **bufs = realloc(pool->buffers, sizeof(*pool->buffers) * buf_max);
323    if (!bufs)
324       return false;
325 
326    pool->buffers = bufs;
327    pool->buffer_max = buf_max;
328 
329    return true;
330 }
331 
/* Push a fresh buffer onto the temp pool large enough for `size` bytes and
 * make it the current allocation arena (reset_to/cur/end all point into it).
 * Returns false on overflow, on exceeding the pool-wide cap, or on OOM;
 * the pool is left unchanged on failure.
 */
bool
vkr_cs_decoder_alloc_temp_internal(struct vkr_cs_decoder *dec, size_t size)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;

   /* make room in the buffer pointer array when it is full */
   if (pool->buffer_count >= pool->buffer_max) {
      if (!vkr_cs_decoder_grow_temp_pool(dec))
         return false;
      assert(pool->buffer_count < pool->buffer_max);
   }

   /* grow geometrically from the size of the current (last) buffer,
    * starting at 4096; 0 signals size_t overflow
    */
   const size_t cur_buf_size =
      pool->buffer_count ? pool->end - pool->buffers[pool->buffer_count - 1] : 0;
   const size_t buf_size = next_buffer_size(cur_buf_size, 4096, size);
   if (!buf_size)
      return false;

   /* enforce the overall cap on temp-pool memory; written so the
    * subtraction cannot underflow
    */
   if (buf_size > VKR_CS_DECODER_TEMP_POOL_MAX_SIZE - pool->total_size)
      return false;

   uint8_t *buf = malloc(buf_size);
   if (!buf)
      return false;

   pool->total_size += buf_size;
   pool->buffers[pool->buffer_count++] = buf;
   /* subsequent temp allocations are served from the new buffer */
   pool->reset_to = buf;
   pool->cur = buf;
   pool->end = buf + buf_size;

   vkr_cs_decoder_sanity_check(dec);

   return true;
}
366