1 // Copyright 2019 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #![allow(dead_code)]
6 #![allow(non_camel_case_types)]
7
8 use std::cmp::min;
9 use std::convert::From;
10 use std::fmt;
11 use std::fmt::Display;
12 use std::io;
13 use std::io::Write;
14 use std::marker::PhantomData;
15 use std::mem::size_of;
16 use std::mem::size_of_val;
17 use std::str::from_utf8;
18
19 use base::Error as BaseError;
20 use base::TubeError;
21 use data_model::Le32;
22 use data_model::Le64;
23 use gpu_display::GpuDisplayError;
24 use remain::sorted;
25 use rutabaga_gfx::RutabagaError;
26 use thiserror::Error;
27 use vm_memory::udmabuf::UdmabufError;
28 use zerocopy::AsBytes;
29 use zerocopy::FromBytes;
30 use zerocopy::FromZeroes;
31
32 pub use super::super::device_constants::gpu::virtio_gpu_config;
33 pub use super::super::device_constants::gpu::VIRTIO_GPU_F_CONTEXT_INIT;
34 pub use super::super::device_constants::gpu::VIRTIO_GPU_F_CREATE_GUEST_HANDLE;
35 pub use super::super::device_constants::gpu::VIRTIO_GPU_F_EDID;
36 pub use super::super::device_constants::gpu::VIRTIO_GPU_F_FENCE_PASSING;
37 pub use super::super::device_constants::gpu::VIRTIO_GPU_F_RESOURCE_BLOB;
38 pub use super::super::device_constants::gpu::VIRTIO_GPU_F_RESOURCE_UUID;
39 pub use super::super::device_constants::gpu::VIRTIO_GPU_F_VIRGL;
40 use super::edid::EdidBytes;
41 use super::Reader;
42 use super::Writer;
43
pub const VIRTIO_GPU_UNDEFINED: u32 = 0x0;

/* 2d commands */
pub const VIRTIO_GPU_CMD_GET_DISPLAY_INFO: u32 = 0x100;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: u32 = 0x101;
pub const VIRTIO_GPU_CMD_RESOURCE_UNREF: u32 = 0x102;
pub const VIRTIO_GPU_CMD_SET_SCANOUT: u32 = 0x103;
pub const VIRTIO_GPU_CMD_RESOURCE_FLUSH: u32 = 0x104;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: u32 = 0x105;
pub const VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING: u32 = 0x106;
pub const VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING: u32 = 0x107;
pub const VIRTIO_GPU_CMD_GET_CAPSET_INFO: u32 = 0x108;
pub const VIRTIO_GPU_CMD_GET_CAPSET: u32 = 0x109;
pub const VIRTIO_GPU_CMD_GET_EDID: u32 = 0x10a;
pub const VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID: u32 = 0x10b;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB: u32 = 0x10c;
pub const VIRTIO_GPU_CMD_SET_SCANOUT_BLOB: u32 = 0x10d;

/* 3d commands */
pub const VIRTIO_GPU_CMD_CTX_CREATE: u32 = 0x200;
pub const VIRTIO_GPU_CMD_CTX_DESTROY: u32 = 0x201;
pub const VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE: u32 = 0x202;
pub const VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE: u32 = 0x203;
pub const VIRTIO_GPU_CMD_RESOURCE_CREATE_3D: u32 = 0x204;
pub const VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D: u32 = 0x205;
pub const VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D: u32 = 0x206;
pub const VIRTIO_GPU_CMD_SUBMIT_3D: u32 = 0x207;
pub const VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB: u32 = 0x208;
pub const VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB: u32 = 0x209;

/* cursor commands */
pub const VIRTIO_GPU_CMD_UPDATE_CURSOR: u32 = 0x300;
pub const VIRTIO_GPU_CMD_MOVE_CURSOR: u32 = 0x301;

/* success responses */
pub const VIRTIO_GPU_RESP_OK_NODATA: u32 = 0x1100;
pub const VIRTIO_GPU_RESP_OK_DISPLAY_INFO: u32 = 0x1101;
pub const VIRTIO_GPU_RESP_OK_CAPSET_INFO: u32 = 0x1102;
pub const VIRTIO_GPU_RESP_OK_CAPSET: u32 = 0x1103;
pub const VIRTIO_GPU_RESP_OK_EDID: u32 = 0x1104;
pub const VIRTIO_GPU_RESP_OK_RESOURCE_UUID: u32 = 0x1105;
pub const VIRTIO_GPU_RESP_OK_MAP_INFO: u32 = 0x1106;

/* CHROMIUM(b/277982577): success responses */
pub const VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO: u32 = 0x11FF;

/* error responses */
pub const VIRTIO_GPU_RESP_ERR_UNSPEC: u32 = 0x1200;
pub const VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY: u32 = 0x1201;
pub const VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID: u32 = 0x1202;
pub const VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID: u32 = 0x1203;
pub const VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID: u32 = 0x1204;
pub const VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER: u32 = 0x1205;

pub const VIRTIO_GPU_BLOB_MEM_GUEST: u32 = 0x0001;
pub const VIRTIO_GPU_BLOB_MEM_HOST3D: u32 = 0x0002;
pub const VIRTIO_GPU_BLOB_MEM_HOST3D_GUEST: u32 = 0x0003;

pub const VIRTIO_GPU_BLOB_FLAG_USE_MAPPABLE: u32 = 0x0001;
pub const VIRTIO_GPU_BLOB_FLAG_USE_SHAREABLE: u32 = 0x0002;
pub const VIRTIO_GPU_BLOB_FLAG_USE_CROSS_DEVICE: u32 = 0x0004;
/* Create a OS-specific handle from guest memory (not upstreamed). */
pub const VIRTIO_GPU_BLOB_FLAG_CREATE_GUEST_HANDLE: u32 = 0x0008;

pub const VIRTIO_GPU_SHM_ID_NONE: u8 = 0x0000;
pub const VIRTIO_GPU_SHM_ID_HOST_VISIBLE: u8 = 0x0001;

/// Returns the canonical constant name for a `VIRTIO_GPU_CMD_*` / `VIRTIO_GPU_RESP_*` code,
/// or `"UNKNOWN"` for unrecognized values. Intended for logging and debug output.
pub fn virtio_gpu_cmd_str(cmd: u32) -> &'static str {
    match cmd {
        VIRTIO_GPU_CMD_GET_DISPLAY_INFO => "VIRTIO_GPU_CMD_GET_DISPLAY_INFO",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_2D",
        VIRTIO_GPU_CMD_RESOURCE_UNREF => "VIRTIO_GPU_CMD_RESOURCE_UNREF",
        VIRTIO_GPU_CMD_SET_SCANOUT => "VIRTIO_GPU_CMD_SET_SCANOUT",
        VIRTIO_GPU_CMD_SET_SCANOUT_BLOB => "VIRTIO_GPU_CMD_SET_SCANOUT_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_FLUSH => "VIRTIO_GPU_CMD_RESOURCE_FLUSH",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D",
        VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING",
        VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => "VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING",
        VIRTIO_GPU_CMD_GET_CAPSET_INFO => "VIRTIO_GPU_CMD_GET_CAPSET_INFO",
        VIRTIO_GPU_CMD_GET_CAPSET => "VIRTIO_GPU_CMD_GET_CAPSET",
        VIRTIO_GPU_CMD_GET_EDID => "VIRTIO_GPU_CMD_GET_EDID",
        VIRTIO_GPU_CMD_CTX_CREATE => "VIRTIO_GPU_CMD_CTX_CREATE",
        VIRTIO_GPU_CMD_CTX_DESTROY => "VIRTIO_GPU_CMD_CTX_DESTROY",
        VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE",
        VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => "VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE",
        VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID => "VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB => "VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => "VIRTIO_GPU_CMD_RESOURCE_CREATE_3D",
        VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D",
        VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => "VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D",
        VIRTIO_GPU_CMD_SUBMIT_3D => "VIRTIO_GPU_CMD_SUBMIT_3D",
        // Fixed: these previously returned names missing the `CMD_` segment.
        VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB => "VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB",
        VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB => "VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB",
        VIRTIO_GPU_CMD_UPDATE_CURSOR => "VIRTIO_GPU_CMD_UPDATE_CURSOR",
        VIRTIO_GPU_CMD_MOVE_CURSOR => "VIRTIO_GPU_CMD_MOVE_CURSOR",
        VIRTIO_GPU_RESP_OK_NODATA => "VIRTIO_GPU_RESP_OK_NODATA",
        VIRTIO_GPU_RESP_OK_DISPLAY_INFO => "VIRTIO_GPU_RESP_OK_DISPLAY_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET_INFO => "VIRTIO_GPU_RESP_OK_CAPSET_INFO",
        VIRTIO_GPU_RESP_OK_CAPSET => "VIRTIO_GPU_RESP_OK_CAPSET",
        // Fixed: this arm was missing, so OK_EDID responses logged as "UNKNOWN".
        VIRTIO_GPU_RESP_OK_EDID => "VIRTIO_GPU_RESP_OK_EDID",
        VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO => "VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO",
        VIRTIO_GPU_RESP_OK_RESOURCE_UUID => "VIRTIO_GPU_RESP_OK_RESOURCE_UUID",
        VIRTIO_GPU_RESP_OK_MAP_INFO => "VIRTIO_GPU_RESP_OK_MAP_INFO",
        VIRTIO_GPU_RESP_ERR_UNSPEC => "VIRTIO_GPU_RESP_ERR_UNSPEC",
        VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY => "VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY",
        VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID => "VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID => "VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID",
        VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER => "VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER",
        _ => "UNKNOWN",
    }
}
155
/* Flags for virtio_gpu_ctrl_hdr.flags */
pub const VIRTIO_GPU_FLAG_FENCE: u32 = 1 << 0;
pub const VIRTIO_GPU_FLAG_INFO_RING_IDX: u32 = 1 << 1;
pub const VIRTIO_GPU_FLAG_FENCE_HOST_SHAREABLE: u32 = 1 << 2;
159
/// Common header prepended to every virtio-gpu command and response on the wire.
#[derive(Copy, Clone, Debug, Default, AsBytes, FromZeroes, FromBytes)]
#[repr(C)]
pub struct virtio_gpu_ctrl_hdr {
    // One of the VIRTIO_GPU_CMD_* / VIRTIO_GPU_RESP_* codes above.
    pub type_: Le32,
    // VIRTIO_GPU_FLAG_* bits.
    pub flags: Le32,
    pub fence_id: Le64,
    pub ctx_id: Le32,
    // Only meaningful when VIRTIO_GPU_FLAG_INFO_RING_IDX is set in `flags`.
    pub ring_idx: u8,
    pub padding: [u8; 3],
}
170
/* data passed in the cursor vq */

/// Cursor position on a given scanout.
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_cursor_pos {
    pub scanout_id: Le32,
    pub x: Le32,
    pub y: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_UPDATE_CURSOR, VIRTIO_GPU_CMD_MOVE_CURSOR */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_update_cursor {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pos: virtio_gpu_cursor_pos, /* update & move */
    pub resource_id: Le32,          /* update only */
    pub hot_x: Le32,                /* update only */
    pub hot_y: Le32,                /* update only */
    pub padding: Le32,
}

/* data passed in the control vq, 2d related */

/// Axis-aligned rectangle used by scanout/flush/transfer commands.
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_rect {
    pub x: Le32,
    pub y: Le32,
    pub width: Le32,
    pub height: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_UNREF */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_unref {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_CREATE_2D: create a 2d resource with a format */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_create_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    // One of the VIRTIO_GPU_FORMAT_* values.
    pub format: Le32,
    pub width: Le32,
    pub height: Le32,
}

/* VIRTIO_GPU_CMD_SET_SCANOUT */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_set_scanout {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_FLUSH */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_flush {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D: simple transfer to_host */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_transfer_to_host_2d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    // Byte offset into the resource's backing memory.
    pub offset: Le64,
    pub resource_id: Le32,
    pub padding: Le32,
}
255
/// One guest-physical memory region backing a resource
/// (an element of the array following `virtio_gpu_resource_attach_backing`).
#[derive(Copy, Clone, Debug, Default, AsBytes, FromZeroes, FromBytes)]
#[repr(C)]
pub struct virtio_gpu_mem_entry {
    pub addr: Le64,
    pub length: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_attach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    // Number of `virtio_gpu_mem_entry` items that follow this struct.
    pub nr_entries: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_detach_backing {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/// Per-scanout display mode reported in VIRTIO_GPU_RESP_OK_DISPLAY_INFO.
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_display_one {
    pub r: virtio_gpu_rect,
    pub enabled: Le32,
    pub flags: Le32,
}

/* VIRTIO_GPU_RESP_OK_DISPLAY_INFO */
pub const VIRTIO_GPU_MAX_SCANOUTS: usize = 16;
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_display_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub pmodes: [virtio_gpu_display_one; VIRTIO_GPU_MAX_SCANOUTS],
}
298
/* data passed in the control vq, 3d related */

/// 3D region (origin + extent) for host transfers.
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_box {
    pub x: Le32,
    pub y: Le32,
    pub z: Le32,
    pub w: Le32,
    pub h: Le32,
    pub d: Le32,
}

/* VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D, VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_transfer_host_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub box_: virtio_gpu_box,
    pub offset: Le64,
    pub resource_id: Le32,
    // Mipmap level of the resource being transferred.
    pub level: Le32,
    pub stride: Le32,
    pub layer_stride: Le32,
}

/* VIRTIO_GPU_CMD_RESOURCE_CREATE_3D */
pub const VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP: u32 = 1 << 0;
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_create_3d {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub target: Le32,
    pub format: Le32,
    pub bind: Le32,
    pub width: Le32,
    pub height: Le32,
    pub depth: Le32,
    pub array_size: Le32,
    pub last_level: Le32,
    pub nr_samples: Le32,
    // VIRTIO_GPU_RESOURCE_FLAG_* bits.
    pub flags: Le32,
    pub padding: Le32,
}
344
/* VIRTIO_GPU_CMD_CTX_CREATE */
pub const VIRTIO_GPU_CONTEXT_INIT_CAPSET_ID_MASK: u32 = 1 << 0;
// Clone/Debug/Default are implemented manually below because of the
// fixed 64-byte `debug_name` array.
#[derive(Copy, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_ctx_create {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Number of valid bytes in `debug_name`.
    pub nlen: Le32,
    pub context_init: Le32,
    pub debug_name: [u8; 64],
}
355
356 impl Default for virtio_gpu_ctx_create {
default() -> Self357 fn default() -> Self {
358 // SAFETY: trivially safe
359 unsafe { ::std::mem::zeroed() }
360 }
361 }
362
363 impl Clone for virtio_gpu_ctx_create {
clone(&self) -> virtio_gpu_ctx_create364 fn clone(&self) -> virtio_gpu_ctx_create {
365 *self
366 }
367 }
368
369 impl fmt::Debug for virtio_gpu_ctx_create {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result370 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
371 let debug_name = from_utf8(&self.debug_name[..min(64, self.nlen.to_native() as usize)])
372 .unwrap_or("<invalid>");
373 f.debug_struct("virtio_gpu_ctx_create")
374 .field("hdr", &self.hdr)
375 .field("debug_name", &debug_name)
376 .finish()
377 }
378 }
379
/* VIRTIO_GPU_CMD_CTX_DESTROY */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_ctx_destroy {
    pub hdr: virtio_gpu_ctrl_hdr,
}

/* VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE, VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_ctx_resource {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_SUBMIT_3D */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_cmd_submit {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Size in bytes of the command buffer that follows.
    pub size: Le32,

    // The in-fence IDs are prepended to the cmd_buf and memory layout
    // of the VIRTIO_GPU_CMD_SUBMIT_3D buffer looks like this:
    //   _________________
    //   | CMD_SUBMIT_3D |
    //   -----------------
    //   |  header       |
    //   |  in-fence IDs |
    //   |  cmd_buf      |
    //   -----------------
    //
    // This makes in-fence IDs naturally aligned to the sizeof(u64) inside
    // of the virtio buffer.
    pub num_in_fences: Le32,
}
417
/* Capability-set identifiers. */
pub const VIRTIO_GPU_CAPSET_VIRGL: u32 = 1;
pub const VIRTIO_GPU_CAPSET_VIRGL2: u32 = 2;
pub const VIRTIO_GPU_CAPSET_GFXSTREAM: u32 = 3;
pub const VIRTIO_GPU_CAPSET_VENUS: u32 = 4;
pub const VIRTIO_GPU_CAPSET_CROSS_DOMAIN: u32 = 5;

/* VIRTIO_GPU_CMD_GET_CAPSET_INFO */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_get_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Index into the device's capset list (not a capset id).
    pub capset_index: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_RESP_OK_CAPSET_INFO */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_capset_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_max_version: Le32,
    pub capset_max_size: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_CMD_GET_CAPSET */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_get_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_id: Le32,
    pub capset_version: Le32,
}

/* VIRTIO_GPU_RESP_OK_CAPSET */
// The capset payload is variable-length, so it is written separately after the
// header; `PhantomData<[u8]>` only documents the trailing bytes and occupies no space.
#[derive(Copy, Clone, Debug, Default)]
#[repr(C)]
pub struct virtio_gpu_resp_capset {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub capset_data: PhantomData<[u8]>,
}
460
/* VIRTIO_GPU_CMD_GET_EDID */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_get_edid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub scanout: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_RESP_OK_EDID */
// No Debug/Default derives: the 1024-byte array exceeds what those derives
// supported for arrays when this was written.
#[derive(Copy, Clone, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_get_edid {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Number of valid bytes in `edid`.
    pub size: Le32,
    pub padding: Le32,
    pub edid: [u8; 1024],
}

/* VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_plane_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    // Number of valid entries in `strides`/`offsets` (<= PLANE_INFO_MAX_COUNT).
    pub count: Le32,
    pub padding: Le32,
    pub format_modifier: Le64,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

pub const PLANE_INFO_MAX_COUNT: usize = 4;

pub const VIRTIO_GPU_EVENT_DISPLAY: u32 = 1 << 0;
495
/* VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_create_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    // One of VIRTIO_GPU_BLOB_MEM_*.
    pub blob_mem: Le32,
    // VIRTIO_GPU_BLOB_FLAG_* bits.
    pub blob_flags: Le32,
    // Number of `virtio_gpu_mem_entry` items that follow this struct.
    pub nr_entries: Le32,
    pub blob_id: Le64,
    pub size: Le64,
}

/* VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_map_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
    pub offset: Le64,
}

/* VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_unmap_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_RESP_OK_MAP_INFO */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_map_info {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub map_info: Le32,
    // NOTE(review): `u32` here is inconsistent with the `Le32` padding used by
    // every other struct in this file; same layout, but worth confirming intent.
    pub padding: u32,
}
532
/* VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resource_assign_uuid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub resource_id: Le32,
    pub padding: Le32,
}

/* VIRTIO_GPU_RESP_OK_RESOURCE_UUID */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_resp_resource_uuid {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub uuid: [u8; 16],
}

/* VIRTIO_GPU_CMD_SET_SCANOUT_BLOB */
#[derive(Copy, Clone, Debug, Default, FromZeroes, FromBytes, AsBytes)]
#[repr(C)]
pub struct virtio_gpu_set_scanout_blob {
    pub hdr: virtio_gpu_ctrl_hdr,
    pub r: virtio_gpu_rect,
    pub scanout_id: Le32,
    pub resource_id: Le32,
    pub width: Le32,
    pub height: Le32,
    // One of the VIRTIO_GPU_FORMAT_* values below.
    pub format: Le32,
    pub padding: Le32,
    pub strides: [Le32; 4],
    pub offsets: [Le32; 4],
}

/* simple formats for fbcon/X use */
pub const VIRTIO_GPU_FORMAT_B8G8R8A8_UNORM: u32 = 1;
pub const VIRTIO_GPU_FORMAT_B8G8R8X8_UNORM: u32 = 2;
pub const VIRTIO_GPU_FORMAT_A8R8G8B8_UNORM: u32 = 3;
pub const VIRTIO_GPU_FORMAT_X8R8G8B8_UNORM: u32 = 4;
pub const VIRTIO_GPU_FORMAT_R8G8B8A8_UNORM: u32 = 67;
pub const VIRTIO_GPU_FORMAT_X8B8G8R8_UNORM: u32 = 68;
pub const VIRTIO_GPU_FORMAT_A8B8G8R8_UNORM: u32 = 121;
pub const VIRTIO_GPU_FORMAT_R8G8B8X8_UNORM: u32 = 134;
573
/// A virtio gpu command and associated metadata specific to each command.
///
/// Each variant wraps the decoded wire struct for the corresponding
/// `VIRTIO_GPU_CMD_*` code; see `GpuCommand::decode`.
#[derive(Copy, Clone)]
pub enum GpuCommand {
    GetDisplayInfo(virtio_gpu_ctrl_hdr),
    ResourceCreate2d(virtio_gpu_resource_create_2d),
    ResourceUnref(virtio_gpu_resource_unref),
    SetScanout(virtio_gpu_set_scanout),
    SetScanoutBlob(virtio_gpu_set_scanout_blob),
    ResourceFlush(virtio_gpu_resource_flush),
    TransferToHost2d(virtio_gpu_transfer_to_host_2d),
    ResourceAttachBacking(virtio_gpu_resource_attach_backing),
    ResourceDetachBacking(virtio_gpu_resource_detach_backing),
    GetCapsetInfo(virtio_gpu_get_capset_info),
    GetCapset(virtio_gpu_get_capset),
    GetEdid(virtio_gpu_get_edid),
    CtxCreate(virtio_gpu_ctx_create),
    CtxDestroy(virtio_gpu_ctx_destroy),
    CtxAttachResource(virtio_gpu_ctx_resource),
    CtxDetachResource(virtio_gpu_ctx_resource),
    ResourceCreate3d(virtio_gpu_resource_create_3d),
    TransferToHost3d(virtio_gpu_transfer_host_3d),
    TransferFromHost3d(virtio_gpu_transfer_host_3d),
    CmdSubmit3d(virtio_gpu_cmd_submit),
    ResourceCreateBlob(virtio_gpu_resource_create_blob),
    ResourceMapBlob(virtio_gpu_resource_map_blob),
    ResourceUnmapBlob(virtio_gpu_resource_unmap_blob),
    UpdateCursor(virtio_gpu_update_cursor),
    MoveCursor(virtio_gpu_update_cursor),
    ResourceAssignUuid(virtio_gpu_resource_assign_uuid),
}
604
/// An error indicating something went wrong decoding a `GpuCommand`. These correspond to
/// `VIRTIO_GPU_CMD_*`.
#[sorted]
#[derive(Error, Debug)]
pub enum GpuCommandDecodeError {
    /// The type of the command was invalid.
    #[error("invalid command type ({0})")]
    InvalidType(u32),
    /// An I/O error occurred.
    #[error("an I/O error occurred: {0}")]
    IO(io::Error),
}
617
618 impl From<io::Error> for GpuCommandDecodeError {
from(e: io::Error) -> GpuCommandDecodeError619 fn from(e: io::Error) -> GpuCommandDecodeError {
620 GpuCommandDecodeError::IO(e)
621 }
622 }
623
624 impl fmt::Debug for GpuCommand {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result625 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
626 use self::GpuCommand::*;
627 match self {
628 GetDisplayInfo(_info) => f.debug_struct("GetDisplayInfo").finish(),
629 ResourceCreate2d(_info) => f.debug_struct("ResourceCreate2d").finish(),
630 ResourceUnref(_info) => f.debug_struct("ResourceUnref").finish(),
631 SetScanout(_info) => f.debug_struct("SetScanout").finish(),
632 SetScanoutBlob(_info) => f.debug_struct("SetScanoutBlob").finish(),
633 ResourceFlush(_info) => f.debug_struct("ResourceFlush").finish(),
634 TransferToHost2d(_info) => f.debug_struct("TransferToHost2d").finish(),
635 ResourceAttachBacking(_info) => f.debug_struct("ResourceAttachBacking").finish(),
636 ResourceDetachBacking(_info) => f.debug_struct("ResourceDetachBacking").finish(),
637 GetCapsetInfo(_info) => f.debug_struct("GetCapsetInfo").finish(),
638 GetCapset(_info) => f.debug_struct("GetCapset").finish(),
639 GetEdid(_info) => f.debug_struct("GetEdid").finish(),
640 CtxCreate(_info) => f.debug_struct("CtxCreate").finish(),
641 CtxDestroy(_info) => f.debug_struct("CtxDestroy").finish(),
642 CtxAttachResource(_info) => f.debug_struct("CtxAttachResource").finish(),
643 CtxDetachResource(_info) => f.debug_struct("CtxDetachResource").finish(),
644 ResourceCreate3d(_info) => f.debug_struct("ResourceCreate3d").finish(),
645 TransferToHost3d(_info) => f.debug_struct("TransferToHost3d").finish(),
646 TransferFromHost3d(_info) => f.debug_struct("TransferFromHost3d").finish(),
647 CmdSubmit3d(_info) => f.debug_struct("CmdSubmit3d").finish(),
648 ResourceCreateBlob(_info) => f.debug_struct("ResourceCreateBlob").finish(),
649 ResourceMapBlob(_info) => f.debug_struct("ResourceMapBlob").finish(),
650 ResourceUnmapBlob(_info) => f.debug_struct("ResourceUnmapBlob").finish(),
651 UpdateCursor(_info) => f.debug_struct("UpdateCursor").finish(),
652 MoveCursor(_info) => f.debug_struct("MoveCursor").finish(),
653 ResourceAssignUuid(_info) => f.debug_struct("ResourceAssignUuid").finish(),
654 }
655 }
656 }
657
impl GpuCommand {
    /// Decodes a command from the given chunk of memory.
    ///
    /// The header is peeked first (without consuming) so that the full
    /// command struct — which embeds the header — can then be read in one
    /// `read_obj` call. Returns `InvalidType` for unrecognized command codes;
    /// I/O failures from the reader surface as `GpuCommandDecodeError::IO`.
    pub fn decode(cmd: &mut Reader) -> Result<GpuCommand, GpuCommandDecodeError> {
        use self::GpuCommand::*;
        let hdr = cmd.peek_obj::<virtio_gpu_ctrl_hdr>()?;
        Ok(match hdr.type_.into() {
            VIRTIO_GPU_CMD_GET_DISPLAY_INFO => GetDisplayInfo(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_2D => ResourceCreate2d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_UNREF => ResourceUnref(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SET_SCANOUT => SetScanout(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SET_SCANOUT_BLOB => SetScanoutBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_FLUSH => ResourceFlush(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D => TransferToHost2d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING => ResourceAttachBacking(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING => ResourceDetachBacking(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_CAPSET_INFO => GetCapsetInfo(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_CAPSET => GetCapset(cmd.read_obj()?),
            VIRTIO_GPU_CMD_GET_EDID => GetEdid(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_CREATE => CtxCreate(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_DESTROY => CtxDestroy(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE => CtxAttachResource(cmd.read_obj()?),
            VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE => CtxDetachResource(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_3D => ResourceCreate3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D => TransferToHost3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D => TransferFromHost3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_SUBMIT_3D => CmdSubmit3d(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_CREATE_BLOB => ResourceCreateBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_MAP_BLOB => ResourceMapBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_UNMAP_BLOB => ResourceUnmapBlob(cmd.read_obj()?),
            VIRTIO_GPU_CMD_UPDATE_CURSOR => UpdateCursor(cmd.read_obj()?),
            VIRTIO_GPU_CMD_MOVE_CURSOR => MoveCursor(cmd.read_obj()?),
            VIRTIO_GPU_CMD_RESOURCE_ASSIGN_UUID => ResourceAssignUuid(cmd.read_obj()?),
            _ => return Err(GpuCommandDecodeError::InvalidType(hdr.type_.into())),
        })
    }

    /// Gets the generic `virtio_gpu_ctrl_hdr` from this command.
    pub fn ctrl_hdr(&self) -> &virtio_gpu_ctrl_hdr {
        use self::GpuCommand::*;
        match self {
            // GetDisplayInfo carries only the header itself.
            GetDisplayInfo(info) => info,
            ResourceCreate2d(info) => &info.hdr,
            ResourceUnref(info) => &info.hdr,
            SetScanout(info) => &info.hdr,
            SetScanoutBlob(info) => &info.hdr,
            ResourceFlush(info) => &info.hdr,
            TransferToHost2d(info) => &info.hdr,
            ResourceAttachBacking(info) => &info.hdr,
            ResourceDetachBacking(info) => &info.hdr,
            GetCapsetInfo(info) => &info.hdr,
            GetCapset(info) => &info.hdr,
            GetEdid(info) => &info.hdr,
            CtxCreate(info) => &info.hdr,
            CtxDestroy(info) => &info.hdr,
            CtxAttachResource(info) => &info.hdr,
            CtxDetachResource(info) => &info.hdr,
            ResourceCreate3d(info) => &info.hdr,
            TransferToHost3d(info) => &info.hdr,
            TransferFromHost3d(info) => &info.hdr,
            CmdSubmit3d(info) => &info.hdr,
            ResourceCreateBlob(info) => &info.hdr,
            ResourceMapBlob(info) => &info.hdr,
            ResourceUnmapBlob(info) => &info.hdr,
            UpdateCursor(info) => &info.hdr,
            MoveCursor(info) => &info.hdr,
            ResourceAssignUuid(info) => &info.hdr,
        }
    }
}
727
/// Stride/offset pair for one plane, reported in OK_RESOURCE_PLANE_INFO.
#[derive(Debug, PartialEq, Eq)]
pub struct GpuResponsePlaneInfo {
    pub stride: u32,
    pub offset: u32,
}

/// A response to a `GpuCommand`. These correspond to `VIRTIO_GPU_RESP_*`.
#[derive(Debug)]
pub enum GpuResponse {
    OkNoData,
    // (width, height, enabled) per scanout.
    OkDisplayInfo(Vec<(u32, u32, bool)>),
    OkCapsetInfo {
        capset_id: u32,
        version: u32,
        size: u32,
    },
    OkCapset(Vec<u8>),
    OkEdid(Box<EdidBytes>),
    OkResourcePlaneInfo {
        format_modifier: u64,
        plane_info: Vec<GpuResponsePlaneInfo>,
    },
    OkResourceUuid {
        uuid: [u8; 16],
    },
    OkMapInfo {
        map_info: u32,
    },
    ErrUnspec,
    ErrTube(TubeError),
    ErrBase(BaseError),
    ErrRutabaga(RutabagaError),
    ErrDisplay(GpuDisplayError),
    ErrScanout {
        num_scanouts: u32,
    },
    ErrEdid(String),
    ErrOutOfMemory,
    ErrInvalidScanoutId,
    ErrInvalidResourceId,
    ErrInvalidContextId,
    ErrInvalidParameter,
    ErrUdmabuf(UdmabufError),
}
772
773 impl From<TubeError> for GpuResponse {
from(e: TubeError) -> GpuResponse774 fn from(e: TubeError) -> GpuResponse {
775 GpuResponse::ErrTube(e)
776 }
777 }
778
779 impl From<RutabagaError> for GpuResponse {
from(e: RutabagaError) -> GpuResponse780 fn from(e: RutabagaError) -> GpuResponse {
781 GpuResponse::ErrRutabaga(e)
782 }
783 }
784
785 impl From<GpuDisplayError> for GpuResponse {
from(e: GpuDisplayError) -> GpuResponse786 fn from(e: GpuDisplayError) -> GpuResponse {
787 GpuResponse::ErrDisplay(e)
788 }
789 }
790
791 impl From<UdmabufError> for GpuResponse {
from(e: UdmabufError) -> GpuResponse792 fn from(e: UdmabufError) -> GpuResponse {
793 GpuResponse::ErrUdmabuf(e)
794 }
795 }
796
797 impl Display for GpuResponse {
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result798 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
799 use self::GpuResponse::*;
800 match self {
801 ErrTube(e) => write!(f, "tube error: {}", e),
802 ErrBase(e) => write!(f, "base error: {}", e),
803 ErrRutabaga(e) => write!(f, "renderer error: {}", e),
804 ErrDisplay(e) => write!(f, "display error: {}", e),
805 ErrScanout { num_scanouts } => write!(f, "non-zero scanout: {}", num_scanouts),
806 ErrUdmabuf(e) => write!(f, "udmabuf error: {}", e),
807 _ => Ok(()),
808 }
809 }
810 }
811
// `GpuResponse` doubles as the error half of `VirtioGpuResult`.
impl std::error::Error for GpuResponse {}
813
/// An error indicating something went wrong encoding a `GpuResponse`.
#[sorted]
#[derive(Error, Debug)]
pub enum GpuResponseEncodeError {
    /// An I/O error occurred.
    #[error("an I/O error occurred: {0}")]
    IO(io::Error),
    /// More displays than are valid were in a `OkDisplayInfo`.
    #[error("{0} is more displays than are valid")]
    TooManyDisplays(usize),
    /// More planes than are valid were in a `OkResourcePlaneInfo`.
    #[error("{0} is more planes than are valid")]
    TooManyPlanes(usize),
}
828
829 impl From<io::Error> for GpuResponseEncodeError {
from(e: io::Error) -> GpuResponseEncodeError830 fn from(e: io::Error) -> GpuResponseEncodeError {
831 GpuResponseEncodeError::IO(e)
832 }
833 }
834
/// Result of handling a command; both arms carry a `GpuResponse` to send back.
pub type VirtioGpuResult = std::result::Result<GpuResponse, GpuResponse>;
836
837 impl GpuResponse {
838 /// Encodes a this `GpuResponse` into `resp` and the given set of metadata.
encode( &self, flags: u32, fence_id: u64, ctx_id: u32, ring_idx: u8, resp: &mut Writer, ) -> Result<u32, GpuResponseEncodeError>839 pub fn encode(
840 &self,
841 flags: u32,
842 fence_id: u64,
843 ctx_id: u32,
844 ring_idx: u8,
845 resp: &mut Writer,
846 ) -> Result<u32, GpuResponseEncodeError> {
847 let hdr = virtio_gpu_ctrl_hdr {
848 type_: Le32::from(self.get_type()),
849 flags: Le32::from(flags),
850 fence_id: Le64::from(fence_id),
851 ctx_id: Le32::from(ctx_id),
852 ring_idx,
853 padding: Default::default(),
854 };
855 let len = match *self {
856 GpuResponse::OkDisplayInfo(ref info) => {
857 if info.len() > VIRTIO_GPU_MAX_SCANOUTS {
858 return Err(GpuResponseEncodeError::TooManyDisplays(info.len()));
859 }
860 let mut disp_info = virtio_gpu_resp_display_info {
861 hdr,
862 pmodes: Default::default(),
863 };
864 for (disp_mode, &(width, height, enabled)) in disp_info.pmodes.iter_mut().zip(info)
865 {
866 disp_mode.r.width = Le32::from(width);
867 disp_mode.r.height = Le32::from(height);
868 disp_mode.enabled = Le32::from(enabled as u32);
869 }
870 resp.write_obj(disp_info)?;
871 size_of_val(&disp_info)
872 }
873 GpuResponse::OkCapsetInfo {
874 capset_id,
875 version,
876 size,
877 } => {
878 resp.write_obj(virtio_gpu_resp_capset_info {
879 hdr,
880 capset_id: Le32::from(capset_id),
881 capset_max_version: Le32::from(version),
882 capset_max_size: Le32::from(size),
883 padding: Le32::from(0),
884 })?;
885 size_of::<virtio_gpu_resp_capset_info>()
886 }
887 GpuResponse::OkCapset(ref data) => {
888 resp.write_obj(hdr)?;
889 resp.write_all(data)?;
890 size_of_val(&hdr) + data.len()
891 }
892 GpuResponse::OkEdid(ref edid_bytes) => {
893 let mut edid_resp = virtio_gpu_resp_get_edid {
894 hdr,
895 size: Le32::from(1024),
896 padding: Le32::from(0),
897 edid: [0; 1024],
898 };
899
900 edid_resp.edid[0..edid_bytes.len()].copy_from_slice(edid_bytes.as_bytes());
901 resp.write_obj(edid_resp)?;
902 size_of::<virtio_gpu_resp_get_edid>()
903 }
904 GpuResponse::OkResourcePlaneInfo {
905 format_modifier,
906 ref plane_info,
907 } => {
908 if plane_info.len() > PLANE_INFO_MAX_COUNT {
909 return Err(GpuResponseEncodeError::TooManyPlanes(plane_info.len()));
910 }
911 let mut strides = [Le32::default(); PLANE_INFO_MAX_COUNT];
912 let mut offsets = [Le32::default(); PLANE_INFO_MAX_COUNT];
913 for (plane_index, plane) in plane_info.iter().enumerate() {
914 strides[plane_index] = plane.stride.into();
915 offsets[plane_index] = plane.offset.into();
916 }
917 let plane_info = virtio_gpu_resp_resource_plane_info {
918 hdr,
919 count: Le32::from(plane_info.len() as u32),
920 padding: 0.into(),
921 format_modifier: format_modifier.into(),
922 strides,
923 offsets,
924 };
925 if resp.available_bytes() >= size_of_val(&plane_info) {
926 resp.write_obj(plane_info)?;
927 size_of_val(&plane_info)
928 } else {
929 // In case there is too little room in the response slice to store the
930 // entire virtio_gpu_resp_resource_plane_info, convert response to a regular
931 // VIRTIO_GPU_RESP_OK_NODATA and attempt to return that.
932 resp.write_obj(virtio_gpu_ctrl_hdr {
933 type_: Le32::from(VIRTIO_GPU_RESP_OK_NODATA),
934 ..hdr
935 })?;
936 size_of_val(&hdr)
937 }
938 }
939 GpuResponse::OkResourceUuid { uuid } => {
940 let resp_info = virtio_gpu_resp_resource_uuid { hdr, uuid };
941
942 resp.write_obj(resp_info)?;
943 size_of_val(&resp_info)
944 }
945 GpuResponse::OkMapInfo { map_info } => {
946 let resp_info = virtio_gpu_resp_map_info {
947 hdr,
948 map_info: Le32::from(map_info),
949 padding: Default::default(),
950 };
951
952 resp.write_obj(resp_info)?;
953 size_of_val(&resp_info)
954 }
955 _ => {
956 resp.write_obj(hdr)?;
957 size_of_val(&hdr)
958 }
959 };
960 Ok(len as u32)
961 }
962
963 /// Gets the `VIRTIO_GPU_*` enum value that corresponds to this variant.
get_type(&self) -> u32964 pub fn get_type(&self) -> u32 {
965 match self {
966 GpuResponse::OkNoData => VIRTIO_GPU_RESP_OK_NODATA,
967 GpuResponse::OkDisplayInfo(_) => VIRTIO_GPU_RESP_OK_DISPLAY_INFO,
968 GpuResponse::OkCapsetInfo { .. } => VIRTIO_GPU_RESP_OK_CAPSET_INFO,
969 GpuResponse::OkCapset(_) => VIRTIO_GPU_RESP_OK_CAPSET,
970 GpuResponse::OkEdid(_) => VIRTIO_GPU_RESP_OK_EDID,
971 GpuResponse::OkResourcePlaneInfo { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_PLANE_INFO,
972 GpuResponse::OkResourceUuid { .. } => VIRTIO_GPU_RESP_OK_RESOURCE_UUID,
973 GpuResponse::OkMapInfo { .. } => VIRTIO_GPU_RESP_OK_MAP_INFO,
974 GpuResponse::ErrUnspec => VIRTIO_GPU_RESP_ERR_UNSPEC,
975 GpuResponse::ErrTube(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
976 GpuResponse::ErrBase(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
977 GpuResponse::ErrRutabaga(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
978 GpuResponse::ErrDisplay(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
979 GpuResponse::ErrUdmabuf(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
980 GpuResponse::ErrScanout { num_scanouts: _ } => VIRTIO_GPU_RESP_ERR_UNSPEC,
981 GpuResponse::ErrEdid(_) => VIRTIO_GPU_RESP_ERR_UNSPEC,
982 GpuResponse::ErrOutOfMemory => VIRTIO_GPU_RESP_ERR_OUT_OF_MEMORY,
983 GpuResponse::ErrInvalidScanoutId => VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID,
984 GpuResponse::ErrInvalidResourceId => VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID,
985 GpuResponse::ErrInvalidContextId => VIRTIO_GPU_RESP_ERR_INVALID_CONTEXT_ID,
986 GpuResponse::ErrInvalidParameter => VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER,
987 }
988 }
989 }
990