1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 // Note: ported from Chromium commit head: 2f13d62f0c0d
5 // Note: Added some missing defines that are only defined in newer kernel
6 // versions (e.g. V4L2_PIX_FMT_VP8_FRAME)
7
8 //#define LOG_NDEBUG 0
9 #define ATRACE_TAG ATRACE_TAG_VIDEO
10 #define LOG_TAG "V4L2Device"
11
12 #include <linux/v4l2-controls.h>
13 #include <v4l2_codec2/v4l2/V4L2Device.h>
14
15 #include <fcntl.h>
16 #include <inttypes.h>
17 #include <linux/media.h>
18 #include <linux/videodev2.h>
19 #include <poll.h>
20 #include <string.h>
21 #include <sys/eventfd.h>
22 #include <sys/ioctl.h>
23 #include <sys/mman.h>
24 #include <utils/Trace.h>
25
26 #include <algorithm>
27 #include <mutex>
28 #include <set>
29 #include <sstream>
30
31 #include <base/bind.h>
32 #include <base/numerics/safe_conversions.h>
33 #include <base/posix/eintr_wrapper.h>
34 #include <base/strings/stringprintf.h>
35 #include <base/thread_annotations.h>
36 #include <utils/Log.h>
37
38 #include <v4l2_codec2/common/Fourcc.h>
39 #include <v4l2_codec2/common/VideoPixelFormat.h>
40
41 namespace android {
42
43 bool isValidPixFmtForCodec(VideoCodec codec, uint32_t pixFmt) {
44 switch (pixFmt) {
45 case V4L2_PIX_FMT_H264:
46 case V4L2_PIX_FMT_H264_SLICE:
47 return codec == VideoCodec::H264;
48 break;
49 case V4L2_PIX_FMT_VP8:
50 case V4L2_PIX_FMT_VP8_FRAME:
51 return codec == VideoCodec::VP8;
52 break;
53 case V4L2_PIX_FMT_VP9:
54 case V4L2_PIX_FMT_VP9_FRAME:
55 return codec == VideoCodec::VP9;
56 break;
57 case V4L2_PIX_FMT_HEVC:
58 case V4L2_PIX_FMT_HEVC_SLICE:
59 return codec == VideoCodec::HEVC;
60 break;
61 default:
62 ALOGE("Unhandled pixelformat %s", fourccToString(pixFmt).c_str());
63 return false;
64 }
65 }
66
67 struct v4l2_format buildV4L2Format(const enum v4l2_buf_type type, uint32_t fourcc,
68 const ui::Size& size, size_t buffer_size, uint32_t stride) {
69 struct v4l2_format format;
70 memset(&format, 0, sizeof(format));
71 format.type = type;
72 format.fmt.pix_mp.pixelformat = fourcc;
73 format.fmt.pix_mp.width = size.width;
74 format.fmt.pix_mp.height = size.height;
75 format.fmt.pix_mp.num_planes = V4L2Device::getNumPlanesOfV4L2PixFmt(fourcc);
76 format.fmt.pix_mp.plane_fmt[0].sizeimage = buffer_size;
77
78 // When the image format is planar the bytesperline value applies to the first plane and is
79 // divided by the same factor as the width field for the other planes.
80 format.fmt.pix_mp.plane_fmt[0].bytesperline = stride;
81
82 return format;
83 }
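
// Illustrative example (not part of the original file): per the bytesperline note above, only the
// first plane's stride is filled in for planar formats such as V4L2_PIX_FMT_NV12M; the driver
// derives the remaining plane strides from it. A hypothetical caller might do:
//
//   const ui::Size codedSize(1920, 1088);
//   struct v4l2_format fmt = buildV4L2Format(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE,
//                                            V4L2_PIX_FMT_NV12M, codedSize,
//                                            /*buffer_size=*/0, /*stride=*/1920);
//   // |fmt| is then handed to VIDIOC_S_FMT/VIDIOC_TRY_FMT, as V4L2Queue::setFormat() does.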
84
85 V4L2ExtCtrl::V4L2ExtCtrl(uint32_t id) {
86 memset(&ctrl, 0, sizeof(ctrl));
87 ctrl.id = id;
88 }
89
90 V4L2ExtCtrl::V4L2ExtCtrl(uint32_t id, int32_t val) : V4L2ExtCtrl(id) {
91 ctrl.value = val;
92 }
93
94 // Class used to store the state of a buffer that should persist between reference creations. This
95 // includes:
96 // * Result of initial VIDIOC_QUERYBUF ioctl,
97 // * Plane mappings.
98 //
99 // Also provides helper functions.
100 class V4L2Buffer {
101 public:
102 static std::unique_ptr<V4L2Buffer> create(scoped_refptr<V4L2Device> device,
103 enum v4l2_buf_type type, enum v4l2_memory memory,
104 const struct v4l2_format& format, size_t bufferId);
105 ~V4L2Buffer();
106
107 V4L2Buffer(const V4L2Buffer&) = delete;
108 V4L2Buffer& operator=(const V4L2Buffer&) = delete;
109
110 void* getPlaneMapping(const size_t plane);
111 size_t getMemoryUsage() const;
112     const struct v4l2_buffer& v4l2_buffer() const { return mV4l2Buffer; }
113
114 private:
115 V4L2Buffer(scoped_refptr<V4L2Device> device, enum v4l2_buf_type type, enum v4l2_memory memory,
116 const struct v4l2_format& format, size_t bufferId);
117 bool query();
118
119 scoped_refptr<V4L2Device> mDevice;
120 std::vector<void*> mPlaneMappings;
121
122 // V4L2 data as queried by QUERYBUF.
123 struct v4l2_buffer mV4l2Buffer;
124 // WARNING: do not change this to a vector or something smaller than VIDEO_MAX_PLANES, otherwise
125 // the Tegra libv4l2 will write data beyond the number of allocated planes, resulting in memory
126 // corruption.
127 struct v4l2_plane mV4l2Planes[VIDEO_MAX_PLANES];
128
129 struct v4l2_format mFormat __attribute__((unused));
130 };
131
132 std::unique_ptr<V4L2Buffer> V4L2Buffer::create(scoped_refptr<V4L2Device> device,
133 enum v4l2_buf_type type, enum v4l2_memory memory,
134 const struct v4l2_format& format, size_t bufferId) {
135 // Not using std::make_unique because constructor is private.
136 std::unique_ptr<V4L2Buffer> buffer(new V4L2Buffer(device, type, memory, format, bufferId));
137
138 if (!buffer->query()) return nullptr;
139
140 return buffer;
141 }
142
143 V4L2Buffer::V4L2Buffer(scoped_refptr<V4L2Device> device, enum v4l2_buf_type type,
144 enum v4l2_memory memory, const struct v4l2_format& format, size_t bufferId)
145 : mDevice(device), mFormat(format) {
146 ALOG_ASSERT(V4L2_TYPE_IS_MULTIPLANAR(type));
147 ALOG_ASSERT(format.fmt.pix_mp.num_planes <= base::size(mV4l2Planes));
148
149 memset(mV4l2Planes, 0, sizeof(mV4l2Planes));
150 memset(&mV4l2Buffer, 0, sizeof(mV4l2Buffer));
151 mV4l2Buffer.m.planes = mV4l2Planes;
152 // Just in case we got more planes than we want.
153 mV4l2Buffer.length =
154 std::min(static_cast<size_t>(format.fmt.pix_mp.num_planes), base::size(mV4l2Planes));
155 mV4l2Buffer.index = bufferId;
156 mV4l2Buffer.type = type;
157 mV4l2Buffer.memory = memory;
159 mPlaneMappings.resize(mV4l2Buffer.length);
160 }
161
162 V4L2Buffer::~V4L2Buffer() {
163 if (mV4l2Buffer.memory == V4L2_MEMORY_MMAP) {
164 for (size_t i = 0; i < mPlaneMappings.size(); i++) {
165 if (mPlaneMappings[i] != nullptr) {
166 mDevice->munmap(mPlaneMappings[i], mV4l2Buffer.m.planes[i].length);
167 }
168 }
169 }
170 }
171
172 bool V4L2Buffer::query() {
173 int ret = mDevice->ioctl(VIDIOC_QUERYBUF, &mV4l2Buffer);
174 if (ret) {
175 ALOGE("VIDIOC_QUERYBUF failed");
176 return false;
177 }
178
179 DCHECK(mPlaneMappings.size() == mV4l2Buffer.length);
180
181 return true;
182 }
183
184 void* V4L2Buffer::getPlaneMapping(const size_t plane) {
185 if (plane >= mPlaneMappings.size()) {
186 ALOGE("Invalid plane %zu requested.", plane);
187 return nullptr;
188 }
189
190 void* p = mPlaneMappings[plane];
191 if (p) {
192 return p;
193 }
194
195 // Do this check here to avoid repeating it after a buffer has been successfully mapped (we know
196 // we are of MMAP type by then).
197 if (mV4l2Buffer.memory != V4L2_MEMORY_MMAP) {
198 ALOGE("Cannot create mapping on non-MMAP buffer");
199 return nullptr;
200 }
201
202 p = mDevice->mmap(NULL, mV4l2Buffer.m.planes[plane].length, PROT_READ | PROT_WRITE, MAP_SHARED,
203 mV4l2Buffer.m.planes[plane].m.mem_offset);
204 if (p == MAP_FAILED) {
205         ALOGE("mmap() failed");
206 return nullptr;
207 }
208
209 mPlaneMappings[plane] = p;
210 return p;
211 }
212
213 size_t V4L2Buffer::getMemoryUsage() const {
214 size_t usage = 0;
215 for (size_t i = 0; i < mV4l2Buffer.length; i++) {
216 usage += mV4l2Buffer.m.planes[i].length;
217 }
218 return usage;
219 }
220
221 // A thread-safe pool of buffer indexes, allowing buffers to be obtained and returned from different
222 // threads. All the methods of this class are thread-safe. Users should keep a scoped_refptr to
223 // instances of this class in order to ensure the list remains alive as long as they need it.
224 class V4L2BuffersList : public base::RefCountedThreadSafe<V4L2BuffersList> {
225 public:
226 V4L2BuffersList() = default;
227
228 V4L2BuffersList(const V4L2BuffersList&) = delete;
229 V4L2BuffersList& operator=(const V4L2BuffersList&) = delete;
230
231 // Return a buffer to this list. Also can be called to set the initial pool of buffers.
232 // Note that it is illegal to return the same buffer twice.
233 void returnBuffer(size_t bufferId);
234 // Get any of the buffers in the list. There is no order guarantee whatsoever.
235 std::optional<size_t> getFreeBuffer();
236 // Get the buffer with specified index.
237 std::optional<size_t> getFreeBuffer(size_t requestedBufferId);
238 // Number of buffers currently in this list.
239 size_t size() const;
240
241 private:
242 friend class base::RefCountedThreadSafe<V4L2BuffersList>;
243 ~V4L2BuffersList() = default;
244
245 mutable std::mutex mLock;
246 std::set<size_t> mFreeBuffers GUARDED_BY(mLock);
247 };
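
// Illustrative usage sketch (assumption, not part of the original file): the list is seeded once
// with all buffer indexes and can then be shared between producer and consumer threads, e.g.:
//
//   scoped_refptr<V4L2BuffersList> freeList = new V4L2BuffersList();
//   for (size_t i = 0; i < numBuffers; ++i) freeList->returnBuffer(i);  // Initial pool.
//   std::optional<size_t> id = freeList->getFreeBuffer();
//   if (id.has_value()) {
//       // ... use buffer *id, then hand it back once done:
//       freeList->returnBuffer(*id);
//   }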
248
249 void V4L2BuffersList::returnBuffer(size_t bufferId) {
250 std::lock_guard<std::mutex> lock(mLock);
251
252 auto inserted = mFreeBuffers.emplace(bufferId);
253 if (!inserted.second) {
254 ALOGE("Returning buffer failed");
255 }
256 }
257
258 std::optional<size_t> V4L2BuffersList::getFreeBuffer() {
259 std::lock_guard<std::mutex> lock(mLock);
260
261 auto iter = mFreeBuffers.begin();
262 if (iter == mFreeBuffers.end()) {
263 ALOGV("No free buffer available!");
264 return std::nullopt;
265 }
266
267 size_t bufferId = *iter;
268 mFreeBuffers.erase(iter);
269
270 return bufferId;
271 }
272
273 std::optional<size_t> V4L2BuffersList::getFreeBuffer(size_t requestedBufferId) {
274 std::lock_guard<std::mutex> lock(mLock);
275
276 return (mFreeBuffers.erase(requestedBufferId) > 0) ? std::make_optional(requestedBufferId)
277 : std::nullopt;
278 }
279
280 size_t V4L2BuffersList::size() const {
281 std::lock_guard<std::mutex> lock(mLock);
282
283 return mFreeBuffers.size();
284 }
285
286 // Module-private class that let users query/write V4L2 buffer information. It also makes some
287 // private V4L2Queue methods available to this module only.
288 class V4L2BufferRefBase {
289 public:
290 V4L2BufferRefBase(const struct v4l2_buffer& v4l2Buffer, base::WeakPtr<V4L2Queue> queue);
291 ~V4L2BufferRefBase();
292
293 V4L2BufferRefBase(const V4L2BufferRefBase&) = delete;
294 V4L2BufferRefBase& operator=(const V4L2BufferRefBase&) = delete;
295
296 bool queueBuffer();
297 void* getPlaneMapping(const size_t plane);
298
299 // Checks that the number of passed FDs is adequate for the current format and buffer
300 // configuration. Only useful for DMABUF buffers.
301 bool checkNumFDsForFormat(const size_t numFds) const;
302
303 // Data from the buffer, that users can query and/or write.
304 struct v4l2_buffer mV4l2Buffer;
305 // WARNING: do not change this to a vector or something smaller than VIDEO_MAX_PLANES, otherwise
306 // the Tegra libv4l2 will write data beyond the number of allocated planes, resulting in memory
307 // corruption.
308 struct v4l2_plane mV4l2Planes[VIDEO_MAX_PLANES];
309
310 private:
311     size_t bufferId() const { return mV4l2Buffer.index; }
312
313 friend class V4L2WritableBufferRef;
314 // A weak pointer to the queue this buffer belongs to. Will remain valid as long as the
315 // underlying V4L2 buffer is valid too. This can only be accessed from the sequence protected by
316 // sequence_checker_. Thread-safe methods (like ~V4L2BufferRefBase) must *never* access this.
317 base::WeakPtr<V4L2Queue> mQueue;
318 // Where to return this buffer if it goes out of scope without being queued.
319 scoped_refptr<V4L2BuffersList> mReturnTo;
320 bool queued = false;
321
322 SEQUENCE_CHECKER(mSequenceChecker);
323 };
324
325 V4L2BufferRefBase::V4L2BufferRefBase(const struct v4l2_buffer& v4l2Buffer,
326 base::WeakPtr<V4L2Queue> queue)
327 : mQueue(std::move(queue)), mReturnTo(mQueue->mFreeBuffers) {
328 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
329 ALOG_ASSERT(V4L2_TYPE_IS_MULTIPLANAR(v4l2Buffer.type));
330 ALOG_ASSERT(v4l2Buffer.length <= base::size(mV4l2Planes));
331 ALOG_ASSERT(mReturnTo);
332
333 memcpy(&mV4l2Buffer, &v4l2Buffer, sizeof(mV4l2Buffer));
334 memcpy(mV4l2Planes, v4l2Buffer.m.planes, sizeof(struct v4l2_plane) * v4l2Buffer.length);
335 mV4l2Buffer.m.planes = mV4l2Planes;
336 }
337
338 V4L2BufferRefBase::~V4L2BufferRefBase() {
339 // We are the last reference and are only accessing the thread-safe mReturnTo, so we are safe
340 // to call from any sequence. If we have been queued, then the queue is our owner so we don't
341 // need to return to the free buffers list.
342 if (!queued) mReturnTo->returnBuffer(bufferId());
343 }
344
345 bool V4L2BufferRefBase::queueBuffer() {
346 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
347
348 if (!mQueue) return false;
349
350 queued = mQueue->queueBuffer(&mV4l2Buffer);
351
352 return queued;
353 }
354
355 void* V4L2BufferRefBase::getPlaneMapping(const size_t plane) {
356 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
357
358 if (!mQueue) return nullptr;
359
360 return mQueue->mBuffers[bufferId()]->getPlaneMapping(plane);
361 }
362
363 bool V4L2BufferRefBase::checkNumFDsForFormat(const size_t numFds) const {
364 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
365
366 if (!mQueue) return false;
367
368     // setFormat() has not been used; assume this is ok.
369     // Hopefully we will standardize setFormat() usage in the future.
370 if (!mQueue->mCurrentFormat) return true;
371
372 const size_t requiredFds = mQueue->mCurrentFormat->fmt.pix_mp.num_planes;
373 // Sanity check.
374 ALOG_ASSERT(mV4l2Buffer.length == requiredFds);
375 if (numFds < requiredFds) {
376 ALOGE("Insufficient number of FDs given for the current format. "
377 "%zu provided, %zu required.",
378 numFds, requiredFds);
379 return false;
380 }
381
382 const auto* planes = mV4l2Buffer.m.planes;
383 for (size_t i = mV4l2Buffer.length - 1; i >= numFds; --i) {
384 // Assume that an fd is a duplicate of a previous plane's fd if offset != 0. Otherwise, if
385 // offset == 0, return error as it is likely pointing to a new plane.
386 if (planes[i].data_offset == 0) {
387 ALOGE("Additional dmabuf fds point to a new buffer.");
388 return false;
389 }
390 }
391
392 return true;
393 }
394
395 V4L2WritableBufferRef::V4L2WritableBufferRef(const struct v4l2_buffer& v4l2Buffer,
396 base::WeakPtr<V4L2Queue> queue)
397 : mBufferData(std::make_unique<V4L2BufferRefBase>(v4l2Buffer, std::move(queue))) {
398 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
399 }
400
401 V4L2WritableBufferRef::V4L2WritableBufferRef(V4L2WritableBufferRef&& other)
402 : mBufferData(std::move(other.mBufferData)) {
403 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
404 DCHECK_CALLED_ON_VALID_SEQUENCE(other.mSequenceChecker);
405 }
406
407 V4L2WritableBufferRef::~V4L2WritableBufferRef() {
408 // Only valid references should be sequence-checked
409 if (mBufferData) {
410 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
411 }
412 }
413
414 V4L2WritableBufferRef& V4L2WritableBufferRef::operator=(V4L2WritableBufferRef&& other) {
415 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
416 DCHECK_CALLED_ON_VALID_SEQUENCE(other.mSequenceChecker);
417
418 if (this == &other) return *this;
419
420 mBufferData = std::move(other.mBufferData);
421
422 return *this;
423 }
424
425 enum v4l2_memory V4L2WritableBufferRef::memory() const {
426 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
427 ALOG_ASSERT(mBufferData);
428
429 return static_cast<enum v4l2_memory>(mBufferData->mV4l2Buffer.memory);
430 }
431
432 bool V4L2WritableBufferRef::doQueue() && {
433 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
434 ALOG_ASSERT(mBufferData);
435
436 bool queued = mBufferData->queueBuffer();
437
438 // Clear our own reference.
439 mBufferData.reset();
440
441 return queued;
442 }
443
444 bool V4L2WritableBufferRef::queueMMap() && {
445 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
446 ALOG_ASSERT(mBufferData);
447
448 // Move ourselves so our data gets freed no matter when we return
449 V4L2WritableBufferRef self(std::move(*this));
450
451 if (self.memory() != V4L2_MEMORY_MMAP) {
452 ALOGE("Called on invalid buffer type!");
453 return false;
454 }
455
456 return std::move(self).doQueue();
457 }
458
459 bool V4L2WritableBufferRef::queueUserPtr(const std::vector<void*>& ptrs) && {
460 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
461 ALOG_ASSERT(mBufferData);
462
463 // Move ourselves so our data gets freed no matter when we return
464 V4L2WritableBufferRef self(std::move(*this));
465
466 if (self.memory() != V4L2_MEMORY_USERPTR) {
467 ALOGE("Called on invalid buffer type!");
468 return false;
469 }
470
471 if (ptrs.size() != self.planesCount()) {
472 ALOGE("Provided %zu pointers while we require %u.", ptrs.size(),
473 self.mBufferData->mV4l2Buffer.length);
474 return false;
475 }
476
477 for (size_t i = 0; i < ptrs.size(); i++) {
478 self.mBufferData->mV4l2Buffer.m.planes[i].m.userptr =
479 reinterpret_cast<unsigned long>(ptrs[i]);
480 }
481
482 return std::move(self).doQueue();
483 }
484
485 bool V4L2WritableBufferRef::queueDMABuf(const std::vector<int>& fds) && {
486 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
487 ALOG_ASSERT(mBufferData);
488
489 // Move ourselves so our data gets freed no matter when we return
490 V4L2WritableBufferRef self(std::move(*this));
491
492 if (self.memory() != V4L2_MEMORY_DMABUF) {
493 ALOGE("Called on invalid buffer type!");
494 return false;
495 }
496
497 if (!self.mBufferData->checkNumFDsForFormat(fds.size())) return false;
498
499 size_t numPlanes = self.planesCount();
500 for (size_t i = 0; i < numPlanes; i++) self.mBufferData->mV4l2Buffer.m.planes[i].m.fd = fds[i];
501
502 return std::move(self).doQueue();
503 }
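
// Illustrative sketch of the intended calling pattern (assumption based on the API above): a
// V4L2WritableBufferRef is consumed by value when queued, so callers move it into the queue*()
// call and must not touch it afterwards. For DMABUF buffers this could look like:
//
//   std::optional<V4L2WritableBufferRef> buf = queue->getFreeBuffer();
//   if (buf) {
//       buf->setTimeStamp(timestamp);
//       buf->setPlaneBytesUsed(0, payloadSize);
//       std::move(*buf).queueDMABuf(dmabufFds);  // |buf| no longer owns a buffer after this.
//   }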
504
505 size_t V4L2WritableBufferRef::planesCount() const {
506 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
507 ALOG_ASSERT(mBufferData);
508
509 return mBufferData->mV4l2Buffer.length;
510 }
511
512 size_t V4L2WritableBufferRef::getPlaneSize(const size_t plane) const {
513 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
514 ALOG_ASSERT(mBufferData);
515
516 if (plane >= planesCount()) {
517 ALOGE("Invalid plane %zu requested.", plane);
518 return 0;
519 }
520
521 return mBufferData->mV4l2Buffer.m.planes[plane].length;
522 }
523
524 void V4L2WritableBufferRef::setPlaneSize(const size_t plane, const size_t size) {
525 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
526 ALOG_ASSERT(mBufferData);
527
528 enum v4l2_memory mem = memory();
529 if (mem == V4L2_MEMORY_MMAP) {
530 ALOG_ASSERT(mBufferData->mV4l2Buffer.m.planes[plane].length == size);
531 return;
532 }
533 ALOG_ASSERT(mem == V4L2_MEMORY_USERPTR || mem == V4L2_MEMORY_DMABUF);
534
535 if (plane >= planesCount()) {
536 ALOGE("Invalid plane %zu requested.", plane);
537 return;
538 }
539
540 mBufferData->mV4l2Buffer.m.planes[plane].length = size;
541 }
542
543 void* V4L2WritableBufferRef::getPlaneMapping(const size_t plane) {
544 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
545 ALOG_ASSERT(mBufferData);
546
547 return mBufferData->getPlaneMapping(plane);
548 }
549
550 void V4L2WritableBufferRef::setTimeStamp(const struct timeval& timestamp) {
551 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
552 ALOG_ASSERT(mBufferData);
553
554 mBufferData->mV4l2Buffer.timestamp = timestamp;
555 }
556
557 const struct timeval& V4L2WritableBufferRef::getTimeStamp() const {
558 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
559 ALOG_ASSERT(mBufferData);
560
561 return mBufferData->mV4l2Buffer.timestamp;
562 }
563
564 void V4L2WritableBufferRef::setPlaneBytesUsed(const size_t plane, const size_t bytesUsed) {
565 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
566 ALOG_ASSERT(mBufferData);
567
568 if (plane >= planesCount()) {
569 ALOGE("Invalid plane %zu requested.", plane);
570 return;
571 }
572
573 if (bytesUsed > getPlaneSize(plane)) {
574 ALOGE("Set bytes used %zu larger than plane size %zu.", bytesUsed, getPlaneSize(plane));
575 return;
576 }
577
578 mBufferData->mV4l2Buffer.m.planes[plane].bytesused = bytesUsed;
579 }
580
581 size_t V4L2WritableBufferRef::getPlaneBytesUsed(const size_t plane) const {
582 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
583 ALOG_ASSERT(mBufferData);
584
585 if (plane >= planesCount()) {
586 ALOGE("Invalid plane %zu requested.", plane);
587 return 0;
588 }
589
590 return mBufferData->mV4l2Buffer.m.planes[plane].bytesused;
591 }
592
593 void V4L2WritableBufferRef::setPlaneDataOffset(const size_t plane, const size_t dataOffset) {
594 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
595 ALOG_ASSERT(mBufferData);
596
597 if (plane >= planesCount()) {
598 ALOGE("Invalid plane %zu requested.", plane);
599 return;
600 }
601
602 mBufferData->mV4l2Buffer.m.planes[plane].data_offset = dataOffset;
603 }
604
605 size_t V4L2WritableBufferRef::bufferId() const {
606 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
607 ALOG_ASSERT(mBufferData);
608
609 return mBufferData->mV4l2Buffer.index;
610 }
611
612 V4L2ReadableBuffer::V4L2ReadableBuffer(const struct v4l2_buffer& v4l2Buffer,
613 base::WeakPtr<V4L2Queue> queue)
614 : mBufferData(std::make_unique<V4L2BufferRefBase>(v4l2Buffer, std::move(queue))) {
615 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
616 }
617
618 V4L2ReadableBuffer::~V4L2ReadableBuffer() {
619 // This method is thread-safe. Since we are the destructor, we are guaranteed to be called from
620     // the only remaining reference to us. Also, we are just calling the destructor of mBufferData,
621 // which is also thread-safe.
622 ALOG_ASSERT(mBufferData);
623 }
624
625 bool V4L2ReadableBuffer::isLast() const {
626 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
627 ALOG_ASSERT(mBufferData);
628
629 return mBufferData->mV4l2Buffer.flags & V4L2_BUF_FLAG_LAST;
630 }
631
632 bool V4L2ReadableBuffer::isKeyframe() const {
633 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
634 ALOG_ASSERT(mBufferData);
635
636 return mBufferData->mV4l2Buffer.flags & V4L2_BUF_FLAG_KEYFRAME;
637 }
638
639 struct timeval V4L2ReadableBuffer::getTimeStamp() const {
640 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
641 ALOG_ASSERT(mBufferData);
642
643 return mBufferData->mV4l2Buffer.timestamp;
644 }
645
646 size_t V4L2ReadableBuffer::planesCount() const {
647 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
648 ALOG_ASSERT(mBufferData);
649
650 return mBufferData->mV4l2Buffer.length;
651 }
652
653 const void* V4L2ReadableBuffer::getPlaneMapping(const size_t plane) const {
654 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
655 DCHECK(mBufferData);
656
657 return mBufferData->getPlaneMapping(plane);
658 }
659
660 size_t V4L2ReadableBuffer::getPlaneBytesUsed(const size_t plane) const {
661 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
662 ALOG_ASSERT(mBufferData);
663
664 if (plane >= planesCount()) {
665 ALOGE("Invalid plane %zu requested.", plane);
666 return 0;
667 }
668
669 return mBufferData->mV4l2Planes[plane].bytesused;
670 }
671
672 size_t V4L2ReadableBuffer::getPlaneDataOffset(const size_t plane) const {
673 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
674 ALOG_ASSERT(mBufferData);
675
676 if (plane >= planesCount()) {
677 ALOGE("Invalid plane %zu requested.", plane);
678 return 0;
679 }
680
681 return mBufferData->mV4l2Planes[plane].data_offset;
682 }
683
684 size_t V4L2ReadableBuffer::bufferId() const {
685 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
686 ALOG_ASSERT(mBufferData);
687
688 return mBufferData->mV4l2Buffer.index;
689 }
690
691 // This class is used to expose buffer reference classes constructors to this module. This is to
692 // ensure that nobody else can create buffer references.
693 class V4L2BufferRefFactory {
694 public:
695     static V4L2WritableBufferRef CreateWritableRef(const struct v4l2_buffer& v4l2Buffer,
696 base::WeakPtr<V4L2Queue> queue) {
697 return V4L2WritableBufferRef(v4l2Buffer, std::move(queue));
698 }
699
700     static V4L2ReadableBufferRef CreateReadableRef(const struct v4l2_buffer& v4l2Buffer,
701 base::WeakPtr<V4L2Queue> queue) {
702 return new V4L2ReadableBuffer(v4l2Buffer, std::move(queue));
703 }
704 };
705
706 // Helper macros that prefix log messages with the queue type.
707 #define ALOGEQ(fmt, ...) ALOGE("(%s)" fmt, V4L2Device::v4L2BufferTypeToString(mType), ##__VA_ARGS__)
708 #define ALOGVQ(fmt, ...) ALOGD("(%s)" fmt, V4L2Device::v4L2BufferTypeToString(mType), ##__VA_ARGS__)
709
710 V4L2Queue::V4L2Queue(scoped_refptr<V4L2Device> dev, enum v4l2_buf_type type,
711 base::OnceClosure destroyCb)
712 : mType(type), mDevice(dev), mDestroyCb(std::move(destroyCb)) {
713 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
714 }
715
716 V4L2Queue::~V4L2Queue() {
717 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
718
719 if (mIsStreaming) {
720 ALOGEQ("Queue is still streaming, trying to stop it...");
721 streamoff();
722 }
723
724 ALOG_ASSERT(mQueuedBuffers.empty());
725 ALOG_ASSERT(!mFreeBuffers);
726
727 if (!mBuffers.empty()) {
728 ALOGEQ("Buffers are still allocated, trying to deallocate them...");
729 deallocateBuffers();
730 }
731
732 std::move(mDestroyCb).Run();
733 }
734
735 std::optional<struct v4l2_format> V4L2Queue::setFormat(uint32_t fourcc, const ui::Size& size,
736 size_t bufferSize, uint32_t stride) {
737 ATRACE_CALL();
738 struct v4l2_format format = buildV4L2Format(mType, fourcc, size, bufferSize, stride);
739 if (mDevice->ioctl(VIDIOC_S_FMT, &format) != 0 || format.fmt.pix_mp.pixelformat != fourcc) {
740 ALOGEQ("Failed to set format (format_fourcc=0x%" PRIx32 ")", fourcc);
741 return std::nullopt;
742 }
743
744 mCurrentFormat = format;
745 return mCurrentFormat;
746 }
747
748 std::optional<struct v4l2_format> V4L2Queue::tryFormat(uint32_t fourcc, const ui::Size& size,
749 size_t bufferSize) {
750 struct v4l2_format format = buildV4L2Format(mType, fourcc, size, bufferSize, 0);
751 if (mDevice->ioctl(VIDIOC_TRY_FMT, &format) != 0 || format.fmt.pix_mp.pixelformat != fourcc) {
752 ALOGEQ("Tried format not supported (format_fourcc=0x%" PRIx32 ")", fourcc);
753 return std::nullopt;
754 }
755
756 return format;
757 }
758
759 std::pair<std::optional<struct v4l2_format>, int> V4L2Queue::getFormat() {
760 struct v4l2_format format;
761 memset(&format, 0, sizeof(format));
762 format.type = mType;
763 if (mDevice->ioctl(VIDIOC_G_FMT, &format) != 0) {
764 ALOGEQ("Failed to get format");
765 return std::make_pair(std::nullopt, errno);
766 }
767
768 return std::make_pair(format, 0);
769 }
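
// Illustrative sketch (assumption, not part of the original file): a typical negotiation probes a
// fourcc with tryFormat() and only commits it with setFormat() once the driver accepts it, e.g.:
//
//   if (queue->tryFormat(V4L2_PIX_FMT_NV12M, codedSize, /*bufferSize=*/0)) {
//       std::optional<struct v4l2_format> fmt =
//               queue->setFormat(V4L2_PIX_FMT_NV12M, codedSize, /*bufferSize=*/0, /*stride=*/0);
//       // On success |fmt| describes the format the driver actually applied.
//   }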
770
771 size_t V4L2Queue::allocateBuffers(size_t count, enum v4l2_memory memory) {
772 ATRACE_CALL();
773 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
774 ALOG_ASSERT(!mFreeBuffers);
775 ALOG_ASSERT(mQueuedBuffers.size() == 0u);
776
777 if (isStreaming()) {
778 ALOGEQ("Cannot allocate buffers while streaming.");
779 return 0;
780 }
781
782 if (mBuffers.size() != 0) {
783 ALOGEQ("Cannot allocate new buffers while others are still allocated.");
784 return 0;
785 }
786
787 if (count == 0) {
788 ALOGEQ("Attempting to allocate 0 buffers.");
789 return 0;
790 }
791
792 // First query the number of planes in the buffers we are about to request. This should not be
793 // required, but Tegra's VIDIOC_QUERYBUF will fail on output buffers if the number of specified
794 // planes does not exactly match the format.
795 struct v4l2_format format = {.type = mType};
796 int ret = mDevice->ioctl(VIDIOC_G_FMT, &format);
797 if (ret) {
798 ALOGEQ("VIDIOC_G_FMT failed");
799 return 0;
800 }
801 mPlanesCount = format.fmt.pix_mp.num_planes;
802 ALOG_ASSERT(mPlanesCount <= static_cast<size_t>(VIDEO_MAX_PLANES));
803
804 struct v4l2_requestbuffers reqbufs;
805 memset(&reqbufs, 0, sizeof(reqbufs));
806 reqbufs.count = count;
807 reqbufs.type = mType;
808 reqbufs.memory = memory;
809 ALOGVQ("Requesting %zu buffers.", count);
810
811 ret = mDevice->ioctl(VIDIOC_REQBUFS, &reqbufs);
812 if (ret) {
813 ALOGEQ("VIDIOC_REQBUFS failed");
814 return 0;
815 }
816 ALOGVQ("Queue %u: got %u buffers.", mType, reqbufs.count);
817
818 mMemory = memory;
819
820 mFreeBuffers = new V4L2BuffersList();
821
822 // Now query all buffer information.
823 for (size_t i = 0; i < reqbufs.count; i++) {
824 auto buffer = V4L2Buffer::create(mDevice, mType, mMemory, format, i);
825
826 if (!buffer) {
827 deallocateBuffers();
828
829 return 0;
830 }
831
832 mBuffers.emplace_back(std::move(buffer));
833 mFreeBuffers->returnBuffer(i);
834 }
835
836 ALOG_ASSERT(mFreeBuffers);
837 ALOG_ASSERT(mFreeBuffers->size() == mBuffers.size());
838 ALOG_ASSERT(mQueuedBuffers.size() == 0u);
839 reportTraceMetrics();
840
841 return mBuffers.size();
842 }
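
// Illustrative sketch of the usual queue lifecycle around allocateBuffers() (assumption, not part
// of the original file):
//
//   queue->setFormat(fourcc, codedSize, bufferSize, /*stride=*/0);
//   size_t allocated = queue->allocateBuffers(kNumBuffers, V4L2_MEMORY_DMABUF);
//   if (allocated > 0 && queue->streamon()) {
//       // ... queue and dequeue buffers ...
//   }
//   queue->streamoff();
//   queue->deallocateBuffers();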
843
844 bool V4L2Queue::deallocateBuffers() {
845 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
846
847 if (isStreaming()) {
848 ALOGEQ("Cannot deallocate buffers while streaming.");
849 return false;
850 }
851
852 if (mBuffers.size() == 0) return true;
853
854 mWeakThisFactory.InvalidateWeakPtrs();
855 mBuffers.clear();
856 mFreeBuffers = nullptr;
857
858 // Free all buffers.
859 struct v4l2_requestbuffers reqbufs;
860 memset(&reqbufs, 0, sizeof(reqbufs));
861 reqbufs.count = 0;
862 reqbufs.type = mType;
863 reqbufs.memory = mMemory;
864
865 int ret = mDevice->ioctl(VIDIOC_REQBUFS, &reqbufs);
866 if (ret) {
867 ALOGEQ("VIDIOC_REQBUFS failed");
868 return false;
869 }
870
871 ALOG_ASSERT(!mFreeBuffers);
872 ALOG_ASSERT(mQueuedBuffers.size() == 0u);
873 reportTraceMetrics();
874
875 return true;
876 }
877
878 size_t V4L2Queue::getMemoryUsage() const {
879 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
880 size_t usage = 0;
881 for (const auto& buf : mBuffers) {
882 usage += buf->getMemoryUsage();
883 }
884 return usage;
885 }
886
887 v4l2_memory V4L2Queue::getMemoryType() const {
888 return mMemory;
889 }
890
891 std::optional<V4L2WritableBufferRef> V4L2Queue::getFreeBuffer() {
892 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
893
894 // No buffers allocated at the moment?
895 if (!mFreeBuffers) return std::nullopt;
896
897 auto bufferId = mFreeBuffers->getFreeBuffer();
898 if (!bufferId.has_value()) return std::nullopt;
899
900 return V4L2BufferRefFactory::CreateWritableRef(mBuffers[bufferId.value()]->v4l2_buffer(),
901 mWeakThisFactory.GetWeakPtr());
902 }
903
904 std::optional<V4L2WritableBufferRef> V4L2Queue::getFreeBuffer(size_t requestedBufferIid) {
905 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
906
907 // No buffers allocated at the moment?
908 if (!mFreeBuffers) return std::nullopt;
909
910 auto bufferId = mFreeBuffers->getFreeBuffer(requestedBufferIid);
911 if (!bufferId.has_value()) return std::nullopt;
912
913 return V4L2BufferRefFactory::CreateWritableRef(mBuffers[bufferId.value()]->v4l2_buffer(),
914 mWeakThisFactory.GetWeakPtr());
915 }
916
917 void V4L2Queue::reportTraceMetrics() {
918     // Skip building trace labels when ATrace is not enabled.
919 if (!ATRACE_ENABLED()) return;
920
921 std::string atraceLabel;
922
923 atraceLabel =
924 V4L2Device::v4L2BufferTypeToATraceLabel(mDevice->getDebugStreamId(), mType, "streamon");
925 ATRACE_INT(atraceLabel.c_str(), isStreaming());
926
927 atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(mDevice->getDebugStreamId(), mType,
928 "buffers free");
929 ATRACE_INT64(atraceLabel.c_str(), freeBuffersCount());
930
931 atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(mDevice->getDebugStreamId(), mType,
932 "buffers queued");
933 ATRACE_INT64(atraceLabel.c_str(), queuedBuffersCount());
934
935 atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(mDevice->getDebugStreamId(), mType,
936 "buffers allocated");
937 ATRACE_INT64(atraceLabel.c_str(), allocatedBuffersCount());
938 }
939
940 bool V4L2Queue::queueBuffer(struct v4l2_buffer* v4l2Buffer) {
941 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
942
943 int ret = mDevice->ioctl(VIDIOC_QBUF, v4l2Buffer);
944 if (ret) {
945 ALOGEQ("VIDIOC_QBUF failed");
946 return false;
947 }
948
949 if (ATRACE_ENABLED()) {
950 std::string atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(
951 mDevice->getDebugStreamId(), mType, "enqueued buffer");
952 ATRACE_ASYNC_BEGIN(atraceLabel.c_str(), v4l2Buffer->index);
953 }
954
955 auto inserted = mQueuedBuffers.emplace(v4l2Buffer->index);
956 if (!inserted.second) {
957 ALOGE("Queuing buffer failed");
958 return false;
959 }
960
961 mDevice->schedulePoll();
962
963 reportTraceMetrics();
964
965 return true;
966 }
967
968 std::pair<bool, V4L2ReadableBufferRef> V4L2Queue::dequeueBuffer() {
969 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
970
971 // No need to dequeue if no buffers queued.
972 if (queuedBuffersCount() == 0) return std::make_pair(true, nullptr);
973
974 if (!isStreaming()) {
975 ALOGEQ("Attempting to dequeue a buffer while not streaming.");
976 return std::make_pair(true, nullptr);
977 }
978
979 struct v4l2_buffer v4l2Buffer;
980 memset(&v4l2Buffer, 0, sizeof(v4l2Buffer));
981 // WARNING: do not change this to a vector or something smaller than VIDEO_MAX_PLANES, otherwise
982 // the Tegra libv4l2 will write data beyond the number of allocated planes, resulting in memory
983 // corruption.
984 struct v4l2_plane planes[VIDEO_MAX_PLANES];
985 memset(planes, 0, sizeof(planes));
986 v4l2Buffer.type = mType;
987 v4l2Buffer.memory = mMemory;
988 v4l2Buffer.m.planes = planes;
989 v4l2Buffer.length = mPlanesCount;
990 int ret = mDevice->ioctl(VIDIOC_DQBUF, &v4l2Buffer);
991 if (ret) {
992 // TODO(acourbot): we should not have to check for EPIPE as codec clients should not call
993 // this method after the last buffer is dequeued.
994 switch (errno) {
995 case EAGAIN:
996 case EPIPE:
997 // This is not an error so we'll need to continue polling but won't provide a buffer.
998 mDevice->schedulePoll();
999 return std::make_pair(true, nullptr);
1000 default:
1001 ALOGEQ("VIDIOC_DQBUF failed");
1002 return std::make_pair(false, nullptr);
1003 }
1004 }
1005
1006 if (ATRACE_ENABLED()) {
1007 std::string atraceLabel = V4L2Device::v4L2BufferTypeToATraceLabel(
1008 mDevice->getDebugStreamId(), mType, "enqueued buffer");
1009 ATRACE_ASYNC_END(atraceLabel.c_str(), v4l2Buffer.index);
1010 }
1011
1012 auto it = mQueuedBuffers.find(v4l2Buffer.index);
1013 ALOG_ASSERT(it != mQueuedBuffers.end());
1014 mQueuedBuffers.erase(*it);
1015
1016 if (queuedBuffersCount() > 0) mDevice->schedulePoll();
1017
1018 reportTraceMetrics();
1019
1020 ALOG_ASSERT(mFreeBuffers);
1021 return std::make_pair(true, V4L2BufferRefFactory::CreateReadableRef(
1022 v4l2Buffer, mWeakThisFactory.GetWeakPtr()));
1023 }
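
// Illustrative sketch (assumption): the first member of the returned pair distinguishes "nothing
// to dequeue right now" from a real failure, so callers typically drain the queue in a loop:
//
//   while (true) {
//       auto [success, dequeuedBuffer] = queue->dequeueBuffer();
//       if (!success) break;          // Fatal error, report it to the client.
//       if (!dequeuedBuffer) break;   // Nothing ready yet; wait for the next poll wake-up.
//       // Process |dequeuedBuffer| (a V4L2ReadableBufferRef) here.
//   }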
1024
1025 bool V4L2Queue::isStreaming() const {
1026 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
1027
1028 return mIsStreaming;
1029 }
1030
1031 bool V4L2Queue::streamon() {
1032 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
1033
1034 if (mIsStreaming) return true;
1035
1036 int arg = static_cast<int>(mType);
1037 int ret = mDevice->ioctl(VIDIOC_STREAMON, &arg);
1038 if (ret) {
1039 ALOGEQ("VIDIOC_STREAMON failed");
1040 return false;
1041 }
1042
1043 mIsStreaming = true;
1044 reportTraceMetrics();
1045
1046 return true;
1047 }
1048
1049 bool V4L2Queue::streamoff() {
1050 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
1051
1052 // We do not check the value of IsStreaming(), because we may have queued buffers to the queue
1053     // and wish to get them back - in such a case, we may need to do a VIDIOC_STREAMOFF on a
1054 // stopped queue.
1055
1056 int arg = static_cast<int>(mType);
1057 int ret = mDevice->ioctl(VIDIOC_STREAMOFF, &arg);
1058 if (ret) {
1059 ALOGEQ("VIDIOC_STREAMOFF failed");
1060 return false;
1061 }
1062
1063 for (const auto& bufferId : mQueuedBuffers) {
1064 ALOG_ASSERT(mFreeBuffers);
1065 mFreeBuffers->returnBuffer(bufferId);
1066 }
1067
1068 mQueuedBuffers.clear();
1069
1070 mIsStreaming = false;
1071
1072 reportTraceMetrics();
1073
1074 return true;
1075 }
1076
1077 size_t V4L2Queue::allocatedBuffersCount() const {
1078 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
1079
1080 return mBuffers.size();
1081 }
1082
1083 size_t V4L2Queue::freeBuffersCount() const {
1084 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
1085
1086 return mFreeBuffers ? mFreeBuffers->size() : 0;
1087 }
1088
1089 size_t V4L2Queue::queuedBuffersCount() const {
1090 DCHECK_CALLED_ON_VALID_SEQUENCE(mSequenceChecker);
1091
1092 return mQueuedBuffers.size();
1093 }
1094
1095 #undef ALOGEQ
1096 #undef ALOGVQ
1097
1098 // This class is used to expose V4L2Queue's constructor to this module. This is to ensure that
1099 // nobody else can create instances of it.
1100 class V4L2QueueFactory {
1101 public:
1102     static scoped_refptr<V4L2Queue> createQueue(scoped_refptr<V4L2Device> dev,
1103 enum v4l2_buf_type type,
1104 base::OnceClosure destroyCb) {
1105 return new V4L2Queue(std::move(dev), type, std::move(destroyCb));
1106 }
1107 };
1108
1109 V4L2Device::V4L2Device(uint32_t debugStreamId) : mDebugStreamId(debugStreamId) {
1110 DETACH_FROM_SEQUENCE(mClientSequenceChecker);
1111 }
1112
1113 V4L2Device::~V4L2Device() {
1114 closeDevice();
1115 }
1116
1117 scoped_refptr<V4L2Queue> V4L2Device::getQueue(enum v4l2_buf_type type) {
1118 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
1119
1120 switch (type) {
1121 // Supported queue types.
1122 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1123 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1124 break;
1125 default:
1126 ALOGE("Unsupported V4L2 queue type: %u", type);
1127 return nullptr;
1128 }
1129
1130 // TODO(acourbot): we should instead query the device for available queues, and allocate them
1131 // accordingly. This will do for now though.
1132 auto it = mQueues.find(type);
1133 if (it != mQueues.end()) return scoped_refptr<V4L2Queue>(it->second);
1134
1135 scoped_refptr<V4L2Queue> queue = V4L2QueueFactory::createQueue(
1136 this, type, base::BindOnce(&V4L2Device::onQueueDestroyed, this, type));
1137
1138 mQueues[type] = queue.get();
1139 return queue;
1140 }
1141
1142 void V4L2Device::onQueueDestroyed(v4l2_buf_type bufType) {
1143 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
1144
1145 auto it = mQueues.find(bufType);
1146 ALOG_ASSERT(it != mQueues.end());
1147 mQueues.erase(it);
1148 }
1149
1150 // static
1151 scoped_refptr<V4L2Device> V4L2Device::create(uint32_t debugStreamId) {
1152 ALOGV("%s()", __func__);
1153 return scoped_refptr<V4L2Device>(new V4L2Device(debugStreamId));
1154 }
1155
1156 bool V4L2Device::open(Type type, uint32_t v4l2PixFmt) {
1157 ALOGV("%s()", __func__);
1158
1159 std::string path = getDevicePathFor(type, v4l2PixFmt);
1160
1161 if (path.empty()) {
1162 ALOGE("No devices supporting %s for type: %u", fourccToString(v4l2PixFmt).c_str(),
1163 static_cast<uint32_t>(type));
1164 return false;
1165 }
1166
1167 if (!openDevicePath(path, type)) {
1168 ALOGE("Failed opening %s", path.c_str());
1169 return false;
1170 }
1171
1172 mDevicePollInterruptFd.reset(eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC));
1173 if (!mDevicePollInterruptFd.is_valid()) {
1174 ALOGE("Failed creating a poll interrupt fd");
1175 return false;
1176 }
1177
1178 return true;
1179 }
1180
1181 int V4L2Device::ioctl(int request, void* arg) {
1182 ALOG_ASSERT(mDeviceFd.is_valid());
1183 return HANDLE_EINTR(::ioctl(mDeviceFd.get(), request, arg));
1184 }
1185
1186 bool V4L2Device::poll(bool pollDevice, bool pollBuffers, bool* eventPending, bool* buffersPending) {
1187 struct pollfd pollfds[2];
1188 nfds_t nfds;
1189 int pollfd = -1;
1190
1191 pollfds[0].fd = mDevicePollInterruptFd.get();
1192 pollfds[0].events = POLLIN | POLLERR;
1193 nfds = 1;
1194
1195 if (pollDevice) {
1196 ALOGV("adding device fd to poll() set");
1197 pollfds[nfds].fd = mDeviceFd.get();
1198 pollfds[nfds].events = POLLERR | POLLPRI;
1199 if (pollBuffers) {
1200 ALOGV("will poll buffers");
1201 pollfds[nfds].events |= POLLIN | POLLOUT;
1202 }
1203 pollfd = nfds;
1204 nfds++;
1205 }
1206
1207 if (HANDLE_EINTR(::poll(pollfds, nfds, -1)) == -1) {
1208 ALOGE("poll() failed");
1209 return false;
1210 }
1211 *eventPending = (pollfd != -1 && pollfds[pollfd].revents & POLLPRI);
1212 *buffersPending = (pollfd != -1 && pollfds[pollfd].revents & (POLLIN | POLLOUT));
1213 return true;
1214 }
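
// Illustrative sketch (assumption, not part of the original file): a dedicated poller thread
// blocks in poll() and is woken either by the driver or by setDevicePollInterrupt() called from
// another thread:
//
//   bool eventPending = false;
//   bool buffersPending = false;
//   if (!device->poll(/*pollDevice=*/true, /*pollBuffers=*/true, &eventPending, &buffersPending))
//       return;  // poll() failed.
//   if (buffersPending) { /* dequeue buffers on the codec task runner */ }
//   // clearDevicePollInterrupt() drains the interrupt eventfd before the next poll() call.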
1215
1216 void* V4L2Device::mmap(void* addr, unsigned int len, int prot, int flags, unsigned int offset) {
1217 DCHECK(mDeviceFd.is_valid());
1218 return ::mmap(addr, len, prot, flags, mDeviceFd.get(), offset);
1219 }
1220
1221 void V4L2Device::munmap(void* addr, unsigned int len) {
1222 ::munmap(addr, len);
1223 }
1224
1225 bool V4L2Device::setDevicePollInterrupt() {
1226 ALOGV("%s()", __func__);
1227
1228 const uint64_t buf = 1;
1229 if (HANDLE_EINTR(write(mDevicePollInterruptFd.get(), &buf, sizeof(buf))) == -1) {
1230 ALOGE("write() failed");
1231 return false;
1232 }
1233 return true;
1234 }
1235
1236 bool V4L2Device::clearDevicePollInterrupt() {
1237 ALOGV("%s()", __func__);
1238
1239 uint64_t buf;
1240 if (HANDLE_EINTR(read(mDevicePollInterruptFd.get(), &buf, sizeof(buf))) == -1) {
1241 if (errno == EAGAIN) {
1242 // No interrupt flag set, and we're reading nonblocking. Not an error.
1243 return true;
1244 } else {
1245 ALOGE("read() failed");
1246 return false;
1247 }
1248 }
1249 return true;
1250 }
1251
1252 std::vector<base::ScopedFD> V4L2Device::getDmabufsForV4L2Buffer(int index, size_t numPlanes,
1253 enum v4l2_buf_type bufType) {
1254 ALOGV("%s()", __func__);
1255 ALOG_ASSERT(V4L2_TYPE_IS_MULTIPLANAR(bufType));
1256
1257 std::vector<base::ScopedFD> dmabufFds;
1258 for (size_t i = 0; i < numPlanes; ++i) {
1259 struct v4l2_exportbuffer expbuf;
1260 memset(&expbuf, 0, sizeof(expbuf));
1261 expbuf.type = bufType;
1262 expbuf.index = index;
1263 expbuf.plane = i;
1264 expbuf.flags = O_CLOEXEC;
1265 if (ioctl(VIDIOC_EXPBUF, &expbuf) != 0) {
1266 dmabufFds.clear();
1267 break;
1268 }
1269
1270 dmabufFds.push_back(base::ScopedFD(expbuf.fd));
1271 }
1272
1273 return dmabufFds;
1274 }
1275
1276 std::vector<uint32_t> V4L2Device::preferredInputFormat(Type type) {
1277 if (type == Type::kEncoder) return {V4L2_PIX_FMT_NV12M, V4L2_PIX_FMT_NV12};
1278
1279 return {};
1280 }
1281
1282 // static
1283 uint32_t V4L2Device::c2ProfileToV4L2PixFmt(C2Config::profile_t profile, bool sliceBased) {
1284 if (profile >= C2Config::PROFILE_AVC_BASELINE &&
1285 profile <= C2Config::PROFILE_AVC_ENHANCED_MULTIVIEW_DEPTH_HIGH) {
1286 if (sliceBased) {
1287 return V4L2_PIX_FMT_H264_SLICE;
1288 } else {
1289 return V4L2_PIX_FMT_H264;
1290 }
1291 } else if (profile >= C2Config::PROFILE_VP8_0 && profile <= C2Config::PROFILE_VP8_3) {
1292 if (sliceBased) {
1293 return V4L2_PIX_FMT_VP8_FRAME;
1294 } else {
1295 return V4L2_PIX_FMT_VP8;
1296 }
1297 } else if (profile >= C2Config::PROFILE_VP9_0 && profile <= C2Config::PROFILE_VP9_3) {
1298 if (sliceBased) {
1299 return V4L2_PIX_FMT_VP9_FRAME;
1300 } else {
1301 return V4L2_PIX_FMT_VP9;
1302 }
1303 } else if (profile >= C2Config::PROFILE_HEVC_MAIN &&
1304 profile <= C2Config::PROFILE_HEVC_3D_MAIN) {
1305 if (sliceBased) {
1306 return V4L2_PIX_FMT_HEVC_SLICE;
1307 } else {
1308 return V4L2_PIX_FMT_HEVC;
1309 }
1310 } else {
1311 ALOGE("Unknown profile: %s", profileToString(profile));
1312 return 0;
1313 }
1314 }
1315
1316 // static
1317 C2Config::level_t V4L2Device::v4L2LevelToC2Level(VideoCodec codec, uint32_t level) {
1318 switch (codec) {
1319 case VideoCodec::H264:
1320 switch (level) {
1321 case V4L2_MPEG_VIDEO_H264_LEVEL_1_0:
1322 return C2Config::LEVEL_AVC_1;
1323 case V4L2_MPEG_VIDEO_H264_LEVEL_1B:
1324 return C2Config::LEVEL_AVC_1B;
1325 case V4L2_MPEG_VIDEO_H264_LEVEL_1_1:
1326 return C2Config::LEVEL_AVC_1_1;
1327 case V4L2_MPEG_VIDEO_H264_LEVEL_1_2:
1328 return C2Config::LEVEL_AVC_1_2;
1329 case V4L2_MPEG_VIDEO_H264_LEVEL_1_3:
1330 return C2Config::LEVEL_AVC_1_3;
1331 case V4L2_MPEG_VIDEO_H264_LEVEL_2_0:
1332 return C2Config::LEVEL_AVC_2;
1333 case V4L2_MPEG_VIDEO_H264_LEVEL_2_1:
1334 return C2Config::LEVEL_AVC_2_1;
1335 case V4L2_MPEG_VIDEO_H264_LEVEL_2_2:
1336 return C2Config::LEVEL_AVC_2_2;
1337 case V4L2_MPEG_VIDEO_H264_LEVEL_3_0:
1338 return C2Config::LEVEL_AVC_3;
1339 case V4L2_MPEG_VIDEO_H264_LEVEL_3_1:
1340 return C2Config::LEVEL_AVC_3_1;
1341 case V4L2_MPEG_VIDEO_H264_LEVEL_3_2:
1342 return C2Config::LEVEL_AVC_3_2;
1343 case V4L2_MPEG_VIDEO_H264_LEVEL_4_0:
1344 return C2Config::LEVEL_AVC_4;
1345 case V4L2_MPEG_VIDEO_H264_LEVEL_4_1:
1346 return C2Config::LEVEL_AVC_4_1;
1347 case V4L2_MPEG_VIDEO_H264_LEVEL_4_2:
1348 return C2Config::LEVEL_AVC_4_2;
1349 case V4L2_MPEG_VIDEO_H264_LEVEL_5_0:
1350 return C2Config::LEVEL_AVC_5;
1351 #ifdef V4L2_MPEG_VIDEO_H264_LEVEL_5_1
1352 case V4L2_MPEG_VIDEO_H264_LEVEL_5_1:
1353 return C2Config::LEVEL_AVC_5_1;
1354 #endif
1355 #ifdef V4L2_MPEG_VIDEO_H264_LEVEL_5_2
1356 case V4L2_MPEG_VIDEO_H264_LEVEL_5_2:
1357 return C2Config::LEVEL_AVC_5_2;
1358 #endif
1359 #ifdef V4L2_MPEG_VIDEO_H264_LEVEL_6_0
1360 case V4L2_MPEG_VIDEO_H264_LEVEL_6_0:
1361 return C2Config::LEVEL_AVC_6;
1362 #endif
1363 #ifdef V4L2_MPEG_VIDEO_H264_LEVEL_6_1
1364 case V4L2_MPEG_VIDEO_H264_LEVEL_6_1:
1365 return C2Config::LEVEL_AVC_6_1;
1366 #endif
1367 #ifdef V4L2_MPEG_VIDEO_H264_LEVEL_6_2
1368 case V4L2_MPEG_VIDEO_H264_LEVEL_6_2:
1369 return C2Config::LEVEL_AVC_6_2;
1370 #endif
1371 }
1372 break;
1373 case VideoCodec::VP8:
1374 return C2Config::LEVEL_UNUSED;
1375 break;
1376 case VideoCodec::VP9:
1377 switch (level) {
1378 #ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_1_0
1379 case V4L2_MPEG_VIDEO_VP9_LEVEL_1_0:
1380 return C2Config::LEVEL_VP9_1;
1381 case V4L2_MPEG_VIDEO_VP9_LEVEL_1_1:
1382 return C2Config::LEVEL_VP9_1_1;
1383 case V4L2_MPEG_VIDEO_VP9_LEVEL_2_0:
1384 return C2Config::LEVEL_VP9_2;
1385 case V4L2_MPEG_VIDEO_VP9_LEVEL_2_1:
1386 return C2Config::LEVEL_VP9_2_1;
1387 case V4L2_MPEG_VIDEO_VP9_LEVEL_3_0:
1388 return C2Config::LEVEL_VP9_3;
1389 case V4L2_MPEG_VIDEO_VP9_LEVEL_3_1:
1390 return C2Config::LEVEL_VP9_3_1;
1391 case V4L2_MPEG_VIDEO_VP9_LEVEL_4_0:
1392 return C2Config::LEVEL_VP9_4;
1393 case V4L2_MPEG_VIDEO_VP9_LEVEL_4_1:
1394 return C2Config::LEVEL_VP9_4_1;
1395 #ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_5_0
1396 case V4L2_MPEG_VIDEO_VP9_LEVEL_5_0:
1397 return C2Config::LEVEL_VP9_5;
1398 #endif
1399 #ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_5_1
1400 case V4L2_MPEG_VIDEO_VP9_LEVEL_5_1:
1401 return C2Config::LEVEL_VP9_5_1;
1402 #endif
1403 #ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_5_2
1404 case V4L2_MPEG_VIDEO_VP9_LEVEL_5_2:
1405 return C2Config::LEVEL_VP9_5_2;
1406 #endif
1407 #ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_6_0
1408 case V4L2_MPEG_VIDEO_VP9_LEVEL_6_0:
1409 return C2Config::LEVEL_VP9_6;
1410 #endif
1411 #ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_6_1
1412 case V4L2_MPEG_VIDEO_VP9_LEVEL_6_1:
1413 return C2Config::LEVEL_VP9_6_1;
1414 #endif
1415 #ifdef V4L2_MPEG_VIDEO_VP9_LEVEL_6_2
1416 case V4L2_MPEG_VIDEO_VP9_LEVEL_6_2:
1417 return C2Config::LEVEL_VP9_6_2;
1418 #endif
1419 #endif
1420 default:
1421 return C2Config::LEVEL_UNUSED;
1422 }
1423 break;
1424 case VideoCodec::HEVC:
1425 switch (level) {
1426 case V4L2_MPEG_VIDEO_HEVC_LEVEL_1:
1427 return C2Config::LEVEL_HEVC_MAIN_1;
1428 case V4L2_MPEG_VIDEO_HEVC_LEVEL_2:
1429 return C2Config::LEVEL_HEVC_MAIN_2;
1430 case V4L2_MPEG_VIDEO_HEVC_LEVEL_2_1:
1431 return C2Config::LEVEL_HEVC_MAIN_2_1;
1432 case V4L2_MPEG_VIDEO_HEVC_LEVEL_3:
1433 return C2Config::LEVEL_HEVC_MAIN_3;
1434 case V4L2_MPEG_VIDEO_HEVC_LEVEL_3_1:
1435 return C2Config::LEVEL_HEVC_MAIN_3_1;
1436 case V4L2_MPEG_VIDEO_HEVC_LEVEL_4:
1437 return C2Config::LEVEL_HEVC_MAIN_4;
1438 case V4L2_MPEG_VIDEO_HEVC_LEVEL_4_1:
1439 return C2Config::LEVEL_HEVC_MAIN_4_1;
1440 case V4L2_MPEG_VIDEO_HEVC_LEVEL_5:
1441 return C2Config::LEVEL_HEVC_MAIN_5;
1442 case V4L2_MPEG_VIDEO_HEVC_LEVEL_5_1:
1443 return C2Config::LEVEL_HEVC_MAIN_5_1;
1444 case V4L2_MPEG_VIDEO_HEVC_LEVEL_5_2:
1445 return C2Config::LEVEL_HEVC_MAIN_5_2;
1446 case V4L2_MPEG_VIDEO_HEVC_LEVEL_6:
1447 return C2Config::LEVEL_HEVC_MAIN_6;
1448 case V4L2_MPEG_VIDEO_HEVC_LEVEL_6_1:
1449 return C2Config::LEVEL_HEVC_MAIN_6_1;
1450 case V4L2_MPEG_VIDEO_HEVC_LEVEL_6_2:
1451 return C2Config::LEVEL_HEVC_MAIN_6_2;
1452 }
1453 break;
1454 default:
1455 ALOGE("Unknown codec: %u", codec);
1456 }
1457 ALOGE("Unknown level: %u", level);
1458 return C2Config::LEVEL_UNUSED;
1459 }
1460
1461 // static
1462 C2Config::profile_t V4L2Device::v4L2ProfileToC2Profile(VideoCodec codec, uint32_t profile) {
1463 switch (codec) {
1464 case VideoCodec::H264:
1465 switch (profile) {
1466 case V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE:
1467 return C2Config::PROFILE_AVC_BASELINE;
1468 case V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE:
1469 return C2Config::PROFILE_AVC_CONSTRAINED_BASELINE;
1470 case V4L2_MPEG_VIDEO_H264_PROFILE_MAIN:
1471 return C2Config::PROFILE_AVC_MAIN;
1472 case V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED:
1473 return C2Config::PROFILE_AVC_EXTENDED;
1474 case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH:
1475 return C2Config::PROFILE_AVC_HIGH;
1476 }
1477 break;
1478 case VideoCodec::VP8:
1479 switch (profile) {
1480 case V4L2_MPEG_VIDEO_VP8_PROFILE_0:
1481 return C2Config::PROFILE_VP8_0;
1482 case V4L2_MPEG_VIDEO_VP8_PROFILE_1:
1483 return C2Config::PROFILE_VP8_1;
1484 case V4L2_MPEG_VIDEO_VP8_PROFILE_2:
1485 return C2Config::PROFILE_VP8_2;
1486 case V4L2_MPEG_VIDEO_VP8_PROFILE_3:
1487 return C2Config::PROFILE_VP8_3;
1488 }
1489 break;
1490 case VideoCodec::VP9:
1491 switch (profile) {
1492 case V4L2_MPEG_VIDEO_VP9_PROFILE_0:
1493 return C2Config::PROFILE_VP9_0;
1494 case V4L2_MPEG_VIDEO_VP9_PROFILE_1:
1495 return C2Config::PROFILE_VP9_1;
1496 case V4L2_MPEG_VIDEO_VP9_PROFILE_2:
1497 return C2Config::PROFILE_VP9_2;
1498 case V4L2_MPEG_VIDEO_VP9_PROFILE_3:
1499 return C2Config::PROFILE_VP9_3;
1500 }
1501 break;
1502 case VideoCodec::HEVC:
1503 switch (profile) {
1504 case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN:
1505 return C2Config::PROFILE_HEVC_MAIN;
1506 case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_STILL_PICTURE:
1507 return C2Config::PROFILE_HEVC_MAIN_STILL;
1508 case V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_10:
1509 return C2Config::PROFILE_HEVC_MAIN_10;
1510 }
1511 break;
1512 default:
1513 ALOGE("Unknown codec: %u", codec);
1514 }
1515 ALOGE("Unknown profile: %u", profile);
1516 return C2Config::PROFILE_UNUSED;
1517 }
1518
1519 // static
1520 uint32_t V4L2Device::videoCodecToPixFmt(VideoCodec codec) {
1521 switch (codec) {
1522 case VideoCodec::H264:
1523 return V4L2_PIX_FMT_H264;
1524 case VideoCodec::VP8:
1525 return V4L2_PIX_FMT_VP8;
1526 case VideoCodec::VP9:
1527 return V4L2_PIX_FMT_VP9;
1528 case VideoCodec::HEVC:
1529 return V4L2_PIX_FMT_HEVC;
1530 }
1531 }
1532
1533 std::vector<C2Config::level_t> V4L2Device::queryC2Levels(uint32_t pixFmt) {
1534 auto getSupportedLevels = [this](VideoCodec codec, std::vector<C2Config::level_t>* levels) {
1535 uint32_t queryId = 0;
1536 switch (codec) {
1537 case VideoCodec::H264:
1538 queryId = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
1539 break;
1540 #ifdef V4L2_CID_MPEG_VIDEO_VP9_LEVEL
1541 case VideoCodec::VP9:
1542 queryId = V4L2_CID_MPEG_VIDEO_VP9_LEVEL;
1543 break;
1544 #endif
1545 case VideoCodec::HEVC:
1546 queryId = V4L2_CID_MPEG_VIDEO_HEVC_LEVEL;
1547 break;
1548 default:
1549 return false;
1550 }
1551
1552 v4l2_queryctrl queryCtrl = {};
1553 queryCtrl.id = queryId;
1554 if (ioctl(VIDIOC_QUERYCTRL, &queryCtrl) != 0) {
1555 return false;
1556 }
1557 v4l2_querymenu queryMenu = {};
1558 queryMenu.id = queryCtrl.id;
1559 for (queryMenu.index = queryCtrl.minimum;
1560 static_cast<int>(queryMenu.index) <= queryCtrl.maximum; queryMenu.index++) {
1561 if (ioctl(VIDIOC_QUERYMENU, &queryMenu) == 0) {
1562 const C2Config::level_t level =
1563 V4L2Device::v4L2LevelToC2Level(codec, queryMenu.index);
1564 if (level != C2Config::LEVEL_UNUSED) levels->push_back(level);
1565 }
1566 }
1567 return true;
1568 };
1569
1570 std::vector<C2Config::level_t> levels;
1571 switch (pixFmt) {
1572 case V4L2_PIX_FMT_H264:
1573 case V4L2_PIX_FMT_H264_SLICE:
1574 if (!getSupportedLevels(VideoCodec::H264, &levels)) {
1575 ALOGW("Driver doesn't support QUERY H264 levels, "
1576 "use default values, 1-5_2");
1577 levels = {C2Config::LEVEL_AVC_1, C2Config::LEVEL_AVC_1B, C2Config::LEVEL_AVC_1_1,
1578 C2Config::LEVEL_AVC_1_2, C2Config::LEVEL_AVC_1_3, C2Config::LEVEL_AVC_2,
1579 C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2, C2Config::LEVEL_AVC_3,
1580 C2Config::LEVEL_AVC_3_1, C2Config::LEVEL_AVC_3_2, C2Config::LEVEL_AVC_4,
1581 C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2, C2Config::LEVEL_AVC_5,
1582 C2Config::LEVEL_AVC_5_1, C2Config::LEVEL_AVC_5_2};
1583 }
1584 break;
1585 case V4L2_PIX_FMT_VP8:
1586 case V4L2_PIX_FMT_VP8_FRAME:
1587 if (!getSupportedLevels(VideoCodec::VP8, &levels)) {
1588 ALOGW("Driver doesn't support QUERY VP8 levels, use default values, unused");
1589 levels = {C2Config::LEVEL_UNUSED};
1590 }
1591 break;
1592 case V4L2_PIX_FMT_VP9:
1593 case V4L2_PIX_FMT_VP9_FRAME:
1594 if (!getSupportedLevels(VideoCodec::VP9, &levels)) {
1595 ALOGW("Driver doesn't support QUERY VP9 levels, use default values, 1-5");
1596 levels = {C2Config::LEVEL_VP9_1, C2Config::LEVEL_VP9_1_1, C2Config::LEVEL_VP9_2,
1597 C2Config::LEVEL_VP9_2_1, C2Config::LEVEL_VP9_3, C2Config::LEVEL_VP9_3_1,
1598 C2Config::LEVEL_VP9_4, C2Config::LEVEL_VP9_4_1, C2Config::LEVEL_VP9_5};
1599 }
1600 break;
1601 case V4L2_PIX_FMT_HEVC:
1602 case V4L2_PIX_FMT_HEVC_SLICE:
1603         if (!getSupportedLevels(VideoCodec::HEVC, &levels)) {
1604 ALOGW("Driver doesn't support QUERY HEVC levels, use default values");
1605 levels = {C2Config::LEVEL_HEVC_MAIN_1, C2Config::LEVEL_HEVC_MAIN_2,
1606 C2Config::LEVEL_HEVC_MAIN_2_1, C2Config::LEVEL_HEVC_MAIN_3,
1607 C2Config::LEVEL_HEVC_MAIN_3_1, C2Config::LEVEL_HEVC_MAIN_4,
1608 C2Config::LEVEL_HEVC_MAIN_4_1, C2Config::LEVEL_HEVC_MAIN_5,
1609 C2Config::LEVEL_HEVC_MAIN_5_1, C2Config::LEVEL_HEVC_MAIN_5_2,
1610 C2Config::LEVEL_HEVC_MAIN_6, C2Config::LEVEL_HEVC_MAIN_6_1,
1611 C2Config::LEVEL_HEVC_MAIN_6_2};
1612 }
1613 break;
1614 default:
1615 ALOGE("Unhandled pixelformat %s", fourccToString(pixFmt).c_str());
1616 return {};
1617 }
1618
1619 std::sort(levels.begin(), levels.end());
1620 levels.erase(std::unique(levels.begin(), levels.end()), levels.end());
1621 return levels;
1622 }
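
// Both queryC2Levels() above and queryC2Profiles() below rely on the same V4L2 pattern: query a
// menu control with VIDIOC_QUERYCTRL to learn its [minimum, maximum] index range, then walk that
// range with VIDIOC_QUERYMENU to find the entries the driver actually implements. A minimal sketch
// of the pattern with raw ioctls (the file descriptor |fd| and the chosen control id are
// illustrative, not code from this class):
//
//   struct v4l2_queryctrl queryCtrl = {};
//   queryCtrl.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
//   if (::ioctl(fd, VIDIOC_QUERYCTRL, &queryCtrl) == 0) {
//       struct v4l2_querymenu queryMenu = {};
//       queryMenu.id = queryCtrl.id;
//       for (queryMenu.index = queryCtrl.minimum;
//            static_cast<int>(queryMenu.index) <= queryCtrl.maximum; ++queryMenu.index) {
//           if (::ioctl(fd, VIDIOC_QUERYMENU, &queryMenu) == 0) {
//               // queryMenu.index is a supported entry, e.g. a V4L2_MPEG_VIDEO_H264_PROFILE_* value.
//           }
//       }
//   }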
1623
queryC2Profiles(uint32_t pixFmt)1624 std::vector<C2Config::profile_t> V4L2Device::queryC2Profiles(uint32_t pixFmt) {
1625 auto getSupportedProfiles = [this](VideoCodec codec,
1626 std::vector<C2Config::profile_t>* profiles) {
1627 uint32_t queryId = 0;
1628 switch (codec) {
1629 case VideoCodec::H264:
1630 queryId = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
1631 break;
1632 case VideoCodec::VP8:
1633 queryId = V4L2_CID_MPEG_VIDEO_VP8_PROFILE;
1634 break;
1635 case VideoCodec::VP9:
1636 queryId = V4L2_CID_MPEG_VIDEO_VP9_PROFILE;
1637 break;
1638 case VideoCodec::HEVC:
1639 queryId = V4L2_CID_MPEG_VIDEO_HEVC_PROFILE;
1640 break;
1641 default:
1642 return false;
1643 }
1644
1645 v4l2_queryctrl queryCtrl = {};
1646 queryCtrl.id = queryId;
1647 if (ioctl(VIDIOC_QUERYCTRL, &queryCtrl) != 0) {
1648 return false;
1649 }
1650 v4l2_querymenu queryMenu = {};
1651 queryMenu.id = queryCtrl.id;
1652 for (queryMenu.index = queryCtrl.minimum;
1653 static_cast<int>(queryMenu.index) <= queryCtrl.maximum; queryMenu.index++) {
1654 if (ioctl(VIDIOC_QUERYMENU, &queryMenu) == 0) {
1655 const C2Config::profile_t profile =
1656 V4L2Device::v4L2ProfileToC2Profile(codec, queryMenu.index);
1657 if (profile != C2Config::PROFILE_UNUSED) profiles->push_back(profile);
1658 }
1659 }
1660 return true;
1661 };
1662
1663 std::vector<C2Config::profile_t> profiles;
1664 switch (pixFmt) {
1665 case V4L2_PIX_FMT_H264:
1666 case V4L2_PIX_FMT_H264_SLICE:
1667 if (!getSupportedProfiles(VideoCodec::H264, &profiles)) {
1668 ALOGW("Driver doesn't support QUERY H264 profiles, "
1669 "use default values, Base, Main, High");
1670 profiles = {
1671 C2Config::PROFILE_AVC_BASELINE,
1672 C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
1673 C2Config::PROFILE_AVC_MAIN,
1674 C2Config::PROFILE_AVC_HIGH,
1675 };
1676 }
1677 break;
1678 case V4L2_PIX_FMT_VP8:
1679 case V4L2_PIX_FMT_VP8_FRAME:
1680 if (!getSupportedProfiles(VideoCodec::VP8, &profiles)) {
1681 ALOGW("Driver doesn't support QUERY VP8 profiles, use default values, Profile0");
1682 profiles = {C2Config::PROFILE_VP8_0};
1683 }
1684 break;
1685 case V4L2_PIX_FMT_VP9:
1686 case V4L2_PIX_FMT_VP9_FRAME:
1687 if (!getSupportedProfiles(VideoCodec::VP9, &profiles)) {
1688 ALOGW("Driver doesn't support QUERY VP9 profiles, use default values, Profile0");
1689 profiles = {C2Config::PROFILE_VP9_0};
1690 }
1691 break;
1692 case V4L2_PIX_FMT_HEVC:
1693 case V4L2_PIX_FMT_HEVC_SLICE:
1694 if (!getSupportedProfiles(VideoCodec::HEVC, &profiles)) {
1695 ALOGW("Driver doesn't support QUERY HEVC profiles, "
1696 "use default values, Main");
1697 profiles = {
1698 C2Config::PROFILE_HEVC_MAIN,
1699 };
1700 }
1701 break;
1702 default:
1703 ALOGE("Unhandled pixelformat %s", fourccToString(pixFmt).c_str());
1704 return {};
1705 }
1706
1707 // Erase duplicated profiles.
1708 std::sort(profiles.begin(), profiles.end());
1709 profiles.erase(std::unique(profiles.begin(), profiles.end()), profiles.end());
1710 return profiles;
1711 }
1712
1713 // static
c2ProfileToV4L2H264Profile(C2Config::profile_t profile)1714 int32_t V4L2Device::c2ProfileToV4L2H264Profile(C2Config::profile_t profile) {
1715 switch (profile) {
1716 case C2Config::PROFILE_AVC_BASELINE:
1717 return V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE;
1718 case C2Config::PROFILE_AVC_CONSTRAINED_BASELINE:
1719 return V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE;
1720 case C2Config::PROFILE_AVC_MAIN:
1721 return V4L2_MPEG_VIDEO_H264_PROFILE_MAIN;
1722 case C2Config::PROFILE_AVC_EXTENDED:
1723 return V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED;
1724 case C2Config::PROFILE_AVC_HIGH:
1725 return V4L2_MPEG_VIDEO_H264_PROFILE_HIGH;
1726 case C2Config::PROFILE_AVC_HIGH_10:
1727 return V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10;
1728 case C2Config::PROFILE_AVC_HIGH_422:
1729 return V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422;
1730 case C2Config::PROFILE_AVC_HIGH_444_PREDICTIVE:
1731 return V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE;
1732 case C2Config::PROFILE_AVC_SCALABLE_BASELINE:
1733 return V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE;
1734 case C2Config::PROFILE_AVC_SCALABLE_HIGH:
1735 return V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH;
1736 case C2Config::PROFILE_AVC_STEREO_HIGH:
1737 return V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH;
1738 case C2Config::PROFILE_AVC_MULTIVIEW_HIGH:
1739 return V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH;
1740 default:
1741             ALOGE("Unsupported profile %u, add more cases as needed", static_cast<uint32_t>(profile));
1742 return -1;
1743 }
1744 }
1745
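// Note on the level_idc mapping below: H.264 signals the level in the SPS as level_idc = 10 * level
// (e.g. level_idc 31 is level 3.1). Level 1b is the exception: for the High profiles it is carried
// as level_idc 9, which is what this helper maps; for Baseline/Main/Extended it is level_idc 11
// with constraint_set3_flag set, which cannot be told apart from level 1.1 by level_idc alone.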
1746 // static
h264LevelIdcToV4L2H264Level(uint8_t levelIdc)1747 int32_t V4L2Device::h264LevelIdcToV4L2H264Level(uint8_t levelIdc) {
1748 switch (levelIdc) {
1749 case 10:
1750 return V4L2_MPEG_VIDEO_H264_LEVEL_1_0;
1751 case 9:
1752 return V4L2_MPEG_VIDEO_H264_LEVEL_1B;
1753 case 11:
1754 return V4L2_MPEG_VIDEO_H264_LEVEL_1_1;
1755 case 12:
1756 return V4L2_MPEG_VIDEO_H264_LEVEL_1_2;
1757 case 13:
1758 return V4L2_MPEG_VIDEO_H264_LEVEL_1_3;
1759 case 20:
1760 return V4L2_MPEG_VIDEO_H264_LEVEL_2_0;
1761 case 21:
1762 return V4L2_MPEG_VIDEO_H264_LEVEL_2_1;
1763 case 22:
1764 return V4L2_MPEG_VIDEO_H264_LEVEL_2_2;
1765 case 30:
1766 return V4L2_MPEG_VIDEO_H264_LEVEL_3_0;
1767 case 31:
1768 return V4L2_MPEG_VIDEO_H264_LEVEL_3_1;
1769 case 32:
1770 return V4L2_MPEG_VIDEO_H264_LEVEL_3_2;
1771 case 40:
1772 return V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
1773 case 41:
1774 return V4L2_MPEG_VIDEO_H264_LEVEL_4_1;
1775 case 42:
1776 return V4L2_MPEG_VIDEO_H264_LEVEL_4_2;
1777 case 50:
1778 return V4L2_MPEG_VIDEO_H264_LEVEL_5_0;
1779 case 51:
1780 return V4L2_MPEG_VIDEO_H264_LEVEL_5_1;
1781 default:
1782 ALOGE("Unrecognized levelIdc: %u", static_cast<uint32_t>(levelIdc));
1783 return -1;
1784 }
1785 }
1786
1787 // static
c2BitrateModeToV4L2BitrateMode(C2Config::bitrate_mode_t bitrateMode)1788 v4l2_mpeg_video_bitrate_mode V4L2Device::c2BitrateModeToV4L2BitrateMode(
1789 C2Config::bitrate_mode_t bitrateMode) {
1790 switch (bitrateMode) {
1791 case C2Config::bitrate_mode_t::BITRATE_CONST_SKIP_ALLOWED:
1792 ALOGW("BITRATE_CONST_SKIP_ALLOWED not supported, defaulting to BITRATE_CONST");
1793 FALLTHROUGH;
1794 case C2Config::bitrate_mode_t::BITRATE_CONST:
1795 return V4L2_MPEG_VIDEO_BITRATE_MODE_CBR;
1796 case C2Config::bitrate_mode_t::BITRATE_VARIABLE_SKIP_ALLOWED:
1797 ALOGW("BITRATE_VARIABLE_SKIP_ALLOWED not supported, defaulting to BITRATE_VARIABLE");
1798 FALLTHROUGH;
1799 case C2Config::bitrate_mode_t::BITRATE_VARIABLE:
1800 return V4L2_MPEG_VIDEO_BITRATE_MODE_VBR;
1801 default:
1802 ALOGW("Unsupported bitrate mode %u, defaulting to BITRATE_VARIABLE",
1803 static_cast<uint32_t>(bitrateMode));
1804 return V4L2_MPEG_VIDEO_BITRATE_MODE_VBR;
1805 }
1806 }
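
// Illustrative use of the mapping above (a sketch, not code from this file; |device| stands for a
// client's scoped_refptr<V4L2Device>): the returned mode is what a client would program into the
// standard V4L2 rate-control control, e.g.
//   const v4l2_mpeg_video_bitrate_mode mode =
//           c2BitrateModeToV4L2BitrateMode(C2Config::bitrate_mode_t::BITRATE_CONST);
//   device->setExtCtrls(V4L2_CTRL_CLASS_MPEG, {V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_BITRATE_MODE, mode)});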
1807
1808 // static
allocatedSizeFromV4L2Format(const struct v4l2_format & format)1809 ui::Size V4L2Device::allocatedSizeFromV4L2Format(const struct v4l2_format& format) {
1810 ui::Size codedSize;
1811 ui::Size visibleSize;
1812 VideoPixelFormat frameFormat = VideoPixelFormat::UNKNOWN;
1813 size_t bytesPerLine = 0;
1814 // Total bytes in the frame.
1815 size_t sizeimage = 0;
1816
1817 if (V4L2_TYPE_IS_MULTIPLANAR(format.type)) {
1818 ALOG_ASSERT(format.fmt.pix_mp.num_planes > 0);
1819 bytesPerLine = base::checked_cast<int>(format.fmt.pix_mp.plane_fmt[0].bytesperline);
1820 for (size_t i = 0; i < format.fmt.pix_mp.num_planes; ++i) {
1821 sizeimage += base::checked_cast<int>(format.fmt.pix_mp.plane_fmt[i].sizeimage);
1822 }
1823 visibleSize.set(base::checked_cast<int>(format.fmt.pix_mp.width),
1824 base::checked_cast<int>(format.fmt.pix_mp.height));
1825 const uint32_t pixFmt = format.fmt.pix_mp.pixelformat;
1826 const auto frameFourcc = Fourcc::fromV4L2PixFmt(pixFmt);
1827 if (!frameFourcc) {
1828 ALOGE("Unsupported format %s", fourccToString(pixFmt).c_str());
1829 return codedSize;
1830 }
1831 frameFormat = frameFourcc->toVideoPixelFormat();
1832 } else {
1833 bytesPerLine = base::checked_cast<int>(format.fmt.pix.bytesperline);
1834 sizeimage = base::checked_cast<int>(format.fmt.pix.sizeimage);
1835 visibleSize.set(base::checked_cast<int>(format.fmt.pix.width),
1836 base::checked_cast<int>(format.fmt.pix.height));
1837 const uint32_t fourcc = format.fmt.pix.pixelformat;
1838 const auto frameFourcc = Fourcc::fromV4L2PixFmt(fourcc);
1839 if (!frameFourcc) {
1840 ALOGE("Unsupported format %s", fourccToString(fourcc).c_str());
1841 return codedSize;
1842 }
1843         frameFormat = frameFourcc->toVideoPixelFormat();
1844 }
1845
1846 // V4L2 does not provide per-plane bytesperline (bpl) when different components are sharing one
1847 // physical plane buffer. In this case, it only provides bpl for the first component in the
1848 // plane. So we can't depend on it for calculating height, because bpl may vary within one
1849 // physical plane buffer. For example, YUV420 contains 3 components in one physical plane, with
1850     // Y at 8 bits per pixel, and Cb/Cr at 4 bits per pixel per component, but we only get 8 bits
1851 // per pixel from bytesperline in physical plane 0. So we need to get total frame bpp from
1852 // elsewhere to calculate coded height.
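    // Worked example (single-buffer NV12, assuming the usual 8 bpp Y plane and 4 bpp interleaved UV
    // plane): bytesperline = 1920 and sizeimage = 3133440 give codedWidth = 1920 * 8 / 8 = 1920,
    // totalBpp = 8 + 4 = 12 and codedHeight = 3133440 * 8 / 1920 / 12 = 1088.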
1853
1854 // We need bits per pixel for one component only to calculate the coded width from bytesperline.
1855 int planeHorizBitsPerPixel = planeHorizontalBitsPerPixel(frameFormat, 0);
1856
1857 // Adding up bpp for each component will give us total bpp for all components.
1858 int totalBpp = 0;
1859 for (size_t i = 0; i < numPlanes(frameFormat); ++i)
1860 totalBpp += planeBitsPerPixel(frameFormat, i);
1861
1862 if (sizeimage == 0 || bytesPerLine == 0 || planeHorizBitsPerPixel == 0 || totalBpp == 0 ||
1863 (bytesPerLine * 8) % planeHorizBitsPerPixel != 0) {
1864 ALOGE("Invalid format provided");
1865 return codedSize;
1866 }
1867
1868     // Coded width can be calculated from the first component's bytesperline, which in V4L2 always
1869     // applies to the first component in the physical plane buffer.
1870 int codedWidth = bytesPerLine * 8 / planeHorizBitsPerPixel;
1871     // sizeimage (in bytes) is codedWidth * codedHeight * totalBpp / 8.
1872 int codedHeight = sizeimage * 8 / codedWidth / totalBpp;
1873
1874 codedSize.set(codedWidth, codedHeight);
1875 ALOGV("codedSize=%s", toString(codedSize).c_str());
1876
1877 // Sanity checks. Calculated coded size has to contain given visible size and fulfill buffer
1878 // byte size requirements.
1879 ALOG_ASSERT(contains(Rect(codedSize), Rect(visibleSize)));
1880 ALOG_ASSERT(sizeimage <= allocationSize(frameFormat, codedSize));
1881
1882 return codedSize;
1883 }
1884
1885 // static
v4L2MemoryToString(const v4l2_memory memory)1886 const char* V4L2Device::v4L2MemoryToString(const v4l2_memory memory) {
1887 switch (memory) {
1888 case V4L2_MEMORY_MMAP:
1889 return "V4L2_MEMORY_MMAP";
1890 case V4L2_MEMORY_USERPTR:
1891 return "V4L2_MEMORY_USERPTR";
1892 case V4L2_MEMORY_DMABUF:
1893 return "V4L2_MEMORY_DMABUF";
1894 case V4L2_MEMORY_OVERLAY:
1895 return "V4L2_MEMORY_OVERLAY";
1896 default:
1897 return "UNKNOWN";
1898 }
1899 }
1900
1901 // static
v4L2BufferTypeToString(const enum v4l2_buf_type bufType)1902 const char* V4L2Device::v4L2BufferTypeToString(const enum v4l2_buf_type bufType) {
1903 switch (bufType) {
1904 case V4L2_BUF_TYPE_VIDEO_OUTPUT:
1905 return "OUTPUT";
1906 case V4L2_BUF_TYPE_VIDEO_CAPTURE:
1907 return "CAPTURE";
1908 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1909 return "OUTPUT_MPLANE";
1910 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1911 return "CAPTURE_MPLANE";
1912 default:
1913 return "UNKNOWN";
1914 }
1915 }
1916
1917 // static
v4L2BufferTypeToATraceLabel(uint32_t debugStreamId,const enum v4l2_buf_type type,const char * label)1918 std::string V4L2Device::v4L2BufferTypeToATraceLabel(uint32_t debugStreamId,
1919 const enum v4l2_buf_type type,
1920 const char* label) {
1921 const char* queueName;
1922 switch (type) {
1923 case V4L2_BUF_TYPE_VIDEO_CAPTURE:
1924 FALLTHROUGH;
1925 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1926 queueName = "CAPTURE";
1927 break;
1928 case V4L2_BUF_TYPE_VIDEO_OUTPUT:
1929 FALLTHROUGH;
1930 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1931 queueName = "OUTPUT";
1932 break;
1933 default:
1934 queueName = "";
1935 break;
1936 }
1937
1938 return base::StringPrintf("#%u V4L2 %s %s", debugStreamId, queueName, label);
1939 }
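
// For example, v4L2BufferTypeToATraceLabel(3, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, "VIDIOC_DQBUF")
// produces the trace label "#3 V4L2 CAPTURE VIDIOC_DQBUF".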
1940
1941 // static
v4L2FormatToString(const struct v4l2_format & format)1942 std::string V4L2Device::v4L2FormatToString(const struct v4l2_format& format) {
1943 std::ostringstream s;
1944 s << "v4l2_format type: " << format.type;
1945 if (format.type == V4L2_BUF_TYPE_VIDEO_CAPTURE || format.type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
1946 // single-planar
1947 const struct v4l2_pix_format& pix = format.fmt.pix;
1948 s << ", width_height: " << toString(ui::Size(pix.width, pix.height))
1949 << ", pixelformat: " << fourccToString(pix.pixelformat) << ", field: " << pix.field
1950 << ", bytesperline: " << pix.bytesperline << ", sizeimage: " << pix.sizeimage;
1951 } else if (V4L2_TYPE_IS_MULTIPLANAR(format.type)) {
1952 const struct v4l2_pix_format_mplane& pixMp = format.fmt.pix_mp;
1953         // Because num_planes has type uint8_t, ostringstream treats it as a char instead of an
1954         // integer, which is not what we want. Casting pix_mp.num_planes to unsigned int solves
1955         // the issue.
1956 s << ", width_height: " << toString(ui::Size(pixMp.width, pixMp.height))
1957 << ", pixelformat: " << fourccToString(pixMp.pixelformat) << ", field: " << pixMp.field
1958 << ", num_planes: " << static_cast<unsigned int>(pixMp.num_planes);
1959 for (size_t i = 0; i < pixMp.num_planes; ++i) {
1960 const struct v4l2_plane_pix_format& plane_fmt = pixMp.plane_fmt[i];
1961 s << ", plane_fmt[" << i << "].sizeimage: " << plane_fmt.sizeimage << ", plane_fmt["
1962 << i << "].bytesperline: " << plane_fmt.bytesperline;
1963 }
1964 } else {
1965         s << " not supported yet.";
1966 }
1967 return s.str();
1968 }
1969
1970 // static
v4L2BufferToString(const struct v4l2_buffer & buffer)1971 std::string V4L2Device::v4L2BufferToString(const struct v4l2_buffer& buffer) {
1972 std::ostringstream s;
1973 s << "v4l2_buffer type: " << buffer.type << ", memory: " << buffer.memory
1974 << ", index: " << buffer.index << " bytesused: " << buffer.bytesused
1975 << ", length: " << buffer.length;
1976 if (buffer.type == V4L2_BUF_TYPE_VIDEO_CAPTURE || buffer.type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
1977 // single-planar
1978 if (buffer.memory == V4L2_MEMORY_MMAP) {
1979 s << ", m.offset: " << buffer.m.offset;
1980 } else if (buffer.memory == V4L2_MEMORY_USERPTR) {
1981 s << ", m.userptr: " << buffer.m.userptr;
1982 } else if (buffer.memory == V4L2_MEMORY_DMABUF) {
1983 s << ", m.fd: " << buffer.m.fd;
1984         }
1985 } else if (V4L2_TYPE_IS_MULTIPLANAR(buffer.type)) {
1986 for (size_t i = 0; i < buffer.length; ++i) {
1987 const struct v4l2_plane& plane = buffer.m.planes[i];
1988 s << ", m.planes[" << i << "](bytesused: " << plane.bytesused
1989 << ", length: " << plane.length << ", data_offset: " << plane.data_offset;
1990 if (buffer.memory == V4L2_MEMORY_MMAP) {
1991 s << ", m.mem_offset: " << plane.m.mem_offset;
1992 } else if (buffer.memory == V4L2_MEMORY_USERPTR) {
1993 s << ", m.userptr: " << plane.m.userptr;
1994 } else if (buffer.memory == V4L2_MEMORY_DMABUF) {
1995 s << ", m.fd: " << plane.m.fd;
1996 }
1997 s << ")";
1998 }
1999 } else {
2000         s << " not supported yet.";
2001 }
2002 return s.str();
2003 }
2004
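// Worked example for the conversion below (single-buffer NV12 at 1920x1088 with bytesperline = 1920
// and sizeimage = 3133440): the driver reports only one plane, so the UV plane is synthesized as
// {stride 1920, offset 1920 * 1088 = 2088960, size 2088960 / 2 = 1044480}, yielding a two-plane
// layout backed by a single buffer (the trailing (numBuffers > 1) flag evaluates to false).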
2005 // static
v4L2FormatToVideoFrameLayout(const struct v4l2_format & format)2006 std::optional<VideoFrameLayout> V4L2Device::v4L2FormatToVideoFrameLayout(
2007 const struct v4l2_format& format) {
2008 if (!V4L2_TYPE_IS_MULTIPLANAR(format.type)) {
2009 ALOGE("v4l2_buf_type is not multiplanar: 0x%" PRIx32, format.type);
2010 return std::nullopt;
2011 }
2012 const v4l2_pix_format_mplane& pixMp = format.fmt.pix_mp;
2013 const uint32_t& pixFmt = pixMp.pixelformat;
2014 const auto videoFourcc = Fourcc::fromV4L2PixFmt(pixFmt);
2015 if (!videoFourcc) {
2016 ALOGE("Failed to convert pixel format to VideoPixelFormat: %s",
2017 fourccToString(pixFmt).c_str());
2018 return std::nullopt;
2019 }
2020 const VideoPixelFormat videoFormat = videoFourcc->toVideoPixelFormat();
2021 const size_t numBuffers = pixMp.num_planes;
2022 const size_t numColorPlanes = numPlanes(videoFormat);
2023 if (numColorPlanes == 0) {
2024 ALOGE("Unsupported video format for NumPlanes(): %s",
2025 videoPixelFormatToString(videoFormat).c_str());
2026 return std::nullopt;
2027 }
2028 if (numBuffers > numColorPlanes) {
2029 ALOGE("pix_mp.num_planes: %zu should not be larger than NumPlanes(%s): %zu", numBuffers,
2030 videoPixelFormatToString(videoFormat).c_str(), numColorPlanes);
2031 return std::nullopt;
2032 }
2033 // Reserve capacity in advance to prevent unnecessary vector reallocation.
2034 std::vector<VideoFramePlane> planes;
2035 planes.reserve(numColorPlanes);
2036 for (size_t i = 0; i < numBuffers; ++i) {
2037 const v4l2_plane_pix_format& planeFormat = pixMp.plane_fmt[i];
2038 planes.push_back(VideoFramePlane{planeFormat.bytesperline, 0u, planeFormat.sizeimage});
2039 }
2040     // When there are more color planes than buffers, fill in the stride of the color planes that do
2041     // not map to a buffer. Right now only some pixel formats are supported: NV12, YUV420, YVU420.
2042 if (numColorPlanes > numBuffers) {
2043 const uint32_t yStride = planes[0].mStride;
2044             // Note that yStride comes from the V4L2 bytesperline field, whose type is uint32_t, so it
2045             // is safe to cast to size_t.
2046 const size_t yStrideAbs = static_cast<size_t>(yStride);
2047 switch (pixFmt) {
2048 case V4L2_PIX_FMT_NV12:
2049                 // The stride of UV is the same as Y in NV12. The height is half that of the Y plane.
2050 planes.push_back(VideoFramePlane{yStride, yStrideAbs * pixMp.height,
2051 yStrideAbs * pixMp.height / 2});
2052 ALOG_ASSERT(2u == planes.size());
2053 break;
2054 case V4L2_PIX_FMT_YUV420:
2055 case V4L2_PIX_FMT_YVU420: {
2056             // The spec claims that two Cx rows (including padding) are exactly as long as one Y row
2057             // (including padding), so the stride of Y must be an even number.
2058 if (yStride % 2 != 0 || pixMp.height % 2 != 0) {
2059 ALOGE("Plane-Y stride and height should be even; stride: %u, height: %u", yStride,
2060 pixMp.height);
2061 return std::nullopt;
2062 }
2063 const uint32_t halfStride = yStride / 2;
2064 const size_t plane0Area = yStrideAbs * pixMp.height;
2065 const size_t plane1Area = plane0Area / 4;
2066 planes.push_back(VideoFramePlane{halfStride, plane0Area, plane1Area});
2067 planes.push_back(VideoFramePlane{halfStride, plane0Area + plane1Area, plane1Area});
2068 ALOG_ASSERT(3u == planes.size());
2069 break;
2070 }
2071 default:
2072 ALOGE("Cannot derive stride for each plane for pixel format %s",
2073 fourccToString(pixFmt).c_str());
2074 return std::nullopt;
2075 }
2076 }
2077
2078 return VideoFrameLayout{videoFormat, ui::Size(pixMp.width, pixMp.height), std::move(planes),
2079 (numBuffers > 1)};
2080 }
2081
2082 // static
getNumPlanesOfV4L2PixFmt(uint32_t pixFmt)2083 size_t V4L2Device::getNumPlanesOfV4L2PixFmt(uint32_t pixFmt) {
2084 std::optional<Fourcc> fourcc = Fourcc::fromV4L2PixFmt(pixFmt);
2085 if (fourcc && fourcc->isMultiPlanar()) {
2086 return numPlanes(fourcc->toVideoPixelFormat());
2087 }
2088 return 1u;
2089 }
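
// For example, the multi-planar fourcc V4L2_PIX_FMT_NV12M describes two separate V4L2 planes and
// returns 2, while single-planar V4L2_PIX_FMT_NV12 packs both color planes into one buffer and
// returns 1.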
2090
getSupportedResolution(uint32_t pixelFormat,ui::Size * minResolution,ui::Size * maxResolution)2091 void V4L2Device::getSupportedResolution(uint32_t pixelFormat, ui::Size* minResolution,
2092 ui::Size* maxResolution) {
2093 maxResolution->set(0, 0);
2094 minResolution->set(0, 0);
2095 v4l2_frmsizeenum frameSize;
2096 memset(&frameSize, 0, sizeof(frameSize));
2097 frameSize.pixel_format = pixelFormat;
2098 for (; ioctl(VIDIOC_ENUM_FRAMESIZES, &frameSize) == 0; ++frameSize.index) {
2099 if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
2100 if (frameSize.discrete.width >= base::checked_cast<uint32_t>(maxResolution->width) &&
2101 frameSize.discrete.height >= base::checked_cast<uint32_t>(maxResolution->height)) {
2102 maxResolution->set(frameSize.discrete.width, frameSize.discrete.height);
2103 }
2104 if (isEmpty(*minResolution) ||
2105 (frameSize.discrete.width <= base::checked_cast<uint32_t>(minResolution->width) &&
2106 frameSize.discrete.height <=
2107 base::checked_cast<uint32_t>(minResolution->height))) {
2108 minResolution->set(frameSize.discrete.width, frameSize.discrete.height);
2109 }
2110 } else if (frameSize.type == V4L2_FRMSIZE_TYPE_STEPWISE ||
2111 frameSize.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
2112 maxResolution->set(frameSize.stepwise.max_width, frameSize.stepwise.max_height);
2113 minResolution->set(frameSize.stepwise.min_width, frameSize.stepwise.min_height);
2114 break;
2115 }
2116 }
2117 if (isEmpty(*maxResolution)) {
2118 maxResolution->set(1920, 1088);
2119 ALOGE("GetSupportedResolution failed to get maximum resolution for fourcc %s, "
2120 "fall back to %s",
2121 fourccToString(pixelFormat).c_str(), toString(*maxResolution).c_str());
2122 }
2123 if (isEmpty(*minResolution)) {
2124 minResolution->set(16, 16);
2125 ALOGE("GetSupportedResolution failed to get minimum resolution for fourcc %s, "
2126 "fall back to %s",
2127 fourccToString(pixelFormat).c_str(), toString(*minResolution).c_str());
2128 }
2129 }
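
// For example, a driver reporting V4L2_FRMSIZE_TYPE_STEPWISE with a 64x64 to 4096x4096 range yields
// minResolution = 64x64 and maxResolution = 4096x4096 after a single iteration, whereas drivers
// enumerating discrete sizes are scanned entry by entry to track the smallest and largest sizes.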
2130
enumerateSupportedPixelformats(v4l2_buf_type bufType)2131 std::vector<uint32_t> V4L2Device::enumerateSupportedPixelformats(v4l2_buf_type bufType) {
2132 std::vector<uint32_t> pixelFormats;
2133
2134 v4l2_fmtdesc fmtDesc;
2135 memset(&fmtDesc, 0, sizeof(fmtDesc));
2136 fmtDesc.type = bufType;
2137
2138 for (; ioctl(VIDIOC_ENUM_FMT, &fmtDesc) == 0; ++fmtDesc.index) {
2139 ALOGV("Found %s (0x%" PRIx32 ")", fmtDesc.description, fmtDesc.pixelformat);
2140 pixelFormats.push_back(fmtDesc.pixelformat);
2141 }
2142
2143 return pixelFormats;
2144 }
2145
2146 // static
getSupportedDecodeLevels(VideoCodec videoCodecType)2147 std::vector<C2Config::level_t> V4L2Device::getSupportedDecodeLevels(VideoCodec videoCodecType) {
2148 std::vector<C2Config::level_t> supportedLevels;
2149 Type type = Type::kDecoder;
2150
2151 for (const auto& info : getDeviceInfosForType(type)) {
2152 scoped_refptr<V4L2Device> device = V4L2Device::create();
2153 if (!device->openDevicePath(info.first, type)) {
2154 ALOGV("Failed opening %s", info.first.c_str());
2155 continue;
2156 }
2157
2158 const auto& levels = device->enumerateSupportedDecodeLevels(videoCodecType);
2159 supportedLevels.insert(supportedLevels.end(), levels.begin(), levels.end());
2160 device->closeDevice();
2161 }
2162
2163 return supportedLevels;
2164 }
2165
2166 // static
getSupportedProfiles(V4L2Device::Type type,const std::vector<uint32_t> & pixelFormats)2167 SupportedProfiles V4L2Device::getSupportedProfiles(V4L2Device::Type type,
2168 const std::vector<uint32_t>& pixelFormats) {
2169 SupportedProfiles supportedProfiles;
2170
2171 for (const auto& info : getDeviceInfosForType(type)) {
2172 scoped_refptr<V4L2Device> device = V4L2Device::create();
2173 if (!device->openDevicePath(info.first, type)) {
2174 ALOGV("Failed opening %s", info.first.c_str());
2175 continue;
2176 }
2177
2178 const auto& profiles = device->enumerateSupportedProfiles(type, pixelFormats);
2179 supportedProfiles.insert(supportedProfiles.end(), profiles.begin(), profiles.end());
2180
2181 device->closeDevice();
2182 }
2183
2184 return supportedProfiles;
2185 }
2186
2187 // static
getDefaultProfile(VideoCodec codec)2188 C2Config::profile_t V4L2Device::getDefaultProfile(VideoCodec codec) {
2189 uint32_t queryId = 0;
2190
2191 switch (codec) {
2192 case VideoCodec::H264:
2193 queryId = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
2194 break;
2195 case VideoCodec::VP8:
2196 queryId = V4L2_CID_MPEG_VIDEO_VP8_PROFILE;
2197 break;
2198 case VideoCodec::VP9:
2199 queryId = V4L2_CID_MPEG_VIDEO_VP9_PROFILE;
2200 break;
2201 case VideoCodec::HEVC:
2202 queryId = V4L2_CID_MPEG_VIDEO_HEVC_PROFILE;
2203 break;
2204 default:
2205 return C2Config::PROFILE_UNUSED;
2206 }
2207
2208 for (const auto& info : getDeviceInfosForType(Type::kDecoder)) {
2209 scoped_refptr<V4L2Device> device = V4L2Device::create();
2210 if (!device->openDevicePath(info.first, Type::kDecoder)) {
2211 ALOGV("Failed opening %s", info.first.c_str());
2212 continue;
2213 }
2214
2215         // Query the control; the returned structure includes the index of the default profile.
2217 v4l2_queryctrl queryCtrl = {};
2218 queryCtrl.id = queryId;
2219 if (device->ioctl(VIDIOC_QUERYCTRL, &queryCtrl) != 0) {
2220 device->closeDevice();
2221 continue;
2222 }
2223
2224 v4l2_querymenu queryMenu = {};
2225 queryMenu.id = queryCtrl.id;
2226 queryMenu.index = queryCtrl.default_value;
2227 if (device->ioctl(VIDIOC_QUERYMENU, &queryMenu) == 0) {
2228 device->closeDevice();
2229 return v4L2ProfileToC2Profile(codec, queryMenu.index);
2230 }
2231
2232 device->closeDevice();
2233 }
2234 return C2Config::PROFILE_UNUSED;
2235 }
2236
2237 // static
getDefaultLevel(VideoCodec codec)2238 C2Config::level_t V4L2Device::getDefaultLevel(VideoCodec codec) {
2239 uint32_t queryId = 0;
2240
2241 switch (codec) {
2242 case VideoCodec::H264:
2243 queryId = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
2244 break;
2245 #ifdef V4L2_CID_MPEG_VIDEO_VP9_LEVEL
2246 case VideoCodec::VP9:
2247 queryId = V4L2_CID_MPEG_VIDEO_VP9_LEVEL;
2248 break;
2249 #endif
2250 case VideoCodec::HEVC:
2251 queryId = V4L2_CID_MPEG_VIDEO_HEVC_LEVEL;
2252 break;
2253 default:
2254 return C2Config::LEVEL_UNUSED;
2255 }
2256
2257 for (const auto& info : getDeviceInfosForType(Type::kDecoder)) {
2258 scoped_refptr<V4L2Device> device = V4L2Device::create();
2259 if (!device->openDevicePath(info.first, Type::kDecoder)) {
2260 ALOGV("Failed opening %s", info.first.c_str());
2261 continue;
2262 }
2263
2264 v4l2_queryctrl queryCtrl = {};
2265 queryCtrl.id = queryId;
2266         if (device->ioctl(VIDIOC_QUERYCTRL, &queryCtrl) != 0) { // gets index of default level
2267 device->closeDevice();
2268 continue;
2269 }
2270
2271 v4l2_querymenu queryMenu = {};
2272 queryMenu.id = queryCtrl.id;
2273 queryMenu.index = queryCtrl.default_value;
2274 if (device->ioctl(VIDIOC_QUERYMENU, &queryMenu) == 0) {
2275 device->closeDevice();
2276 return v4L2LevelToC2Level(codec, queryMenu.index);
2277 }
2278
2279 device->closeDevice();
2280 }
2281
2282 return C2Config::LEVEL_UNUSED;
2283 }
2284
2285 // static
queryDecodingCapabilities(VideoCodec codec)2286 SupportedCapabilities V4L2Device::queryDecodingCapabilities(VideoCodec codec) {
2287 SupportedCapabilities caps;
2288 caps.codec = codec;
2289 caps.supportedLevels = V4L2Device::getSupportedDecodeLevels(codec);
2290 caps.defaultLevel = V4L2Device::getDefaultLevel(codec);
2291 caps.supportedProfiles = V4L2Device::getSupportedProfiles(
2292 V4L2Device::Type::kDecoder, {V4L2Device::videoCodecToPixFmt(codec)});
2294
2295 return caps;
2296 }
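
// Hypothetical caller sketch (the surrounding component code is illustrative): a decoder can build
// its C2 capability ranges from a single call:
//   const SupportedCapabilities caps = V4L2Device::queryDecodingCapabilities(VideoCodec::VP9);
//   for (const SupportedProfile& p : caps.supportedProfiles) {
//       // Expose p.profile together with p.min_resolution / p.max_resolution.
//   }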
2297
2298 // static
queryEncodingCapabilities(VideoCodec codec)2299 SupportedCapabilities V4L2Device::queryEncodingCapabilities(VideoCodec codec) {
2300 SupportedCapabilities caps;
2301 caps.codec = codec;
2302 caps.supportedProfiles = V4L2Device::getSupportedProfiles(
2303 V4L2Device::Type::kEncoder, {V4L2Device::videoCodecToPixFmt(codec)});
2304 return caps;
2305 }
2306
enumerateSupportedDecodeLevels(VideoCodec videoCodecType)2307 std::vector<C2Config::level_t> V4L2Device::enumerateSupportedDecodeLevels(
2308 VideoCodec videoCodecType) {
2309 std::vector<C2Config::level_t> supportedLevels;
2310
2311 const auto& supportedPixelformats =
2312 enumerateSupportedPixelformats(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
2313
2314 for (uint32_t pixelFormat : supportedPixelformats) {
2315 if (isValidPixFmtForCodec(videoCodecType, pixelFormat)) {
2316 std::vector<C2Config::level_t> levels = queryC2Levels(pixelFormat);
2317 supportedLevels.insert(supportedLevels.end(), levels.begin(), levels.end());
2318 }
2319 }
2320
2321 return supportedLevels;
2322 }
2323
enumerateSupportedProfiles(V4L2Device::Type type,const std::vector<uint32_t> & pixelFormats)2324 SupportedProfiles V4L2Device::enumerateSupportedProfiles(
2325 V4L2Device::Type type, const std::vector<uint32_t>& pixelFormats) {
2326 SupportedProfiles profiles;
2327
2328 v4l2_buf_type bufType;
2329 switch (type) {
2330 case Type::kDecoder:
2331 bufType = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2332 break;
2333 case Type::kEncoder:
2334 bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2335 break;
2336 }
2337
2338 const auto& supportedPixelformats = enumerateSupportedPixelformats(bufType);
2339
2340 for (uint32_t pixelFormat : supportedPixelformats) {
2341 if (std::find(pixelFormats.begin(), pixelFormats.end(), pixelFormat) == pixelFormats.end())
2342 continue;
2343
2344 SupportedProfile profile;
2345 if (type == Type::kEncoder) {
2346 profile.max_framerate_numerator = 30;
2347 profile.max_framerate_denominator = 1;
2348 }
2349
2350 getSupportedResolution(pixelFormat, &profile.min_resolution, &profile.max_resolution);
2351
2352 const auto videoCodecProfiles = queryC2Profiles(pixelFormat);
2353
2354 for (const auto& videoCodecProfile : videoCodecProfiles) {
2355 profile.profile = videoCodecProfile;
2356 profiles.push_back(profile);
2357
2358 ALOGV("Found profile %s, resolutions: %s %s", profileToString(profile.profile),
2359 toString(profile.min_resolution).c_str(),
2360 toString(profile.max_resolution).c_str());
2361 }
2362 }
2363
2364 return profiles;
2365 }
2366
startPolling(scoped_refptr<base::SequencedTaskRunner> taskRunner,android::V4L2DevicePoller::EventCallback eventCallback,base::RepeatingClosure errorCallback)2367 bool V4L2Device::startPolling(scoped_refptr<base::SequencedTaskRunner> taskRunner,
2368 android::V4L2DevicePoller::EventCallback eventCallback,
2369 base::RepeatingClosure errorCallback) {
2370 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
2371
2372 if (!mDevicePoller) {
2373 mDevicePoller = std::make_unique<android::V4L2DevicePoller>(this, "V4L2DeviceThreadPoller",
2374 std::move(taskRunner));
2375 }
2376
2377 bool ret = mDevicePoller->startPolling(std::move(eventCallback), std::move(errorCallback));
2378
2379 if (!ret) mDevicePoller = nullptr;
2380
2381 return ret;
2382 }
2383
stopPolling()2384 bool V4L2Device::stopPolling() {
2385 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
2386
2387 return !mDevicePoller || mDevicePoller->stopPolling();
2388 }
2389
schedulePoll()2390 void V4L2Device::schedulePoll() {
2391 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
2392
2393 if (!mDevicePoller || !mDevicePoller->isPolling()) return;
2394
2395 mDevicePoller->schedulePoll();
2396 }
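
// Typical lifecycle of the poller helpers above: a client calls startPolling() once with its event
// and error callbacks, calls schedulePoll() whenever it has queued buffers or pending events to
// service, and calls stopPolling() before tearing down the queues. All three must be invoked on the
// same client sequence, which is enforced by mClientSequenceChecker.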
2397
isCtrlExposed(uint32_t ctrlId)2398 bool V4L2Device::isCtrlExposed(uint32_t ctrlId) {
2399 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
2400
2401 struct v4l2_queryctrl queryCtrl;
2402 memset(&queryCtrl, 0, sizeof(queryCtrl));
2403 queryCtrl.id = ctrlId;
2404
2405 return ioctl(VIDIOC_QUERYCTRL, &queryCtrl) == 0;
2406 }
2407
setExtCtrls(uint32_t ctrlClass,std::vector<V4L2ExtCtrl> ctrls)2408 bool V4L2Device::setExtCtrls(uint32_t ctrlClass, std::vector<V4L2ExtCtrl> ctrls) {
2409 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
2410
2411 if (ctrls.empty()) return true;
2412
2413 struct v4l2_ext_controls extCtrls;
2414 memset(&extCtrls, 0, sizeof(extCtrls));
2415 extCtrls.ctrl_class = ctrlClass;
2416 extCtrls.count = ctrls.size();
2417 extCtrls.controls = &ctrls[0].ctrl;
2418 return ioctl(VIDIOC_S_EXT_CTRLS, &extCtrls) == 0;
2419 }
2420
isCommandSupported(uint32_t commandId)2421 bool V4L2Device::isCommandSupported(uint32_t commandId) {
2422 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
2423
2424 struct v4l2_encoder_cmd cmd;
2425 memset(&cmd, 0, sizeof(cmd));
2426 cmd.cmd = commandId;
2427
2428 return ioctl(VIDIOC_TRY_ENCODER_CMD, &cmd) == 0;
2429 }
2430
hasCapabilities(uint32_t capabilities)2431 bool V4L2Device::hasCapabilities(uint32_t capabilities) {
2432 DCHECK_CALLED_ON_VALID_SEQUENCE(mClientSequenceChecker);
2433
2434 struct v4l2_capability caps;
2435 memset(&caps, 0, sizeof(caps));
2436 if (ioctl(VIDIOC_QUERYCAP, &caps) != 0) {
2437 ALOGE("Failed to query capabilities");
2438 return false;
2439 }
2440
2441 return (caps.capabilities & capabilities) == capabilities;
2442 }
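
// Example check (standard V4L2 capability flags): a stateful memory-to-memory codec device is
// typically expected to advertise both multi-planar M2M support and streaming I/O, e.g.
//   hasCapabilities(V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING);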
2443
openDevicePath(const std::string & path,Type)2444 bool V4L2Device::openDevicePath(const std::string& path, Type /*type*/) {
2445 ALOG_ASSERT(!mDeviceFd.is_valid());
2446
2447 mDeviceFd.reset(HANDLE_EINTR(::open(path.c_str(), O_RDWR | O_NONBLOCK | O_CLOEXEC)));
2448 if (!mDeviceFd.is_valid()) return false;
2449
2450 return true;
2451 }
2452
closeDevice()2453 void V4L2Device::closeDevice() {
2454 ALOGV("%s()", __func__);
2455
2456 mDeviceFd.reset();
2457 }
2458
2459 // static
getDeviceInfosForType(V4L2Device::Type type)2460 const V4L2Device::DeviceInfos& V4L2Device::getDeviceInfosForType(V4L2Device::Type type) {
2461 // video input/output devices are registered as /dev/videoX in V4L2.
2462 static constexpr const char* kVideoDevicePattern = "/dev/video";
2463 static const DeviceInfos sNoDevices = {};
2464 static std::mutex sDeviceInfosCacheLock;
2465 static std::map<Type, DeviceInfos> sDeviceInfosCache;
2466
2467 std::lock_guard lock(sDeviceInfosCacheLock);
2468 if (sDeviceInfosCache.find(type) != sDeviceInfosCache.end()) {
2469 return sDeviceInfosCache[type];
2470 }
2471
2472 v4l2_buf_type bufType;
2473 switch (type) {
2474 case Type::kDecoder:
2475 bufType = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2476 break;
2477 case Type::kEncoder:
2478 bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2479 break;
2480 default:
2481 ALOGE("Only decoder and encoder types are supported!!");
2482 return sNoDevices;
2483 }
2484
2485 DeviceInfos deviceInfos;
2486 for (int i = 0; i < 10; ++i) {
2487 std::string path = base::StringPrintf("%s%d", kVideoDevicePattern, i);
2488
2489 scoped_refptr<V4L2Device> device = V4L2Device::create();
2490 if (!device->openDevicePath(path, type)) {
2491 continue;
2492 }
2493
2494 const auto& supportedPixelformats = device->enumerateSupportedPixelformats(bufType);
2495 if (!supportedPixelformats.empty()) {
2496 ALOGV("Found device: %s", path.c_str());
2497 deviceInfos.push_back(std::make_pair(path, supportedPixelformats));
2498 }
2499
2500 device->closeDevice();
2501 }
2502
2503 sDeviceInfosCache[type] = deviceInfos;
2504
2505 return sDeviceInfosCache[type];
2506 }
2507
getDevicePathFor(Type type,uint32_t pixFmt)2508 std::string V4L2Device::getDevicePathFor(Type type, uint32_t pixFmt) {
2509 for (const auto& info : getDeviceInfosForType(type)) {
2510 if (std::find(info.second.begin(), info.second.end(), pixFmt) != info.second.end())
2511 return info.first;
2512 }
2513
2514 return std::string();
2515 }
2516
2517 } // namespace android
2518