/*
 * Copyright (C) 2012-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <com_android_graphics_libgui_flags.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/Camera2Heap.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/JpegProcessor.h"

namespace android {
namespace camera2 {

using android::camera3::CAMERA_STREAM_ROTATION_0;
using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

JpegProcessor::JpegProcessor(
        sp<Camera2Client> client,
        wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureDone(false),
        mCaptureSuccess(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

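// Frame-available callback from the CpuConsumer: the first buffer arriving
// after a capture marks the capture as done and wakes threadLoop().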
void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
    Mutex::Autolock l(mInputMutex);
    ALOGV("%s", __FUNCTION__);
    if (!mCaptureDone) {
        mCaptureDone = true;
        mCaptureSuccess = true;
        mCaptureDoneSignal.signal();
    }
}

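// (Re)configure the JPEG capture path: lazily create the CPU consumer and the
// ashmem capture heap, and recreate the device BLOB output stream whenever the
// requested picture dimensions change.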
status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    ssize_t maxJpegSize = device->getJpegBufferSize(device->infoPhysical(""),
            params.pictureWidth, params.pictureHeight);
    if (maxJpegSize <= 0) {
        ALOGE("%s: Camera %d: Jpeg buffer size (%zd) is invalid",
                __FUNCTION__, mId, maxJpegSize);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        mCaptureConsumer = new CpuConsumer(1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
        mCaptureWindow = mCaptureConsumer->getSurface();
#else
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mCaptureConsumer = new CpuConsumer(consumer, 1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
        mCaptureWindow = new Surface(producer);
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    }

    // Since ashmem heaps are rounded up to page size, don't require the
    // capture heap to match the required JPEG buffer size exactly; only
    // reallocate when it is missing, too small, or more than
    // HEAP_SLACK_FACTOR times larger than needed.
    const size_t HEAP_SLACK_FACTOR = 2;
    if (mCaptureHeap == 0 ||
            (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
            (mCaptureHeap->getSize() >
                    static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR)) {
        // Create memory for API consumption
        mCaptureHeap.clear();
        mCaptureHeap =
                new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }
    ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
            __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        CameraDeviceBase::StreamInfo streamInfo;
        res = device->getStreamInfo(mCaptureStreamId, &streamInfo);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (streamInfo.width != (uint32_t)params.pictureWidth ||
                streamInfo.height != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                    __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                        "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
                CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
                std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }
    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        status_t res = device->deleteStream(mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: delete stream %d failed!", __FUNCTION__, mCaptureStreamId);
            return res;
        }

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

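// Capture wait loop: block (with timeout) until onFrameAvailable() signals that
// a capture attempt has finished, then hand the result to processNewCapture().
// Always returns true so the thread keeps running.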
bool JpegProcessor::threadLoop() {
    status_t res;

    bool captureSuccess = false;
    {
        Mutex::Autolock l(mInputMutex);

        while (!mCaptureDone) {
            res = mCaptureDoneSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }

        captureSuccess = mCaptureSuccess;
        mCaptureDone = false;
    }

    res = processNewCapture(captureSuccess);

    return true;
}

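// Copy the completed JPEG out of the CPU consumer buffer into the ashmem
// capture heap and forward it to the CaptureSequencer. On a failed capture no
// buffer is locked; the sequencer is still notified so the request can finish
// with an error.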
status_t JpegProcessor::processNewCapture(bool captureSuccess) {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;
    sp<MemoryBase> captureBuffer;

    CpuConsumer::LockedBuffer imgBuffer;

    if (captureSuccess) {
        Mutex::Autolock l(mInputMutex);
        if (mCaptureStreamId == NO_STREAM) {
            ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
        if (res != OK) {
            if (res != BAD_VALUE) {
                ALOGE("%s: Camera %d: Error receiving still image buffer: "
                        "%s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
            }
            return res;
        }

        ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
                mId);

        if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
            ALOGE("%s: Camera %d: Unexpected format for still image: "
                    "%x, expected %x", __FUNCTION__, mId,
                    imgBuffer.format,
                    HAL_PIXEL_FORMAT_BLOB);
            mCaptureConsumer->unlockBuffer(imgBuffer);
            return OK;
        }

        // Find size of JPEG image
        size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
        if (jpegSize == 0) { // failed to find size, default to whole buffer
            jpegSize = imgBuffer.width;
        }
        size_t heapSize = mCaptureHeap->getSize();
        if (jpegSize > heapSize) {
            ALOGW("%s: JPEG image is larger than expected, truncating "
                    "(got %zu, expected at most %zu bytes)",
                    __FUNCTION__, jpegSize, heapSize);
            jpegSize = heapSize;
        }

        // TODO: Optimize this to avoid memcopy
        captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
        void* captureMemory = mCaptureHeap->getBase();
        memcpy(captureMemory, imgBuffer.data, jpegSize);

        mCaptureConsumer->unlockBuffer(imgBuffer);
    }

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer, !captureSuccess);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF.)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files. The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive). Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment. This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG-compressed image stream begins.
 * This can be considered an opaque format with one requirement: all 0xFF bytes
 * in this stream must be followed by a 0x00 byte. This prevents any of the
 * image data from being interpreted as a segment. The only exception to this is
 * at the end of the image stream, where there is an End of Image (EOI) marker:
 * 0xFF followed by a non-zero (0xD9) byte.
 */

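// As a rough, illustrative sketch (not an exhaustive description of JFIF), a
// minimal stream of the kind parsed below might be laid out as:
//
//   FF D8             SOI marker
//   FF E0 00 10 ...   APP0 segment; its 2-byte length (0x0010 here) counts the
//                     length field itself plus the segment data, but not the marker
//   ...               entropy-coded image data (0xFF bytes followed by 0x00)
//   FF D9             EOI marker
//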
const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

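// Parsing strategy (as implemented below): trust the transport header
// (CameraBlob) that the HAL writes at the end of the BLOB buffer when it is
// present and consistent; otherwise fall back to walking the JFIF segments and
// scanning for the EOI marker.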
// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
    uint8_t *header = jpegBuffer + (maxSize - sizeof(CameraBlob));
    CameraBlob *blob = (CameraBlob*)(header);
    if (blob->blobId == CameraBlobId::JPEG) {
        size = blob->blobSizeBytes;
        if (size > 0 && size <= maxSize - sizeof(CameraBlob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = MARKER_LENGTH; // jump over SOI
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android