1 /*
2 * Copyright 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 #include <algorithm>
17 #include <string_view>
18 #include <type_traits>
19
20 #include <assert.h>
21 #include <ctype.h>
22 #include <fcntl.h>
23 #include <inttypes.h>
24 #include <getopt.h>
25 #include <signal.h>
26 #include <stdio.h>
27 #include <stdlib.h>
28 #include <string.h>
29 #include <sys/stat.h>
30 #include <sys/types.h>
31 #include <sys/wait.h>
32
33 #include <termios.h>
34 #include <unistd.h>
35
36 #define LOG_TAG "ScreenRecord"
37 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
38 //#define LOG_NDEBUG 0
39 #include <utils/Log.h>
40
41 #include <binder/IPCThreadState.h>
42 #include <utils/Errors.h>
43 #include <utils/SystemClock.h>
44 #include <utils/Timers.h>
45 #include <utils/Trace.h>
46
47 #include <gui/ISurfaceComposer.h>
48 #include <gui/Surface.h>
49 #include <gui/SurfaceComposerClient.h>
50 #include <gui/ISurfaceComposer.h>
51 #include <media/MediaCodecBuffer.h>
52 #include <media/NdkMediaCodec.h>
53 #include <media/NdkMediaFormatPriv.h>
54 #include <media/NdkMediaMuxer.h>
55 #include <media/openmax/OMX_IVCommon.h>
56 #include <media/stagefright/MediaCodec.h>
57 #include <media/stagefright/MediaCodecConstants.h>
58 #include <media/stagefright/MediaErrors.h>
59 #include <media/stagefright/PersistentSurface.h>
60 #include <media/stagefright/foundation/ABuffer.h>
61 #include <media/stagefright/foundation/AMessage.h>
62 #include <mediadrm/ICrypto.h>
63 #include <ui/DisplayMode.h>
64 #include <ui/DisplayState.h>
65
66 #include "screenrecord.h"
67 #include "Overlay.h"
68 #include "FrameOutput.h"
69
70 using android::ABuffer;
71 using android::ALooper;
72 using android::AMessage;
73 using android::AString;
74 using android::ui::DisplayMode;
75 using android::FrameOutput;
76 using android::IBinder;
77 using android::IGraphicBufferProducer;
78 using android::ISurfaceComposer;
79 using android::MediaCodec;
80 using android::MediaCodecBuffer;
81 using android::Overlay;
82 using android::PersistentSurface;
83 using android::PhysicalDisplayId;
84 using android::ProcessState;
85 using android::Rect;
86 using android::String8;
87 using android::SurfaceComposerClient;
88 using android::Vector;
89 using android::sp;
90 using android::status_t;
91 using android::SurfaceControl;
92
93 using android::INVALID_OPERATION;
94 using android::NAME_NOT_FOUND;
95 using android::NO_ERROR;
96 using android::UNKNOWN_ERROR;
97
98 namespace ui = android::ui;
99
100 static const uint32_t kMinBitRate = 100000; // 0.1Mbps
101 static const uint32_t kMaxBitRate = 200 * 1000000; // 200Mbps
102 static const uint32_t kMaxTimeLimitSec = 180; // 3 minutes
103 static const uint32_t kFallbackWidth = 1280; // 720p
104 static const uint32_t kFallbackHeight = 720;
105 static const char* kMimeTypeAvc = "video/avc";
106 static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
107
108 // Command-line parameters.
109 static bool gVerbose = false; // chatty on stdout
110 static bool gRotate = false; // rotate 90 degrees
111 static bool gMonotonicTime = false; // use system monotonic time for timestamps
112 static bool gPersistentSurface = false; // use persistent surface
113 static enum {
114 FORMAT_MP4, FORMAT_H264, FORMAT_WEBM, FORMAT_3GPP, FORMAT_FRAMES, FORMAT_RAW_FRAMES
115 } gOutputFormat = FORMAT_MP4; // data format for output
116 static AString gCodecName = ""; // codec name override
117 static bool gSizeSpecified = false; // was size explicitly requested?
118 static bool gWantInfoScreen = false; // do we want initial info screen?
119 static bool gWantFrameTime = false; // do we want times on each frame?
120 static bool gSecureDisplay = false; // should we create a secure virtual display?
121 static uint32_t gVideoWidth = 0; // default width+height
122 static uint32_t gVideoHeight = 0;
123 static uint32_t gBitRate = 20000000; // 20Mbps
124 static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
125 static uint32_t gBframes = 0;
126 static std::optional<PhysicalDisplayId> gPhysicalDisplayId;
127 // Set by signal handler to stop recording.
128 static volatile bool gStopRequested = false;
129
130 // Previous signal handler state, restored after first hit.
131 static struct sigaction gOrigSigactionINT;
132 static struct sigaction gOrigSigactionHUP;
133
134
135 /*
136 * Catch keyboard interrupt signals. On receipt, the "stop requested"
137 * flag is raised, and the original handler is restored (so that, if
138 * we get stuck finishing, a second Ctrl-C will kill the process).
139 */
signalCatcher(int signum)140 static void signalCatcher(int signum)
141 {
142 gStopRequested = true;
143 switch (signum) {
144 case SIGINT:
145 case SIGHUP:
146 sigaction(SIGINT, &gOrigSigactionINT, NULL);
147 sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
148 break;
149 default:
150 abort();
151 break;
152 }
153 }
154
155 /*
156 * Configures signal handlers. The previous handlers are saved.
157 *
158 * If the command is run from an interactive adb shell, we get SIGINT
159 * when Ctrl-C is hit. If we're run from the host, the local adb process
160 * gets the signal, and we get a SIGHUP when the terminal disconnects.
161 */
configureSignals()162 static status_t configureSignals() {
163 struct sigaction act;
164 memset(&act, 0, sizeof(act));
165 act.sa_handler = signalCatcher;
166 if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
167 status_t err = -errno;
168 fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
169 strerror(errno));
170 return err;
171 }
172 if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
173 status_t err = -errno;
174 fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
175 strerror(errno));
176 return err;
177 }
178 signal(SIGPIPE, SIG_IGN);
179 return NO_ERROR;
180 }
181
182 /*
183 * Configures and starts the MediaCodec encoder. Obtains an input surface
184 * from the codec.
185 */
prepareEncoder(float displayFps,sp<MediaCodec> * pCodec,sp<IGraphicBufferProducer> * pBufferProducer)186 static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
187 sp<IGraphicBufferProducer>* pBufferProducer) {
188 status_t err;
189
190 if (gVerbose) {
191 printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
192 gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
193 fflush(stdout);
194 }
195
196 sp<AMessage> format = new AMessage;
197 format->setInt32(KEY_WIDTH, gVideoWidth);
198 format->setInt32(KEY_HEIGHT, gVideoHeight);
199 format->setString(KEY_MIME, kMimeTypeAvc);
200 format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
201 format->setInt32(KEY_BIT_RATE, gBitRate);
202 format->setFloat(KEY_FRAME_RATE, displayFps);
203 format->setInt32(KEY_I_FRAME_INTERVAL, 10);
204 format->setInt32(KEY_MAX_B_FRAMES, gBframes);
205 if (gBframes > 0) {
206 format->setInt32(KEY_PROFILE, AVCProfileMain);
207 format->setInt32(KEY_LEVEL, AVCLevel41);
208 }
209
210 sp<android::ALooper> looper = new android::ALooper;
211 looper->setName("screenrecord_looper");
212 looper->start();
213 ALOGV("Creating codec");
214 sp<MediaCodec> codec;
215 if (gCodecName.empty()) {
216 codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
217 if (codec == NULL) {
218 fprintf(stderr, "ERROR: unable to create %s codec instance\n",
219 kMimeTypeAvc);
220 return UNKNOWN_ERROR;
221 }
222 } else {
223 codec = MediaCodec::CreateByComponentName(looper, gCodecName);
224 if (codec == NULL) {
225 fprintf(stderr, "ERROR: unable to create %s codec instance\n",
226 gCodecName.c_str());
227 return UNKNOWN_ERROR;
228 }
229 }
230
231 err = codec->configure(format, NULL, NULL,
232 MediaCodec::CONFIGURE_FLAG_ENCODE);
233 if (err != NO_ERROR) {
234 fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
235 kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
236 codec->release();
237 return err;
238 }
239
240 ALOGV("Creating encoder input surface");
241 sp<IGraphicBufferProducer> bufferProducer;
242 if (gPersistentSurface) {
243 sp<PersistentSurface> surface = MediaCodec::CreatePersistentInputSurface();
244 bufferProducer = surface->getBufferProducer();
245 err = codec->setInputSurface(surface);
246 } else {
247 err = codec->createInputSurface(&bufferProducer);
248 }
249 if (err != NO_ERROR) {
250 fprintf(stderr,
251 "ERROR: unable to %s encoder input surface (err=%d)\n",
252 gPersistentSurface ? "set" : "create",
253 err);
254 codec->release();
255 return err;
256 }
257
258 ALOGV("Starting codec");
259 err = codec->start();
260 if (err != NO_ERROR) {
261 fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
262 codec->release();
263 return err;
264 }
265
266 ALOGV("Codec prepared");
267 *pCodec = codec;
268 *pBufferProducer = bufferProducer;
269 return 0;
270 }
271
272 /*
273 * Sets the display projection, based on the display dimensions, video size,
274 * and device orientation.
275 */
setDisplayProjection(SurfaceComposerClient::Transaction & t,const sp<IBinder> & dpy,const ui::DisplayState & displayState)276 static status_t setDisplayProjection(
277 SurfaceComposerClient::Transaction& t,
278 const sp<IBinder>& dpy,
279 const ui::DisplayState& displayState) {
280 // Set the region of the layer stack we're interested in, which in our case is "all of it".
281 Rect layerStackRect(displayState.layerStackSpaceRect);
282
283 // We need to preserve the aspect ratio of the display.
284 float displayAspect = layerStackRect.getHeight() / static_cast<float>(layerStackRect.getWidth());
285
286
287 // Set the way we map the output onto the display surface (which will
288 // be e.g. 1280x720 for a 720p video). The rect is interpreted
289 // post-rotation, so if the display is rotated 90 degrees we need to
290 // "pre-rotate" it by flipping width/height, so that the orientation
291 // adjustment changes it back.
292 //
293 // We might want to encode a portrait display as landscape to use more
294 // of the screen real estate. (If players respect a 90-degree rotation
295 // hint, we can essentially get a 720x1280 video instead of 1280x720.)
296 // In that case, we swap the configured video width/height and then
297 // supply a rotation value to the display projection.
298 uint32_t videoWidth, videoHeight;
299 uint32_t outWidth, outHeight;
300 if (!gRotate) {
301 videoWidth = gVideoWidth;
302 videoHeight = gVideoHeight;
303 } else {
304 videoWidth = gVideoHeight;
305 videoHeight = gVideoWidth;
306 }
307 if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
308 // limited by narrow width; reduce height
309 outWidth = videoWidth;
310 outHeight = (uint32_t)(videoWidth * displayAspect);
311 } else {
312 // limited by short height; restrict width
313 outHeight = videoHeight;
314 outWidth = (uint32_t)(videoHeight / displayAspect);
315 }
316 uint32_t offX, offY;
317 offX = (videoWidth - outWidth) / 2;
318 offY = (videoHeight - outHeight) / 2;
319 Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
320
321 if (gVerbose) {
322 if (gRotate) {
323 printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
324 outHeight, outWidth, offY, offX);
325 fflush(stdout);
326 } else {
327 printf("Content area is %ux%u at offset x=%d y=%d\n",
328 outWidth, outHeight, offX, offY);
329 fflush(stdout);
330 }
331 }
332
333 t.setDisplayProjection(dpy,
334 gRotate ? ui::ROTATION_90 : ui::ROTATION_0,
335 layerStackRect, displayRect);
336 return NO_ERROR;
337 }
338
339 /*
340 * Gets the physical id of the display to record. If the user specified a physical
341 * display id, then that id will be set. Otherwise, the default display will be set.
342 */
getPhysicalDisplayId(PhysicalDisplayId & outDisplayId)343 static status_t getPhysicalDisplayId(PhysicalDisplayId& outDisplayId) {
344 if (gPhysicalDisplayId) {
345 outDisplayId = *gPhysicalDisplayId;
346 return NO_ERROR;
347 }
348
349 const std::vector<PhysicalDisplayId> ids = SurfaceComposerClient::getPhysicalDisplayIds();
350 if (ids.empty()) {
351 return INVALID_OPERATION;
352 }
353 outDisplayId = ids.front();
354 return NO_ERROR;
355 }
356
357 /*
358 * Configures the virtual display. When this completes, virtual display
359 * frames will start arriving from the buffer producer.
360 */
prepareVirtualDisplay(const ui::DisplayState & displayState,const sp<IGraphicBufferProducer> & bufferProducer,sp<IBinder> * pDisplayHandle,sp<SurfaceControl> * mirrorRoot)361 static status_t prepareVirtualDisplay(
362 const ui::DisplayState& displayState,
363 const sp<IGraphicBufferProducer>& bufferProducer,
364 sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
365 std::string displayName = gPhysicalDisplayId
366 ? "ScreenRecorder " + to_string(*gPhysicalDisplayId)
367 : "ScreenRecorder";
368 static const std::string kDisplayName(displayName);
369
370 sp<IBinder> dpy = SurfaceComposerClient::createVirtualDisplay(kDisplayName, gSecureDisplay);
371 SurfaceComposerClient::Transaction t;
372 t.setDisplaySurface(dpy, bufferProducer);
373 setDisplayProjection(t, dpy, displayState);
374
375 // ensures that random layer stack assigned to virtual display changes
376 // between calls - if a list of displays with their layer stacks becomes
377 // available, we should use it to ensure a new layer stack is used here
378 std::srand(
379 std::chrono::duration_cast<std::chrono::milliseconds>(
380 std::chrono::system_clock::now().time_since_epoch()
381 ).count());
382 ui::LayerStack layerStack = ui::LayerStack::fromValue(std::rand());
383 t.setDisplayLayerStack(dpy, layerStack);
384
385 PhysicalDisplayId displayId;
386 status_t err = getPhysicalDisplayId(displayId);
387 if (err != NO_ERROR) {
388 return err;
389 }
390 *mirrorRoot = SurfaceComposerClient::getDefault()->mirrorDisplay(displayId);
391 if (*mirrorRoot == nullptr) {
392 ALOGE("Failed to create a mirror for screenrecord");
393 return UNKNOWN_ERROR;
394 }
395 t.setLayerStack(*mirrorRoot, layerStack);
396 t.apply();
397
398 *pDisplayHandle = dpy;
399
400 return NO_ERROR;
401 }
402
403 /*
404 * Writes an unsigned/signed integer byte-by-byte in little endian order regardless
405 * of the platform endianness.
406 */
template <typename T>
static void writeValueLE(T value, uint8_t* buffer) {
    static_assert(std::is_integral_v<std::remove_const_t<T>>,
                  "writeValueLE only supports integral types");
    std::remove_const_t<T> temp = value;
    // Emit the least significant byte first, independent of host
    // endianness.  size_t index (was int) avoids the signed/unsigned
    // comparison against sizeof(T).
    for (size_t i = 0; i < sizeof(T); ++i) {
        buffer[i] = static_cast<std::uint8_t>(temp & 0xff);
        temp >>= 8;
    }
}
415
416 /*
417 * Saves frames presentation time relative to the elapsed realtime clock in microseconds
418 * preceded by a Winscope magic string and frame count to a metadata track.
419 * This metadata is used by the Winscope tool to sync video with SurfaceFlinger
420 * and WindowManager traces.
421 *
422 * The metadata is written as a binary array as follows:
423 * - winscope magic string (kWinscopeMagicString constant), without trailing null char,
424 * - the number of recorded frames (as little endian uint32),
425 * - for every frame its presentation time relative to the elapsed realtime clock in microseconds
426 * (as little endian uint64).
427 */
static status_t writeWinscopeMetadataLegacy(const Vector<int64_t>& timestamps,
        const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
    static constexpr auto kWinscopeMagicStringLegacy = "#VV1NSC0PET1ME!#";

    ALOGV("Writing winscope metadata legacy");
    // Offset between the monotonic clock (used for frame pts) and the
    // elapsed-realtime clock, in microseconds; added to every timestamp
    // below so the track carries elapsed-clock times.
    int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
        - systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
    // Buffer layout (see function comment): magic string (no NUL) |
    // uint32 frame count | uint64 per-frame elapsed-time, all little endian.
    sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
        + sizeof(uint32_t) + strlen(kWinscopeMagicStringLegacy));
    uint8_t* pos = buffer->data();
    // strcpy writes a trailing NUL one byte past the magic string; it is
    // immediately overwritten by the frame count written at 'pos' next.
    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicStringLegacy);
    pos += strlen(kWinscopeMagicStringLegacy);
    writeValueLE<uint32_t>(timestamps.size(), pos);
    pos += sizeof(uint32_t);
    for (size_t idx = 0; idx < timestamps.size(); ++idx) {
        writeValueLE<uint64_t>(static_cast<uint64_t>(timestamps[idx]
            + systemTimeToElapsedTimeOffsetMicros), pos);
        pos += sizeof(uint64_t);
    }
    // Caller guarantees timestamps is non-empty, so [0] is safe here.
    AMediaCodecBufferInfo bufferInfo = {
        0 /* offset */,
        static_cast<int32_t>(buffer->size()),
        timestamps[0] /* presentationTimeUs */,
        0 /* flags */
    };
    return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
}
455
456 /*
457 * Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
458 *
459 * The metadata (version 2) is written as a binary array with the following format:
460 * - winscope magic string (#VV1NSC0PET1ME2#, 16B).
461 * - the metadata version number (4B little endian).
462 * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
463 * - the recorded frames count (8B little endian)
464 * - for each recorded frame:
465 * - System time in elapsed clock timebase in nanoseconds (8B little endian).
466 *
467 *
468 * Metadata version 2 changes
469 *
470 * Use elapsed time for compatibility with other UI traces (most of them):
471 * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
472 * - Frame timestamps in elapsed clock timebase (instead of monotonic)
473 */
static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
        const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
    ALOGV("Writing winscope metadata");

    static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
    static constexpr std::uint32_t metadataVersion = 2;

    // Sample the clocks once so every frame uses the same conversion
    // offsets (elapsed vs monotonic, realtime vs elapsed).
    const auto elapsedTimeNs = android::elapsedRealtimeNano();
    const std::int64_t elapsedToMonotonicTimeOffsetNs =
            elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
    const std::int64_t realToElapsedTimeOffsetNs =
            systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
    const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());

    // Exact size of the binary layout described in the function comment:
    // magic | version | realtime-to-elapsed offset | count | per-frame u64.
    sp<ABuffer> buffer = new ABuffer(
        kWinscopeMagicString.size() +
        sizeof(decltype(metadataVersion)) +
        sizeof(decltype(realToElapsedTimeOffsetNs)) +
        sizeof(decltype(framesCount)) +
        framesCount * sizeof(std::uint64_t)
    );
    std::uint8_t* pos = buffer->data();

    // Magic string is written raw (no trailing NUL).
    std::copy(kWinscopeMagicString.cbegin(), kWinscopeMagicString.cend(), pos);
    pos += kWinscopeMagicString.size();

    writeValueLE(metadataVersion, pos);
    pos += sizeof(decltype(metadataVersion));

    writeValueLE(realToElapsedTimeOffsetNs, pos);
    pos += sizeof(decltype(realToElapsedTimeOffsetNs));

    writeValueLE(framesCount, pos);
    pos += sizeof(decltype(framesCount));

    // Convert each frame pts from monotonic microseconds to elapsed-clock
    // nanoseconds before serializing.
    for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
        const auto timestampElapsedNs =
                elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
        writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
        pos += sizeof(std::uint64_t);
    }

    // Caller guarantees the timestamp vector is non-empty, so [0] is safe.
    AMediaCodecBufferInfo bufferInfo = {
        0 /* offset */,
        static_cast<std::int32_t>(buffer->size()),
        timestampsMonotonicUs[0] /* presentationTimeUs */,
        0 /* flags */
    };
    return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
}
524
525 /*
526 * Update the display projection if size or orientation have changed.
527 */
updateDisplayProjection(const sp<IBinder> & virtualDpy,ui::DisplayState & displayState)528 void updateDisplayProjection(const sp<IBinder>& virtualDpy, ui::DisplayState& displayState) {
529 ATRACE_NAME("updateDisplayProjection");
530
531 PhysicalDisplayId displayId;
532 if (getPhysicalDisplayId(displayId) != NO_ERROR) {
533 fprintf(stderr, "ERROR: Failed to get display id\n");
534 return;
535 }
536
537 sp<IBinder> displayToken = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
538 if (!displayToken) {
539 fprintf(stderr, "ERROR: failed to get display token\n");
540 return;
541 }
542
543 ui::DisplayState currentDisplayState;
544 if (SurfaceComposerClient::getDisplayState(displayToken, ¤tDisplayState) != NO_ERROR) {
545 ALOGW("ERROR: failed to get display state\n");
546 return;
547 }
548
549 if (currentDisplayState.orientation != displayState.orientation ||
550 currentDisplayState.layerStackSpaceRect != displayState.layerStackSpaceRect) {
551 displayState = currentDisplayState;
552 ALOGD("display state changed, now has orientation %s, size (%d, %d)",
553 toCString(displayState.orientation), displayState.layerStackSpaceRect.getWidth(),
554 displayState.layerStackSpaceRect.getHeight());
555
556 SurfaceComposerClient::Transaction t;
557 setDisplayProjection(t, virtualDpy, currentDisplayState);
558 t.apply();
559 }
560 }
561
562 /*
563 * Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
564 * input frames are coming from the virtual display as fast as SurfaceFlinger
565 * wants to send them.
566 *
567 * Exactly one of muxer or rawFp must be non-null.
568 *
569 * The muxer must *not* have been started before calling.
570 */
static status_t runEncoder(const sp<MediaCodec>& encoder, AMediaMuxer* muxer, FILE* rawFp,
        const sp<IBinder>& virtualDpy, ui::DisplayState displayState) {
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;            // muxer track for the video stream
    ssize_t metaLegacyTrackIdx = -1;  // muxer track for legacy winscope metadata
    ssize_t metaTrackIdx = -1;        // muxer track for winscope metadata (v2)
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    Vector<int64_t> timestampsMonotonicUs;  // per-frame pts, kept for winscope
    bool firstFrame = true;

    // Exactly one of (muxer, rawFp) must be provided (see function comment).
    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<MediaCodecBuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (firstFrame) {
            ATRACE_NAME("first_frame");
            firstFrame = false;
        }

        // Enforce the recording time limit.
        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
                fflush(stdout);
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGV("Got codec config buffer (%zu bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                        bufIndex, size, ptsUsec);

                // Track mid-recording rotation/size changes of the display.
                updateDisplayProjection(virtualDpy, displayState);

                // If the virtual display isn't providing us with timestamps,
                // use the current time. This isn't great -- we could get
                // decoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    // Raw output: write the encoded bytes straight through.
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames. We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    // trackIdx was set on INFO_FORMAT_CHANGED, which arrives
                    // before the first data buffer.
                    assert(trackIdx != -1);
                    // TODO
                    sp<ABuffer> buffer = new ABuffer(
                            buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    AMediaCodecBufferInfo bufferInfo = {
                        0 /* offset */,
                        static_cast<int32_t>(buffer->size()),
                        ptsUsec /* presentationTimeUs */,
                        flags
                    };
                    err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                                "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                    if (gOutputFormat == FORMAT_MP4) {
                        // Save the pts so winscope metadata can be written
                        // after the loop finishes.
                        timestampsMonotonicUs.add(ptsUsec);
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger. Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:           // INFO_TRY_AGAIN_LATER
            // Dequeue timed out with no buffer; loop so we notice signals.
            ALOGV("Got -EAGAIN, looping");
            break;
        case android::INFO_FORMAT_CHANGED:    // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                // TODO remove when MediaCodec has been replaced with AMediaCodec
                AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
                if (muxer != NULL) {
                    trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
                    if (gOutputFormat == FORMAT_MP4) {
                        // MP4 output also gets two winscope metadata tracks
                        // (legacy and v2), both octet-stream.
                        AMediaFormat *metaFormat = AMediaFormat_new();
                        AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
                        metaLegacyTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                        metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                        AMediaFormat_delete(metaFormat);
                    }
                    ALOGV("Starting muxer");
                    err = AMediaMuxer_start(muxer);
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case android::INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
        fflush(stdout);
    }
    // Append winscope metadata (legacy + v2) as the final samples on their
    // respective tracks; only applies when the MP4 path recorded frames.
    if (metaLegacyTrackIdx >= 0 && metaTrackIdx >= 0 && !timestampsMonotonicUs.isEmpty()) {
        err = writeWinscopeMetadataLegacy(timestampsMonotonicUs, metaLegacyTrackIdx, muxer);
        if (err != NO_ERROR) {
            fprintf(stderr, "Failed writing legacy winscope metadata to muxer (err=%d)\n", err);
            return err;
        }

        err = writeWinscopeMetadata(timestampsMonotonicUs, metaTrackIdx, muxer);
        if (err != NO_ERROR) {
            fprintf(stderr, "Failed writing winscope metadata to muxer (err=%d)\n", err);
            return err;
        }
    }
    return NO_ERROR;
}
762
763 /*
764 * Raw H.264 byte stream output requested. Send the output to stdout
765 * if desired. If the output is a tty, reconfigure it to avoid the
766 * CRLF line termination that we see with "adb shell" commands.
767 */
prepareRawOutput(const char * fileName)768 static FILE* prepareRawOutput(const char* fileName) {
769 FILE* rawFp = NULL;
770
771 if (strcmp(fileName, "-") == 0) {
772 if (gVerbose) {
773 fprintf(stderr, "ERROR: verbose output and '-' not compatible");
774 return NULL;
775 }
776 rawFp = stdout;
777 } else {
778 rawFp = fopen(fileName, "w");
779 if (rawFp == NULL) {
780 fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
781 return NULL;
782 }
783 }
784
785 int fd = fileno(rawFp);
786 if (isatty(fd)) {
787 // best effort -- reconfigure tty for "raw"
788 ALOGD("raw video output to tty (fd=%d)", fd);
789 struct termios term;
790 if (tcgetattr(fd, &term) == 0) {
791 cfmakeraw(&term);
792 if (tcsetattr(fd, TCSANOW, &term) == 0) {
793 ALOGD("tty successfully configured for raw");
794 }
795 }
796 }
797
798 return rawFp;
799 }
800
// Rounds an unsigned value down to the nearest even number.
static inline uint32_t floorToEven(uint32_t num) {
    return num - (num % 2);
}
804
// Bundles the recording resources so a single destructor tears everything
// down on every exit path (RAII).
struct RecordingData {
    sp<MediaCodec> encoder;   // video encoder; stopped and released on teardown

    sp<IBinder> dpy;          // virtual display handle, if one was created

    sp<Overlay> overlay;      // optional info/frame-time overlay

    // Teardown order matters: destroy the virtual display first so no new
    // frames are produced, then stop the overlay, then stop/release the
    // encoder.  Each member may be null if setup failed partway through.
    ~RecordingData() {
        if (dpy != nullptr) SurfaceComposerClient::destroyVirtualDisplay(dpy);
        if (overlay != nullptr) overlay->stop();
        if (encoder != nullptr) {
            encoder->stop();
            encoder->release();
        }
    }
};
821
822 /*
823 * Computes the maximum width and height across all physical displays.
824 */
getMaxDisplaySize()825 static ui::Size getMaxDisplaySize() {
826 const std::vector<PhysicalDisplayId> physicalDisplayIds =
827 SurfaceComposerClient::getPhysicalDisplayIds();
828 if (physicalDisplayIds.empty()) {
829 fprintf(stderr, "ERROR: Failed to get physical display ids\n");
830 return {};
831 }
832
833 ui::Size result;
834 for (auto& displayId : physicalDisplayIds) {
835 sp<IBinder> displayToken = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
836 if (!displayToken) {
837 fprintf(stderr, "ERROR: failed to get display token\n");
838 continue;
839 }
840
841 ui::DisplayState displayState;
842 status_t err = SurfaceComposerClient::getDisplayState(displayToken, &displayState);
843 if (err != NO_ERROR) {
844 fprintf(stderr, "ERROR: failed to get display state\n");
845 continue;
846 }
847
848 result.height = std::max(result.height, displayState.layerStackSpaceRect.getHeight());
849 result.width = std::max(result.width, displayState.layerStackSpaceRect.getWidth());
850 }
851 return result;
852 }
853
854 /*
855 * Main "do work" start point.
856 *
857 * Configures codec, muxer, and virtual display, then starts moving bits
858 * around.
859 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler so Ctrl-C requests a clean stop
    // (sets gStopRequested) instead of killing us mid-write.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Resolve which physical display to record (either --display-id or
    // the primary display).
    PhysicalDisplayId displayId;
    err = getPhysicalDisplayId(displayId);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: Failed to get display id\n");
        return err;
    }

    // Get main display parameters.
    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(displayId);
    if (display == nullptr) {
        fprintf(stderr, "ERROR: no display\n");
        return NAME_NOT_FOUND;
    }

    // Active mode supplies the refresh rate used to configure the encoder.
    DisplayMode displayMode;
    err = SurfaceComposerClient::getActiveDisplayMode(display, &displayMode);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display config\n");
        return err;
    }

    // Display state supplies orientation, layer stack, and bounds.
    ui::DisplayState displayState;
    err = SurfaceComposerClient::getDisplayState(display, &displayState);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display state\n");
        return err;
    }

    if (displayState.layerStack == ui::INVALID_LAYER_STACK) {
        fprintf(stderr, "ERROR: INVALID_LAYER_STACK, please check your display state.\n");
        return INVALID_OPERATION;
    }

    // When recording a specific display, use that display's bounds; otherwise
    // size the capture to the largest dimensions across all displays.
    const ui::Size layerStackSpaceRect =
        gPhysicalDisplayId ? displayState.layerStackSpaceRect : getMaxDisplaySize();
    if (gVerbose) {
        printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
                displayMode.peakRefreshRate, toCString(displayState.orientation),
                displayState.layerStack.id);
        fflush(stdout);
    }

    // Encoder can't take odd number as config
    if (gVideoWidth == 0) {
        gVideoWidth = floorToEven(layerStackSpaceRect.getWidth());
    }
    if (gVideoHeight == 0) {
        gVideoHeight = floorToEven(layerStackSpaceRect.getHeight());
    }

    RecordingData recordingData = RecordingData();
    // Configure and start the encoder.
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(displayMode.peakRefreshRate, &recordingData.encoder,
                             &encoderInputSurface);

        // If the encoder rejected the requested size and the user didn't
        // specify one explicitly, retry once at the 720p fallback.
        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(displayMode.peakRefreshRate, &recordingData.encoder,
                                     &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all.  The "encoder input surface" we
        // hand to SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture; the overlay
        // re-renders them (plus timing info) into the encoder surface.
        recordingData.overlay = new Overlay(gMonotonicTime);
        err = recordingData.overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
            fflush(stdout);
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // We need to hold a reference to mirrorRoot during the entire recording to ensure it's not
    // cleaned up by SurfaceFlinger. When the reference is dropped, SurfaceFlinger will delete
    // the resource.
    sp<SurfaceControl> mirrorRoot;
    // Configure virtual display.
    err = prepareVirtualDisplay(displayState, bufferProducer, &recordingData.dpy, &mirrorRoot);
    if (err != NO_ERROR) {
        return err;
    }

    // Set up the output sink: a container muxer for mp4/webm/3gpp, or a raw
    // FILE* for h264/frames output.
    AMediaMuxer *muxer = nullptr;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4:
        case FORMAT_WEBM:
        case FORMAT_3GPP: {
            // Configure muxer.  We have to wait for the CSD blob from the encoder
            // before we can start it.
            // Remove any stale output file first; ENOENT just means there
            // was nothing to remove.
            err = unlink(fileName);
            if (err != 0 && errno != ENOENT) {
                fprintf(stderr, "ERROR: couldn't remove existing file\n");
                abort();
            }
            int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
            if (fd < 0) {
                fprintf(stderr, "ERROR: couldn't open file\n");
                abort();
            }
            if (gOutputFormat == FORMAT_MP4) {
                muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
            } else if (gOutputFormat == FORMAT_WEBM) {
                muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_WEBM);
            } else {
                muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP);
            }
            // Presumably the muxer keeps its own reference to the fd, so it
            // is safe to close ours here -- TODO confirm against the
            // AMediaMuxer_new documentation.
            close(fd);
            if (gRotate) {
                AMediaMuxer_setOrientationHint(muxer, 90); // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        // an outer header with version info.  Right now we never change
        // the frame size or format, so we could conceivably just send
        // the current frame header once and then follow it with an
        // unbroken stream of data.

        // Make the EGL context current again.  This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec.  We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now.  (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                // No frame arrived within the poll window; keep waiting.
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(recordingData.encoder, muxer, rawFp, recordingData.dpy, displayState);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
            fflush(stdout);
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        // NOTE(review): the muxer handle is never AMediaMuxer_delete()d on
        // this path; harmless since the process exits shortly after, but
        // worth confirming.
        err = AMediaMuxer_stop(muxer);
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }

    return err;
}
1096
1097 /*
1098 * Sends a broadcast to the media scanner to tell it about the new video.
1099 *
1100 * This is optional, but nice to have.
1101 */
notifyMediaScanner(const char * fileName)1102 static status_t notifyMediaScanner(const char* fileName) {
1103 // need to do allocations before the fork()
1104 String8 fileUrl("file://");
1105 fileUrl.append(fileName);
1106
1107 const char* kCommand = "/system/bin/am";
1108 const char* const argv[] = {
1109 kCommand,
1110 "broadcast",
1111 "-a",
1112 "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
1113 "-d",
1114 fileUrl.c_str(),
1115 NULL
1116 };
1117 if (gVerbose) {
1118 printf("Executing:");
1119 for (int i = 0; argv[i] != NULL; i++) {
1120 printf(" %s", argv[i]);
1121 }
1122 putchar('\n');
1123 fflush(stdout);
1124 }
1125
1126 pid_t pid = fork();
1127 if (pid < 0) {
1128 int err = errno;
1129 ALOGW("fork() failed: %s", strerror(err));
1130 return -err;
1131 } else if (pid > 0) {
1132 // parent; wait for the child, mostly to make the verbose-mode output
1133 // look right, but also to check for and log failures
1134 int status;
1135 pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
1136 if (actualPid != pid) {
1137 ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
1138 } else if (status != 0) {
1139 ALOGW("'am broadcast' exited with status=%d", status);
1140 } else {
1141 ALOGV("'am broadcast' exited successfully");
1142 }
1143 } else {
1144 if (!gVerbose) {
1145 // non-verbose, suppress 'am' output
1146 ALOGV("closing stdout/stderr in child");
1147 int fd = open("/dev/null", O_WRONLY);
1148 if (fd >= 0) {
1149 dup2(fd, STDOUT_FILENO);
1150 dup2(fd, STDERR_FILENO);
1151 close(fd);
1152 }
1153 }
1154 execv(kCommand, const_cast<char* const*>(argv));
1155 ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
1156 exit(1);
1157 }
1158 return NO_ERROR;
1159 }
1160
1161 /*
1162 * Parses a string of the form "1280x720".
1163 *
1164 * Returns true on success.
1165 */
/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success.  Rejects malformed input and values that are
 * negative or too large to represent in a uint32_t.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    // A negative or oversized value would silently wrap when narrowed to
    // uint32_t (e.g. "-1x720" previously produced width 4294967295).
    if (width < 0 || height < 0 ||
            static_cast<unsigned long>(width) > std::numeric_limits<uint32_t>::max() ||
            static_cast<unsigned long>(height) > std::numeric_limits<uint32_t>::max()) {
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}
1187
1188 /*
1189 * Accepts a string with a bare number ("4000000") or with a single-character
1190 * unit ("4m").
1191 *
1192 * Returns an error if parsing fails.
1193 */
parseValueWithUnit(const char * str,uint32_t * pValue)1194 static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
1195 long value;
1196 char* endptr;
1197
1198 value = strtol(str, &endptr, 10);
1199 if (*endptr == '\0') {
1200 // bare number
1201 *pValue = value;
1202 return NO_ERROR;
1203 } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
1204 *pValue = value * 1000000; // check for overflow?
1205 return NO_ERROR;
1206 } else {
1207 fprintf(stderr, "Unrecognized value: %s\n", str);
1208 return UNKNOWN_ERROR;
1209 }
1210 }
1211
1212 /*
1213 * Dumps usage on stderr.
1214 */
usage()1215 static void usage() {
1216 fprintf(stderr,
1217 "Usage: screenrecord [options] <filename>\n"
1218 "\n"
1219 "Android screenrecord v%d.%d. Records the device's display to a .mp4 file.\n"
1220 "\n"
1221 "Options:\n"
1222 "--size WIDTHxHEIGHT\n"
1223 " Set the video size, e.g. \"1280x720\". Default is the device's main\n"
1224 " display resolution (if supported), 1280x720 if not. For best results,\n"
1225 " use a size supported by the AVC encoder.\n"
1226 "--bit-rate RATE\n"
1227 " Set the video bit rate, in bits per second. Value may be specified as\n"
1228 " bits or megabits, e.g. '4000000' is equivalent to '4M'. Default %dMbps.\n"
1229 "--bugreport\n"
1230 " Add additional information, such as a timestamp overlay, that is helpful\n"
1231 " in videos captured to illustrate bugs.\n"
1232 "--time-limit TIME\n"
1233 " Set the maximum recording time, in seconds. Default is %d. Set to 0\n"
1234 " to remove the time limit.\n"
1235 "--display-id ID\n"
1236 " specify the physical display ID to record. Default is the primary display.\n"
1237 " see \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
1238 "--verbose\n"
1239 " Display interesting information on stdout.\n"
1240 "--version\n"
1241 " Show Android screenrecord version.\n"
1242 "--help\n"
1243 " Show this message.\n"
1244 "\n"
1245 "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
1246 "\n",
1247 kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
1248 );
1249 }
1250
1251 /*
1252 * Parses args and kicks things off.
1253 */
int main(int argc, char* const argv[]) {
    // Long options only: the getopt string below is "", so the char values
    // here are just internal tags for the switch, not short options.
    static const struct option longOptions[] = {
        { "help", no_argument, NULL, 'h' },
        { "verbose", no_argument, NULL, 'v' },
        { "size", required_argument, NULL, 's' },
        { "bit-rate", required_argument, NULL, 'b' },
        { "time-limit", required_argument, NULL, 't' },
        { "bugreport", no_argument, NULL, 'u' },
        // "unofficial" options
        { "show-device-info", no_argument, NULL, 'i' },
        { "show-frame-time", no_argument, NULL, 'f' },
        { "rotate", no_argument, NULL, 'r' },
        { "output-format", required_argument, NULL, 'o' },
        { "codec-name", required_argument, NULL, 'N' },
        { "monotonic-time", no_argument, NULL, 'm' },
        { "persistent-surface", no_argument, NULL, 'p' },
        { "bframes", required_argument, NULL, 'B' },
        { "display-id", required_argument, NULL, 'd' },
        { "capture-secure", no_argument, NULL, 'S' },
        { "version", no_argument, NULL, 'x' },
        { NULL, 0, NULL, 0 }
    };

    // Parse options into the g* globals consumed by recordScreen().
    while (true) {
        int optionIndex = 0;
        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
        if (ic == -1) {
            // no more options
            break;
        }

        switch (ic) {
        case 'h':
            usage();
            return 0;
        case 'v':
            gVerbose = true;
            break;
        case 's':
            if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
                fprintf(stderr, "Invalid size '%s', must be width x height\n",
                        optarg);
                return 2;
            }
            if (gVideoWidth == 0 || gVideoHeight == 0) {
                fprintf(stderr,
                    "Invalid size %ux%u, width and height may not be zero\n",
                    gVideoWidth, gVideoHeight);
                return 2;
            }
            // Remember that the user chose the size, so recordScreen() won't
            // fall back to 720p if the encoder rejects it.
            gSizeSpecified = true;
            break;
        case 'b':
            if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
                return 2;
            }
            if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
                fprintf(stderr,
                        "Bit rate %dbps outside acceptable range [%d,%d]\n",
                        gBitRate, kMinBitRate, kMaxBitRate);
                return 2;
            }
            break;
        case 't':
        {
            char *next;
            const int64_t timeLimitSec = strtol(optarg, &next, 10);
            if (next == optarg || (*next != '\0' && *next != ' ')) {
                fprintf(stderr, "Error parsing time limit argument\n");
                return 2;
            }
            if (timeLimitSec > std::numeric_limits<uint32_t>::max() || timeLimitSec < 0) {
                fprintf(stderr,
                        "Time limit %" PRIi64 "s outside acceptable range [0,%u] seconds\n",
                        timeLimitSec, std::numeric_limits<uint32_t>::max());
                return 2;
            }
            // 0 means "no limit"; represent it as max uint32_t so the
            // recording loop only needs a single comparison.
            gTimeLimitSec = (timeLimitSec == 0) ?
                    std::numeric_limits<uint32_t>::max() : timeLimitSec;
            if (gVerbose) {
                printf("Time limit set to %u seconds\n", gTimeLimitSec);
                fflush(stdout);
            }
            break;
        }
        case 'u':
            // --bugreport implies both the info screen and frame-time overlay.
            gWantInfoScreen = true;
            gWantFrameTime = true;
            break;
        case 'i':
            gWantInfoScreen = true;
            break;
        case 'f':
            gWantFrameTime = true;
            break;
        case 'r':
            // experimental feature
            gRotate = true;
            break;
        case 'o':
            // Map the --output-format name to the internal enum.
            if (strcmp(optarg, "mp4") == 0) {
                gOutputFormat = FORMAT_MP4;
            } else if (strcmp(optarg, "h264") == 0) {
                gOutputFormat = FORMAT_H264;
            } else if (strcmp(optarg, "webm") == 0) {
                gOutputFormat = FORMAT_WEBM;
            } else if (strcmp(optarg, "3gpp") == 0) {
                gOutputFormat = FORMAT_3GPP;
            } else if (strcmp(optarg, "frames") == 0) {
                gOutputFormat = FORMAT_FRAMES;
            } else if (strcmp(optarg, "raw-frames") == 0) {
                gOutputFormat = FORMAT_RAW_FRAMES;
            } else {
                fprintf(stderr, "Unknown format '%s'\n", optarg);
                return 2;
            }
            break;
        case 'N':
            gCodecName = optarg;
            break;
        case 'm':
            gMonotonicTime = true;
            break;
        case 'p':
            gPersistentSurface = true;
            break;
        case 'B':
            if (parseValueWithUnit(optarg, &gBframes) != NO_ERROR) {
                return 2;
            }
            break;
        case 'd':
            // Accept the id only if it maps to an actual physical display.
            if (const auto id = android::DisplayId::fromValue<PhysicalDisplayId>(atoll(optarg));
                    id && SurfaceComposerClient::getPhysicalDisplayToken(*id)) {
                gPhysicalDisplayId = *id;
                break;
            }

            fprintf(stderr, "Invalid physical display ID\n");
            return 2;
        case 'S':
            gSecureDisplay = true;
            break;
        case 'x':
            // --version prints to stderr, matching usage() output.
            fprintf(stderr, "%d.%d\n", kVersionMajor, kVersionMinor);
            return 0;
        default:
            if (ic != '?') {
                // getopt_long already prints its own message for '?'.
                fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
            }
            return 2;
        }
    }

    // Exactly one positional argument is required: the output file name.
    if (optind != argc - 1) {
        fprintf(stderr, "Must specify output file (see --help).\n");
        return 2;
    }

    const char* fileName = argv[optind];
    if (gOutputFormat == FORMAT_MP4) {
        // MediaMuxer tries to create the file in the constructor, but we don't
        // learn about the failure until muxer.start(), which returns a generic
        // error code without logging anything. We attempt to create the file
        // now for better diagnostics.
        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
            return 1;
        }
        close(fd);
    }

    status_t err = recordScreen(fileName);
    if (err == NO_ERROR) {
        // Try to notify the media scanner. Not fatal if this fails.
        notifyMediaScanner(fileName);
    }
    ALOGD(err == NO_ERROR ? "success" : "failed");
    return (int) err;
}
1434