Merge in nyc-dr1-release history after reset to nyc-dr1-dev
diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h
index cc62786..00b2c71 100644
--- a/include/media/stagefright/MediaCodecSource.h
+++ b/include/media/stagefright/MediaCodecSource.h
@@ -50,6 +50,7 @@
bool isVideo() const { return mIsVideo; }
sp<IGraphicBufferProducer> getGraphicBufferProducer();
void setInputBufferTimeOffset(int64_t timeOffsetUs);
+ int64_t getFirstSampleSystemTimeUs();
// MediaSource
virtual status_t start(MetaData *params = NULL);
@@ -79,6 +80,7 @@
kWhatStop,
kWhatPause,
kWhatSetInputBufferTimeOffset,
+ kWhatGetFirstSampleSystemTimeUs,
kWhatStopStalled,
};
@@ -90,6 +92,7 @@
uint32_t flags = 0);
status_t onStart(MetaData *params);
+ void onPause();
status_t init();
status_t initEncoder();
void releaseEncoder();
@@ -123,6 +126,8 @@
List<size_t> mAvailEncoderInputIndices;
List<int64_t> mDecodingTimeQueue; // decoding time (us) for video
int64_t mInputBufferTimeOffsetUs;
+ int64_t mFirstSampleSystemTimeUs;
+ bool mPausePending;
// audio drift time
int64_t mFirstSampleTimeUs;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 24ca582..bd1fd7c 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -1807,19 +1807,38 @@
return OK;
}
- // 30 ms buffer to avoid timestamp overlap
- mTotalPausedDurationUs += (systemTime() / 1000) - mPauseStartTimeUs - 30000;
+ int64_t bufferStartTimeUs = 0;
+ bool allSourcesStarted = true;
+ for (const auto &source : { mAudioEncoderSource, mVideoEncoderSource }) {
+ if (source == nullptr) {
+ continue;
+ }
+ int64_t timeUs = source->getFirstSampleSystemTimeUs();
+ if (timeUs < 0) {
+ allSourcesStarted = false;
+ }
+ if (bufferStartTimeUs < timeUs) {
+ bufferStartTimeUs = timeUs;
+ }
+ }
+
+ if (allSourcesStarted) {
+ if (mPauseStartTimeUs < bufferStartTimeUs) {
+ mPauseStartTimeUs = bufferStartTimeUs;
+ }
+ // 30 ms buffer to avoid timestamp overlap
+ mTotalPausedDurationUs += (systemTime() / 1000) - mPauseStartTimeUs - 30000;
+ }
double timeOffset = -mTotalPausedDurationUs;
if (mCaptureFpsEnable) {
timeOffset *= mCaptureFps / mFrameRate;
}
- if (mAudioEncoderSource != NULL) {
- mAudioEncoderSource->setInputBufferTimeOffset((int64_t)timeOffset);
- mAudioEncoderSource->start();
- }
- if (mVideoEncoderSource != NULL) {
- mVideoEncoderSource->setInputBufferTimeOffset((int64_t)timeOffset);
- mVideoEncoderSource->start();
+ for (const auto &source : { mAudioEncoderSource, mVideoEncoderSource }) {
+ if (source == nullptr) {
+ continue;
+ }
+ source->setInputBufferTimeOffset((int64_t)timeOffset);
+ source->start();
}
mPauseStartTimeUs = 0;
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 5039922..311c745 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -342,6 +342,17 @@
postSynchronouslyAndReturnError(msg);
}
+int64_t MediaCodecSource::getFirstSampleSystemTimeUs() {
+ sp<AMessage> msg = new AMessage(kWhatGetFirstSampleSystemTimeUs, mReflector);
+ sp<AMessage> response;
+ msg->postAndAwaitResponse(&response);
+ int64_t timeUs;
+ if (!response->findInt64("time-us", &timeUs)) {
+ timeUs = -1ll;
+ }
+ return timeUs;
+}
+
status_t MediaCodecSource::start(MetaData* params) {
sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
msg->setObject("meta", params);
@@ -408,6 +419,8 @@
mEncoderDataSpace(0),
mGraphicBufferConsumer(consumer),
mInputBufferTimeOffsetUs(0),
+ mFirstSampleSystemTimeUs(-1ll),
+ mPausePending(false),
mFirstSampleTimeUs(-1ll),
mGeneration(0) {
CHECK(mLooper != NULL);
@@ -646,17 +659,19 @@
if (mbuf != NULL) {
CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
- timeUs += mInputBufferTimeOffsetUs;
-
- // Due to the extra delay adjustment at the beginning of start/resume,
- // the adjusted timeUs may be negative if MediaCodecSource goes into pause
- // state before feeding any buffers to the encoder. Drop the buffer in this
- // case.
- if (timeUs < 0) {
- mbuf->release();
- return OK;
+ if (mFirstSampleSystemTimeUs < 0ll) {
+ mFirstSampleSystemTimeUs = systemTime() / 1000;
+ if (mPausePending) {
+ mPausePending = false;
+ onPause();
+ mbuf->release();
+ mAvailEncoderInputIndices.push_back(bufferIndex);
+ return OK;
+ }
}
+ timeUs += mInputBufferTimeOffsetUs;
+
// push decoding time for video, or drift time for audio
if (mIsVideo) {
mDecodingTimeQueue.push_back(timeUs);
@@ -665,7 +680,6 @@
if (mFirstSampleTimeUs < 0ll) {
mFirstSampleTimeUs = timeUs;
}
-
int64_t driftTimeUs = 0;
if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
&& driftTimeUs) {
@@ -717,6 +731,10 @@
if (mStarted) {
ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
+ if (mPausePending) {
+ mPausePending = false;
+ return OK;
+ }
if (mIsVideo) {
mEncoder->requestIDRFrame();
}
@@ -763,6 +781,15 @@
return OK;
}
+void MediaCodecSource::onPause() {
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ suspend();
+ } else {
+ CHECK(mPuller != NULL);
+ mPuller->pause();
+ }
+}
+
void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatPullerNotify:
@@ -832,25 +859,27 @@
}
MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
- memcpy(mbuf->data(), outbuf->data(), outbuf->size());
+ mbuf->add_ref();
if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
if (mIsVideo) {
int64_t decodingTimeUs;
if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ if (mFirstSampleSystemTimeUs < 0ll) {
+ mFirstSampleSystemTimeUs = systemTime() / 1000;
+ if (mPausePending) {
+ mPausePending = false;
+ onPause();
+ mbuf->release();
+ break;
+ }
+ }
// Time offset is not applied at
// feedEncoderInputBuffer() in surface input case.
timeUs += mInputBufferTimeOffsetUs;
-
- // Due to the extra delay adjustment at the beginning of
- // start/resume, the adjusted timeUs may be negative if
- // MediaCodecSource goes into pause state before feeding
- // any buffers to the encoder. Drop the buffer in this case.
- if (timeUs < 0) {
- mEncoder->releaseOutputBuffer(index);
- break;
- }
-
+ // GraphicBufferSource is supposed to discard samples
+ // queued before start, and offset timeUs by start time
+ CHECK_GE(timeUs, 0ll);
// TODO:
// Decoding time for surface source is unavailable,
// use presentation time for now. May need to move
@@ -883,8 +912,8 @@
if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
}
+ memcpy(mbuf->data(), outbuf->data(), outbuf->size());
mbuf->setObserver(this);
- mbuf->add_ref();
{
Mutexed<Output>::Locked output(mOutput);
@@ -975,11 +1004,10 @@
case kWhatPause:
{
- if (mFlags & FLAG_USE_SURFACE_INPUT) {
- suspend();
+ if (mFirstSampleSystemTimeUs < 0) {
+ mPausePending = true;
} else {
- CHECK(mPuller != NULL);
- mPuller->pause();
+ onPause();
}
break;
}
@@ -994,6 +1022,16 @@
response->postReply(replyID);
break;
}
+ case kWhatGetFirstSampleSystemTimeUs:
+ {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt64("time-us", mFirstSampleSystemTimeUs);
+ response->postReply(replyID);
+ break;
+ }
default:
TRESPASS();
}