Fix mismatch issue in catch-up path in Android player.
Signed-off-by: Luo, Ying <[email protected]>
luoying1234 authored and U1X6WK committed Jul 2, 2021
1 parent 9897d1f commit 148b684
Showing 6 changed files with 45 additions and 20 deletions.
3 changes: 2 additions & 1 deletion src/OmafDashAccess/OmafMediaStream.cpp
@@ -812,13 +812,14 @@ int32_t OmafMediaStream::TaskRun(OmafTilesStitch *stitch, std::pair<uint64_t, st
uint64_t optStartPTS = startPTSofCurrSeg;
//1.1 choose opt pts
// LOG(INFO) <<"Trigger PTS " << triggerPTS << "Start PTS " << startPTSofCurrSeg << endl;
#ifndef _ANDROID_NDK_OPTION_
if (m_gopSize > 0 && triggerPTS > startPTSofCurrSeg) {
uint32_t offset_num = triggerPTS / m_gopSize;
uint32_t remain_pts = triggerPTS % m_gopSize;
optStartPTS = remain_pts > m_gopSize - thresholdFrameNum ? (offset_num + 1) * m_gopSize : offset_num * m_gopSize;
OMAF_LOG(LOG_INFO, "Start pts from %lld, video id %d\n", optStartPTS, video_id);
}

#endif
//2. get samples num (indicate that segment parsed)
uint32_t samplesNumPerSeg = 0;
if (m_pStreamInfo != nullptr && m_pStreamInfo->framerate_den != 0) {
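For reference, a worked example of the GOP-snapping arithmetic in the hunk above (the numbers are illustrative, not taken from the commit): with m_gopSize = 30, thresholdFrameNum = 5 and triggerPTS = 87, offset_num = 87 / 30 = 2 and remain_pts = 87 % 30 = 27; since 27 > 30 - 5, optStartPTS snaps forward to (2 + 1) * 30 = 90, the first frame of the next GOP. Had remain_pts been 25 or less, optStartPTS would snap back to 2 * 30 = 60. When _ANDROID_NDK_OPTION_ is defined, this snapping is compiled out and optStartPTS stays at startPTSofCurrSeg.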
@@ -68,7 +68,7 @@ public class MediaLoader {

public static final String MEDIA_FORMAT_KEY = "stereoFormat";
private static final int MAX_SURFACE_NUM = 5;
private static final int MAX_CATCHUP_SURFACE_NUM = 2;
private static final int MAX_CATCHUP_SURFACE_NUM = 1;

/** A spherical mesh for video should be large enough that there are no stereo artifacts. */
private static final int SPHERE_RADIUS_METERS = 50;
@@ -318,13 +318,13 @@ public void onDrawFrame(GL10 gl) {
// Combine touch & sensor data.
// Orientation = pitch * sensor * yaw since that is closest to what most users expect the
// behavior to be.
synchronized (this) {
Matrix.multiplyMM(tempMatrix, 0, deviceOrientationMatrix, 0, touchYawMatrix, 0);
Matrix.multiplyMM(viewMatrix, 0, touchPitchMatrix, 0, tempMatrix, 0);
}

Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0);
if (mediaLoader.mediaPlayer != null && mediaLoader.mediaPlayer.GetStatus() == mediaLoader.mediaPlayer.PLAY) {
synchronized (this) {
Log.i(TAG, "onDrawFrame");
Matrix.multiplyMM(tempMatrix, 0, deviceOrientationMatrix, 0, touchYawMatrix, 0);
Matrix.multiplyMM(viewMatrix, 0, touchPitchMatrix, 0, tempMatrix, 0);
}
Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0);
scene.glDrawFrame(viewProjectionMatrix, Type.MONOCULAR, screenWidth, screenHeight);
}
}
@@ -41,7 +41,7 @@ public final class SceneRenderer {
private static final String TAG = "SceneRenderer";
private static final long Interval = 33;
private static final int MULTI_DECODER_MAX_NUM = 5;
private static final int MAX_CATCHUP_SURFACE_NUM = 2;
private static final int MAX_CATCHUP_SURFACE_NUM = 1;
// This is the primary interface between the Media Player and the GL Scene.
private Surface[] decodeSurface = new Surface[MULTI_DECODER_MAX_NUM + MAX_CATCHUP_SURFACE_NUM];
private Surface displaySurface;
@@ -68,6 +68,8 @@ public final class SceneRenderer {

private int renderCount = 0;

private boolean isWrittenCatchup = false;

private int cnt = 0;

public boolean decode_surface_ready = false;
@@ -309,6 +311,7 @@ public void glDrawFrame(float[] viewProjectionMatrix, int eyeType, int width, in
for (int i = MULTI_DECODER_MAX_NUM; i < MULTI_DECODER_MAX_NUM + MAX_CATCHUP_SURFACE_NUM; i++){
decodeTexture[i].updateTexImage();
}
isWrittenCatchup = true;
Log.i(TAG, "update catch up tex image at pts " + cnt);
}
if (frameAvailable.compareAndSet(true, false))
@@ -324,7 +327,17 @@ public void glDrawFrame(float[] viewProjectionMatrix, int eyeType, int width, in
int ret = 0;

ret = mediaPlayer.UpdateDisplayTex(renderCount);
if (ret == 0) renderCount++;
if (ret == 0) {
Log.i(TAG, "update display tex at pts " + renderCount);
renderCount++;
if (isWrittenCatchup) {
for (int i = MULTI_DECODER_MAX_NUM; i < MULTI_DECODER_MAX_NUM + MAX_CATCHUP_SURFACE_NUM; i++) {
decodeTexture[i].releaseTexImage();
isWrittenCatchup = false;
Log.i(TAG, "release catch up tex image at pts " + renderCount);
}
}
}
checkGlError();
if (ret == 0 && !hasTransformTypeSent) {
transformType = mediaPlayer.GetTransformType();
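In the hunk above, every catch-up updateTexImage() is now paired with exactly one releaseTexImage(), issued only after UpdateDisplayTex() has returned 0 for the display texture. A minimal C++ sketch of that flag handshake, with illustrative names (catchupWritten, LatchCatchupFrame and OnDisplayTexUpdated are stand-ins, not the project's API):

#include <atomic>

std::atomic<bool> catchupWritten{false};   // plays the role of isWrittenCatchup

void LatchCatchupFrame() {
    // counterpart of decodeTexture[i].updateTexImage() on the catch-up surfaces
    catchupWritten.store(true);
}

void OnDisplayTexUpdated() {
    // called only after the display texture advanced (UpdateDisplayTex returned 0)
    if (catchupWritten.exchange(false)) {
        // counterpart of decodeTexture[i].releaseTexImage(): free the latched image
    }
}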
27 changes: 18 additions & 9 deletions src/player/player_lib/Decoder/VideoDecoder_hw.cpp
@@ -347,7 +347,10 @@ RenderStatus VideoDecoder_hw::DecodeFrame(DashPacket *pkt, uint32_t video_id)
frame->video_id = video_id;
frame->bEOS = false;
frame->bCatchup = data->bCatchup;
mDecCtx->push_frame(frame);
while (frame->bCatchup && frame->pts > mNextInputPts) {
ANDROID_LOGD("check frame pts %ld is greater than input pts %d, wait!", frame->pts, mNextInputPts);
usleep(5);
}
ANDROID_LOGD("PTS: %d, frame rwpk num: %d, one rrwpk w: %d, h: %d, l: %d, t: %d", data->pts, data->rwpk->numRegions, data->rwpk->rectRegionPacking[0].projRegWidth,
data->rwpk->rectRegionPacking[0].projRegHeight, data->rwpk->rectRegionPacking[0].projRegLeft, data->rwpk->rectRegionPacking[0].projRegTop);
SAFE_DELETE(data);
@@ -374,12 +377,11 @@ RenderStatus VideoDecoder_hw::DecodeFrame(DashPacket *pkt, uint32_t video_id)
ANDROID_LOGD("CHANGE: successfully decode one catch up frame at pts %ld", frame->pts);
else
ANDROID_LOGD("CHANGE: successfully decode one frame at pts %ld video id %d", frame->pts, mVideoId);

ANDROID_LOGD("mNextInputPts %d", mNextInputPts);
mDecCtx->push_frame(frame);
//decode swiftly when a frame needs to be dropped; for the wait and normal cases, do as follows.
if (frame->pts >= mNextInputPts) {
while (frame->bCatchup && frame->pts > mNextInputPts) {
usleep(5);
}
ANDROID_LOGD("Input pts %lld, frame pts %lld video id %d", mNextInputPts, frame->pts, mVideoId);
std::mutex mtx;
std::unique_lock<std::mutex> lck(mtx);
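With this change the catch-up wait runs before the frame is pushed: a decoded catch-up frame is held back until the renderer's next input PTS has reached the frame's PTS. A minimal, self-contained C++ sketch of that gate (nextInputPts and WaitForDisplayToCatchUp are illustrative names; the 5 µs sleep mirrors the usleep(5) in the hunk):

#include <atomic>
#include <chrono>
#include <cstdint>
#include <thread>

std::atomic<uint64_t> nextInputPts{0};   // PTS the render path expects next

// Hold a catch-up frame until the display side has caught up to its PTS, so the
// catch-up texture never carries a frame that is still ahead of the normal path.
void WaitForDisplayToCatchUp(bool isCatchupFrame, uint64_t framePts) {
    while (isCatchupFrame && framePts > nextInputPts.load()) {
        std::this_thread::sleep_for(std::chrono::microseconds(5));
    }
}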
@@ -460,13 +462,17 @@ RenderStatus VideoDecoder_hw::FlushDecoder(uint32_t video_id)
frame->video_id = video_id;
frame->bEOS = false;
frame->bCatchup = data->bCatchup;
mDecCtx->push_frame(frame);
while (frame->bCatchup && frame->pts > mNextInputPts) {
ANDROID_LOGD("check frame pts %ld is greater than input pts %d, wait!", frame->pts, mNextInputPts);
usleep(5);
}
ANDROID_LOGD("PTS: %d, frame rwpk num: %d, one rrwpk w: %d, h: %d, l: %d, t: %d", data->pts, data->rwpk->numRegions, data->rwpk->rectRegionPacking[0].projRegWidth,
data->rwpk->rectRegionPacking[0].projRegHeight, data->rwpk->rectRegionPacking[0].projRegLeft, data->rwpk->rectRegionPacking[0].projRegTop);
// 2. release output buffer
if (out_buf_idx > 0) ANDROID_LOGD("frame info size is greater than zero!");
if (IS_DUMPED != 1){
bool render = (buf_info.size != 0) && (frame->pts == mNextInputPts || mNextInputPts == 0);
ANDROID_LOGD("is render %d, pts %ld, video id %d", render, frame->pts, mVideoId);
AMediaCodec_releaseOutputBuffer(mDecCtx->mMediaCodec, out_buf_idx, render);
}
else
@@ -486,12 +492,11 @@ RenderStatus VideoDecoder_hw::FlushDecoder(uint32_t video_id)
ANDROID_LOGD("CHANGE: successfully decode one catch up frame at pts %ld", frame->pts);
else
ANDROID_LOGD("CHANGE: successfully decode one frame at pts %ld video id %d", frame->pts, mVideoId);
ANDROID_LOGD("mNextInputPts %d, video id ", mNextInputPts, mVideoId);

ANDROID_LOGD("mNextInputPts %d, video id %d", mNextInputPts, mVideoId);
mDecCtx->push_frame(frame);
//decode swiftly when a frame needs to be dropped; for the wait and normal cases, do as follows.
if (frame->pts >= mNextInputPts) {
while (frame->bCatchup && frame->pts > mNextInputPts) {//wait
usleep(5);
}
std::mutex mtx;
std::unique_lock<std::mutex> lck(mtx);
m_cv.wait(lck);
@@ -584,7 +589,9 @@ void VideoDecoder_hw::Run()
bool isCatchup = pkt_info->bCatchup;
if (pkt_info->pkt) {//catch up eos last frame
LOG(INFO) << "Decoded frame is pts " << pkt_info->pts << endl;
do {
ret = DecodeFrame(pkt_info->pkt, pkt_info->video_id);
} while (ret == RENDER_NULL_PACKET);// ensure the packet is actually sent
if(RENDER_STATUS_OK != ret){
LOG(INFO)<<"Video "<< mVideoId <<": failed to decoder one frame"<<std::endl;
}
Expand All @@ -594,7 +601,9 @@ void VideoDecoder_hw::Run()
SAFE_DELETE(pkt_info);
}
ANDROID_LOGD("Finish to decode last eos frame, video id %d", mVideoId);
do {
ret = FlushDecoder(mVideoId);
} while (ret == RENDER_NULL_PACKET);// ensure the packet is actually sent
if(RENDER_STATUS_OK != ret){
LOG(INFO)<<"Video "<< mVideoId <<": failed to flush decoder when EOS"<<std::endl;
}
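The do/while added around DecodeFrame() and FlushDecoder() keeps resubmitting as long as the call reports RENDER_NULL_PACKET. A minimal sketch of that retry shape (RENDER_STATUS_OK and RENDER_NULL_PACKET echo values visible in the diff; the enum itself, RENDER_ERROR and SubmitPacket are illustrative stand-ins):

enum RenderStatus { RENDER_STATUS_OK, RENDER_NULL_PACKET, RENDER_ERROR };

RenderStatus SubmitPacket();   // stand-in for DecodeFrame() / FlushDecoder()

RenderStatus SubmitUntilAccepted() {
    RenderStatus ret;
    do {
        ret = SubmitPacket();              // resend while the previous attempt took no packet
    } while (ret == RENDER_NULL_PACKET);   // stop on success or a real error
    return ret;
}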
4 changes: 3 additions & 1 deletion src/player/player_lib/MediaSource/DashMediaSource.cpp
@@ -373,7 +373,9 @@ void DashMediaSource::ProcessVideoPacket() {
if (currentWaitTime > WAIT_PACKET_TIME_OUT) // wait 5s but get packet failed
{
m_status = STATUS_TIMEOUT;
// ANDROID_LOGD("Wait too long to get packet from Omaf Dash Access library! Force to quit!");
#ifdef _ANDROID_OS_
ANDROID_LOGD("Wait too long to get packet from Omaf Dash Access library! Force to quit!");
#endif
LOG(ERROR) << " Wait too long to get packet from Omaf Dash Access library! Force to quit! " << std::endl;
}
return;
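For context, the branch above fires once the accumulated wait exceeds WAIT_PACKET_TIME_OUT (5 s according to the code comment); the commit only changes how the event is logged on Android. A minimal sketch of that time-out accounting (kPollIntervalMs and the WaitForPacket helper are assumptions, not the project's API):

#include <chrono>
#include <functional>
#include <thread>

constexpr int kWaitPacketTimeOutMs = 5000;  // matches the "wait 5s" comment in the hunk
constexpr int kPollIntervalMs = 10;         // assumed polling step

// Poll for a packet until one arrives or the time-out budget is spent;
// the caller sets STATUS_TIMEOUT and logs when this returns false.
bool WaitForPacket(const std::function<bool()>& tryGetPacket) {
    int waitedMs = 0;
    while (!tryGetPacket()) {
        if (waitedMs > kWaitPacketTimeOutMs) {
            return false;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(kPollIntervalMs));
        waitedMs += kPollIntervalMs;
    }
    return true;
}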