From b1956113f5601b0cc6ac525d3918a0dfa8d240af Mon Sep 17 00:00:00 2001 From: Sven Gothel Date: Sun, 15 Oct 2023 07:06:53 +0200 Subject: Bug 1472: Enhance GLMediaPlayer AV Sync: Utilize SCR-aware audio PTS as master-clock, enabling proper AV sync w/ untouched audio We can finally utilize the added pass-through audio PTS, see commits - GlueGen 52725b4c6525487f93407f529dc0a758b387a4fc - JOAL 12029f1ec1d8afa576e1ac61655f318cc37c1d16 This enables us to use the audio PTS as the master-clock and adjust video to the untouched audio. In case no audio is selected/playing or audio is muted, we sync merely on the system-clock (SCR) w/o audio. AV granularity is 22 ms; however, since the ALAudioSink PTS may be a little late, this yields an even slightly better sync in case of too-early audio (d_apts < 0). Since video frames are sync'ed to audio, the resync procedure may result in a hysteresis swinging into sync. This might be noticeable at start, when audio is resumed, or after a seek. We leave the audio frames untouched to reduce the processing burden and allow undisrupted listening. Passed AV sync tests: - Five-minute-sync-test.mp4 - Audio-Video-Sync-Test-Calibration-23.98fps-24fps.mp4 - Audio-Video-Sync-Test-2.mkv --- .../com/jogamp/opengl/util/av/GLMediaPlayer.java | 29 +- .../android/av/AndroidGLMediaPlayerAPI14.java | 23 +- .../jogamp/opengl/util/av/GLMediaPlayerImpl.java | 479 +++++++++++++-------- .../jogamp/opengl/util/av/NullGLMediaPlayer.java | 34 +- .../opengl/util/av/impl/FFMPEGMediaPlayer.java | 57 ++- .../opengl/util/av/impl/OMXGLMediaPlayer.java | 19 +- 6 files changed, 423 insertions(+), 218 deletions(-) (limited to 'src/jogl/classes') diff --git a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java index 3e4d589f3..e3ef27303 100644 --- a/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java +++ b/src/jogl/classes/com/jogamp/opengl/util/av/GLMediaPlayer.java @@ -137,10 +137,10 @@ import com.jogamp.opengl.util.texture.TextureSequence; *

* The class follows a passive A/V synchronization pattern. * Audio is being untouched, while {@link #getNextTexture(GL)} delivers a new video frame - * only, if its timestamp is less than {@link #MAXIMUM_VIDEO_ASYNC} ahead of time. - * If its timestamp is more than {@link #MAXIMUM_VIDEO_ASYNC} ahead of time, + * only if its timestamp is less than {@link #MAX_VIDEO_ASYNC} ahead of time. + * If its timestamp is more than {@link #MAX_VIDEO_ASYNC} ahead of time, * the previous frame is returned. - * If its timestamp is more than {@link #MAXIMUM_VIDEO_ASYNC} after time, + * If its timestamp is more than {@link #MAX_VIDEO_ASYNC} after time, * the frame is dropped and the next frame is being fetched. *

*

@@ -253,7 +253,8 @@ public interface GLMediaPlayer extends TextureSequence { public static final String CameraPropRate = "rate"; /** Maximum video frame async of {@value} milliseconds. */ - public static final int MAXIMUM_VIDEO_ASYNC = 22; + public static final int MAX_VIDEO_ASYNC = 22; + public static final int MIN_VIDEO_ASYNC = 11; /** * A StreamException encapsulates a caught exception in the decoder thread, a.k.a StreamWorker, @@ -612,12 +613,22 @@ public interface GLMediaPlayer extends TextureSequence { public int getPresentedFrameCount(); /** - * @return current video presentation timestamp (PTS) in milliseconds of {@link #getLastTexture()} + * Returns current video presentation timestamp (PTS) in milliseconds of {@link #getLastTexture()} + *

+ * The relative millisecond PTS since start of the presentation, stored in an integer, + * covers a time span of 2'147'483'647 ms (see {@link Integer#MAX_VALUE}), + * i.e. 2'147'483 seconds or 24.855 days.

**/ public int getVideoPTS(); /** - * @return current audio presentation timestamp (PTS) in milliseconds. + * Returns current audio presentation timestamp (PTS) in milliseconds. + *

+ * The relative millisecond PTS since start of the presentation, stored in an integer, + * covers a time span of 2'147'483'647 ms (see {@link Integer#MAX_VALUE}), + * i.e. 2'147'483 seconds or 24.855 days.

**/ public int getAudioPTS(); @@ -679,7 +690,11 @@ public interface GLMediaPlayer extends TextureSequence { public int getAudioFrames(); /** - * @return total duration of stream in msec. + * Returns the total duration of the stream in msec. + *

+ * The duration, stored in an integer, covers 2'147'483'647 ms (see {@link Integer#MAX_VALUE}), + * i.e. 2'147'483 seconds or 24.855 days.

*/ public int getDuration(); diff --git a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java index 667c03458..bc31b6aea 100644 --- a/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java +++ b/src/jogl/classes/jogamp/opengl/android/av/AndroidGLMediaPlayerAPI14.java @@ -34,9 +34,12 @@ import com.jogamp.opengl.GL; import com.jogamp.opengl.GLES2; import com.jogamp.opengl.GLException; import com.jogamp.common.os.AndroidVersion; +import com.jogamp.common.os.Clock; import com.jogamp.common.os.Platform; +import com.jogamp.common.av.PTS; import com.jogamp.common.av.TimeFrameI; import com.jogamp.opengl.util.av.GLMediaPlayer; +import com.jogamp.opengl.util.av.GLMediaPlayer.State; import com.jogamp.opengl.util.texture.Texture; import com.jogamp.opengl.util.texture.TextureSequence; @@ -137,7 +140,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { @Override protected final boolean resumeImpl() { - playStart = Platform.currentTimeMillis(); + playStart = Platform.currentMillis(); if(null != mp) { try { mp.start(); @@ -209,7 +212,21 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { } @Override - protected final int getAudioPTSImpl() { return null != mp ? mp.getCurrentPosition() : 0; } + protected PTS getAudioPTSImpl() { return audio_pts; } + @Override + protected PTS getUpdatedAudioPTS() { + if( null != mp ) { + audio_pts.set(Clock.currentMillis(), mp.getCurrentPosition()); + } else { + audio_pts.set(Clock.currentMillis(), 0); + } + return audio_pts; + } + @Override + protected int getAudioQueuedDuration() { return 0; } + @Override + protected int getLastBufferedAudioPTS() { return audio_pts.getLast(); } + private final PTS audio_pts = new PTS( () -> { return State.Playing == getState() ? 
getPlaySpeed() : 0f; } ); @Override protected final void destroyImpl() { @@ -416,7 +433,7 @@ public class AndroidGLMediaPlayerAPI14 extends GLMediaPlayerImpl { if( null != mp ) { pts = mp.getCurrentPosition(); } else { - pts = (int) ( Platform.currentTimeMillis() - playStart ); + pts = (int) ( Platform.currentMillis() - playStart ); } // stex.getTransformMatrix(atex.getSTMatrix()); } diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java index bc3768abc..2ee546a07 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java +++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java @@ -32,6 +32,7 @@ import java.net.URISyntaxException; import java.net.URLConnection; import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.Map; @@ -51,13 +52,15 @@ import jogamp.opengl.Debug; import com.jogamp.common.net.UriQueryProps; import com.jogamp.common.nio.Buffers; import com.jogamp.common.av.AudioSink; +import com.jogamp.common.av.PTS; import com.jogamp.common.av.TimeFrameI; import com.jogamp.common.net.Uri; -import com.jogamp.common.os.Platform; +import com.jogamp.common.os.Clock; import com.jogamp.common.util.IOUtil; import com.jogamp.common.util.InterruptSource; import com.jogamp.common.util.LFRingbuffer; import com.jogamp.common.util.Ringbuffer; +import com.jogamp.common.util.TSPrinter; import com.jogamp.common.util.WorkerThread; import com.jogamp.math.FloatUtil; import com.jogamp.opengl.GLExtensions; @@ -83,9 +86,17 @@ import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame; */ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { private static final int STREAM_WORKER_DELAY = Debug.getIntProperty("jogl.debug.GLMediaPlayer.StreamWorker.delay", false, 0); - + private static final TSPrinter logout; private static final String unknown = "unknown"; + static { + if( DEBUG || DEBUG_AVSYNC || DEBUG_NATIVE ) { + logout = TSPrinter.stderr(); + } else { + logout = null; + } + } + private volatile State state; private final Object stateLock = new Object(); @@ -150,7 +161,6 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { private volatile int decodedFrameCount = 0; private int presentedFrameCount = 0; private int displayedFrameCount = 0; - private volatile int video_pts_last = 0; /** * Help detect EOS, limit is {@link #MAX_FRAMELESS_MS_UNTIL_EOS}. @@ -168,26 +178,33 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected AudioSink audioSink = null; protected boolean audioSinkPlaySpeedSet = false; - /** System Clock Reference (SCR) of first audio PTS at start time. */ - private long audio_scr_t0 = 0; - private boolean audioSCR_reset = true; + /** AV System Clock Reference (SCR) */ + private final PTS av_scr = new PTS( () -> { return State.Playing == state ? playSpeed : 0f; } ); + /** Trigger System Clock Reference (SCR) reset. */ + private boolean video_scr_reset = false; + private boolean audio_scr_reset = false; + + private final PTS video_pts_last = new PTS( () -> { return State.Playing == state ? playSpeed : 0f; } ); - /** System Clock Reference (SCR) of first video frame at start time. */ - private long video_scr_t0 = 0; - /** System Clock Reference (SCR) PTS offset, i.e. first video PTS at start time. */ - private int video_scr_pts = 0; /** Cumulative video pts diff. */ private float video_dpts_cum = 0; /** Cumulative video frames. 
*/ private int video_dpts_count = 0; + /** Cumulative audio pts diff. */ + private float audio_dpts_cum = 0; + /** Cumulative audio frames. */ + private int audio_dpts_count = 0; + + private int audio_queued_last_ms = 0; + /** FIXME: Remove or - if helpful - configure max video queue size */ + private static final int video_queue_growth = 0; + /** Min number of frames required for video cumulative sync. */ - private static final int VIDEO_DPTS_NUM = 20; + private static final int AV_DPTS_NUM = 20; /** Cumulative coefficient, value {@value}. */ - private static final float VIDEO_DPTS_COEFF = 0.7943282f; // (float) Math.exp(Math.log(0.01) / VIDEO_DPTS_NUM); + private static final float AV_DPTS_COEFF = 0.7943282f; // (float) Math.exp(Math.log(0.01) / AV_DPTS_NUM); /** Maximum valid video pts diff. */ - private static final int VIDEO_DPTS_MAX = 5000; // 5s max diff + private static final int AV_DPTS_MAX = 5000; // 5s max diff - /** Trigger video PTS reset with given cause as bitfield. */ - private boolean videoSCR_reset = false; private TextureFrame[] videoFramesOrig = null; private Ringbuffer videoFramesFree = null; @@ -332,35 +349,45 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { public final int getPresentedFrameCount() { return presentedFrameCount; } @Override - public final int getVideoPTS() { return video_pts_last; } + public final int getVideoPTS() { return video_pts_last.getLast(); } @Override public final int getAudioPTS() { - if( State.Uninitialized != state ) { - return getAudioPTSImpl(); + if( State.Uninitialized != state && null != audioSink ) { + return audioSink.getPTS().getLast(); } return 0; } /** Override if not using audioSink! */ - protected int getAudioPTSImpl() { + protected PTS getAudioPTSImpl() { if( null != audioSink ) { return audioSink.getPTS(); } else { - return 0; + return dummy_audio_pts; } } /** Override if not using audioSink! */ - protected int getLastBufferedAudioPTSImpl() { + protected PTS getUpdatedAudioPTS() { if( null != audioSink ) { - return audioSink.getLastBufferedPTS(); + return audioSink.updateQueue(); + } else { + return dummy_audio_pts; + } + } + private final PTS dummy_audio_pts = new PTS( () -> { return State.Playing == state ? playSpeed : 0f; } ); + + /** Override if not using audioSink! */ + protected int getAudioQueuedDuration() { if( null != audioSink ) { + return (int)(audioSink.getQueuedDuration()*1000f); } else { return 0; } } /** Override if not using audioSink!
*/ - protected float getQueuedAudioTimeImpl() { + protected int getLastBufferedAudioPTS() { if( null != audioSink ) { - return audioSink.getQueuedTime(); + return audioSink.getLastBufferedPTS(); } else { return 0; } @@ -389,7 +416,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } default: } - if(DEBUG) { System.err.println("Play: "+preState+" -> "+state+", "+toString()); } + if(DEBUG) { logout.println("Play: "+preState+" -> "+state+", "+toString()); } return state; } } @@ -418,7 +445,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { resume(); } } - if(DEBUG) { System.err.println("Pause: "+preState+" -> "+state+", "+toString()); } + if(DEBUG) { logout.println("Pause: "+preState+" -> "+state+", "+toString()); } return state; } } @@ -436,7 +463,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { stopImpl(); changeState(new GLMediaPlayer.EventMask(), State.Uninitialized); // attachedObjects.clear(); - if(DEBUG) { System.err.println("Stop: "+preState+" -> "+state+", "+toString()); } + if(DEBUG) { logout.println("Stop: "+preState+" -> "+state+", "+toString()); } return state; } } @@ -489,7 +516,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { audioSink.play(); // cont. w/ new data } if(DEBUG) { - System.err.println("Seek("+msec+"): "+getPerfString()); + logout.println("Seek("+msec+"): "+getPerfString()); } if( null != streamWorker ) { streamWorker.resume(); @@ -499,7 +526,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { default: pts1 = 0; } - if(DEBUG) { System.err.println("Seek("+msec+"): "+preState+" -> "+state+", "+toString()); } + if(DEBUG) { logout.println("Seek("+msec+"): "+preState+" -> "+state+", "+toString()); } return pts1; } } @@ -525,7 +552,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } } } - if(DEBUG) { System.err.println("setPlaySpeed("+rate+"): "+state+", "+preSpeed+" -> "+playSpeed+", "+toString()); } + if(DEBUG) { logout.println("setPlaySpeed("+rate+"): "+state+", "+preSpeed+" -> "+playSpeed+", "+toString()); } return res; } } @@ -590,7 +617,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { audioVolume = v; res = true; } - if(DEBUG) { System.err.println("setAudioVolume("+v+"): "+state+", "+preVolume+" -> "+audioVolume+", "+toString()); } + if(DEBUG) { logout.println("setAudioVolume("+v+"): "+state+", "+preVolume+" -> "+audioVolume+", "+toString()); } return res; } } @@ -715,7 +742,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } } if(DEBUG) { - System.err.println("GLMediaPlayer.initGL: "+this); + logout.println("GLMediaPlayer.initGL: "+this); } try { resetAVPTSAndFlush(); @@ -724,7 +751,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { initGLImpl(gl); setAudioVolume( audioVolume ); // update volume if(DEBUG) { - System.err.println("initGLImpl.X "+this); + logout.println("initGLImpl.X "+this); } if( null != gl ) { videoFramesOrig = createTexFrames(gl, textureCount); @@ -929,7 +956,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } } if(DEBUG) { - System.err.println("Created TexImage2D RGBA "+tWidth+"x"+tHeight+", target "+toHexString(textureTarget)+ + logout.println("Created TexImage2D RGBA "+tWidth+"x"+tHeight+", target "+toHexString(textureTarget)+ ", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)); } } @@ -972,18 +999,44 @@ public abstract class GLMediaPlayerImpl implements 
GLMediaPlayer { texFrames[i] = null; } if( DEBUG ) { - System.err.println(Thread.currentThread().getName()+"> Clear TexFrame["+i+"]: "+frame+" -> null"); + logout.println(Thread.currentThread().getName()+"> Clear TexFrame["+i+"]: "+frame+" -> null"); } } } } + private static TextureFrame[] concat(final TextureFrame[] first, final TextureFrame[] second) { + final TextureFrame[] result = Arrays.copyOf(first, first.length + second.length); + System.arraycopy(second, 0, result, first.length, second.length); + return result; + } + + private final boolean growVideoFrameBuffers(final GL gl, final int growAmount) { + if( null == gl || videoFramesFree == null || videoFramesOrig == null || videoFramesDecoded == null) { + return false; + } + final TextureFrame[] newElems; + try { + newElems = createTexFrames(gl, growAmount); + } catch(final RuntimeException rex) { + return false; + } + final TextureFrame[] newArray = concat(videoFramesOrig, newElems); + videoFramesOrig = newArray; + videoFramesFree.growEmptyBuffer(newElems); + return true; + } + private TextureFrame cachedFrame = null; - private long lastTimeMillis = 0; + private long lastMillis = 0; private int repeatedFrame = 0; private final boolean[] stGotVFrame = { false }; + protected boolean audioStreamEnabled() { + return GLMediaPlayer.STREAM_ID_NONE != aid && !isAudioMuted() && ( 1.0f == getPlaySpeed() || audioSinkPlaySpeedSet ); + } + @Override public final TextureFrame getNextTexture(final GL gl) throws IllegalStateException { synchronized( stateLock ) { @@ -991,6 +1044,22 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { boolean dropFrame = false; try { do { + final long currentMillis = Clock.currentMillis(); + final int audio_queued_ms; + if( audioStreamEnabled() && !audio_scr_reset ) { + audio_queued_ms = getAudioQueuedDuration(); + } else { + audio_queued_ms = 100; + } + final int audio_dequeued_ms; + if( audio_queued_last_ms > audio_queued_ms ) { + audio_dequeued_ms = audio_queued_last_ms - audio_queued_ms; + } else { + audio_dequeued_ms = 0; + } + char syncModeA = '_', syncModeB = '_'; + char resetModeA = '_', resetModeV = '_'; + final boolean droppedFrame; if( dropFrame ) { presentedFrameCount--; @@ -999,65 +1068,100 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } else { droppedFrame = false; } - final boolean playCached = null != cachedFrame; - final int video_pts; + final PTS video_pts = new PTS( () -> { return State.Playing == state ? 
playSpeed : 0f; } ); final boolean hasVideoFrame; TextureFrame nextFrame; - if( playCached ) { + if( null != cachedFrame && ( audio_queued_ms >= audio_dequeued_ms || video_queue_growth > 0 ) ) { nextFrame = cachedFrame; cachedFrame = null; presentedFrameCount--; - video_pts = nextFrame.getPTS(); + video_pts.set(currentMillis, nextFrame.getPTS()); hasVideoFrame = true; repeatedFrame++; + syncModeA = 'r'; + if( videoFramesFree.isEmpty() && audio_queued_ms < audio_dequeued_ms ) { + growVideoFrameBuffers(gl, video_queue_growth); + syncModeA = 'z'; + } } else { + if( null != cachedFrame && null != videoFramesFree ) { + // Push back skipped repeated frame due to low audio_queued_ms + videoFramesFree.putBlocking(cachedFrame); + syncModeA = 'z'; + } + cachedFrame = null; repeatedFrame = 0; if( null != videoFramesDecoded ) { // multi-threaded and video available nextFrame = videoFramesDecoded.get(); if( null != nextFrame ) { - video_pts = nextFrame.getPTS(); + video_pts.set(currentMillis, nextFrame.getPTS()); hasVideoFrame = true; } else { - video_pts = TimeFrameI.INVALID_PTS; + video_pts.set(0, TimeFrameI.INVALID_PTS); hasVideoFrame = false; + syncModeA = 'e'; } } else { // single-threaded or audio-only - video_pts = getNextSingleThreaded(gl, lastFrame, stGotVFrame); + video_pts.set(currentMillis, getNextSingleThreaded(gl, lastFrame, stGotVFrame)); nextFrame = lastFrame; hasVideoFrame = stGotVFrame[0]; } } - final long currentTimeMillis = Platform.currentTimeMillis(); - if( TimeFrameI.END_OF_STREAM_PTS == video_pts || - ( duration > 0 && duration <= video_pts ) || maxNullFrameCountUntilEOS <= nullFrameCount ) + final PTS audio_pts = new PTS( () -> { return State.Playing == state ? playSpeed : 0f; } ); + final int audio_pts_lb; + final boolean use_audio; + if( audioStreamEnabled() ) { + final PTS apts = getUpdatedAudioPTS(); + if( !apts.isValid() ) { + audio_pts.set(video_pts); + use_audio = false; + } else { + audio_pts.set(apts); + use_audio = true; + if( audio_scr_reset ) { + audio_scr_reset = false; + resetSCR(apts); + audio_queued_last_ms = 0; + resetModeA = 'A'; + } + } + audio_pts_lb = getLastBufferedAudioPTS(); + } else { + audio_pts.set(video_pts); + audio_pts_lb = 0; + use_audio = false; + } + + if( video_pts.isValid() ) { + final int frame_period_last = video_pts.diffLast(video_pts_last); // rendering loop interrupted ? + if( video_scr_reset || frame_period_last > frame_duration*10 ) { + video_scr_reset = false; + resetSCR( use_audio ? 
audio_pts : video_pts ); + resetModeV = 'V'; + } + } + + if( video_pts.isEOS() || + ( duration > 0 && duration <= video_pts.get(currentMillis) ) || maxNullFrameCountUntilEOS <= nullFrameCount ) { // EOS if( DEBUG || DEBUG_AVSYNC ) { - System.err.println( "AV-EOS (getNextTexture): EOS_PTS "+(TimeFrameI.END_OF_STREAM_PTS == video_pts)+", "+this); + logout.println(currentMillis, "AV-EOS (getNextTexture): EOS_PTS "+(video_pts.isEOS())+", "+this); } pauseImpl(true, new GLMediaPlayer.EventMask(GLMediaPlayer.EventMask.Bit.EOS)); - } else if( TimeFrameI.INVALID_PTS == video_pts ) { // no audio or video frame + } else if( !video_pts.isValid() ) { // no audio or video frame if( null == videoFramesDecoded || !videoFramesDecoded.isEmpty() ) { nullFrameCount++; } if( DEBUG_AVSYNC ) { - final int audio_pts = getAudioPTSImpl(); - final int audio_pts_lb = getLastBufferedAudioPTSImpl(); - final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed ); - final int d_apts; - if( audio_pts != TimeFrameI.INVALID_PTS ) { - d_apts = audio_pts - audio_scr; - } else { - d_apts = 0; - } - final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed ); - final int d_vpts = video_pts - video_scr; - System.err.println( "AV?: dT "+(currentTimeMillis-lastTimeMillis)+", nullFrames "+nullFrameCount+ - getPerfStringImpl( video_scr, video_pts, d_vpts, audio_scr, audio_pts, audio_pts_lb, d_apts, 0 ) + ", droppedFrame "+droppedFrame); + syncModeB = '?'; + logout.println(currentMillis, "AV"+syncModeA+syncModeB+":"+resetModeA+resetModeV+ + ": dT "+(currentMillis-lastMillis)+", nullFrames "+nullFrameCount+", "+ + getPerfStringImpl(currentMillis, video_pts, audio_pts, audio_queued_ms, audio_pts_lb) + ", droppedFrame "+droppedFrame); } } else { // valid pts: has audio or video frame nullFrameCount=0; @@ -1065,65 +1169,99 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { if( hasVideoFrame ) { // has video frame presentedFrameCount++; - final int audio_pts = getAudioPTSImpl(); - final int audio_pts_lb = getLastBufferedAudioPTSImpl(); - final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed ); + // d_apts > 0: audio too slow (behind SCR) repeat video frame, < 0: audio too fast (in front of SCR) drop video frame final int d_apts; - if( audio_pts != TimeFrameI.INVALID_PTS ) { - d_apts = audio_pts - audio_scr; + if( audio_pts.isValid() ) { + d_apts = av_scr.diff(currentMillis, audio_pts); } else { d_apts = 0; } - - final int frame_period_last = video_pts - video_pts_last; // rendering loop interrupted ? - if( videoSCR_reset || frame_period_last > frame_duration*10 ) { - videoSCR_reset = false; - video_scr_t0 = currentTimeMillis; - video_scr_pts = video_pts; - } - final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed ); - final int d_vpts = video_pts - video_scr; - // final int d_avpts = d_vpts - d_apts; - if( -VIDEO_DPTS_MAX > d_vpts || d_vpts > VIDEO_DPTS_MAX ) { - // if( -VIDEO_DPTS_MAX > d_avpts || d_avpts > VIDEO_DPTS_MAX ) { + // d_vpts > 0: video too fast (in front of SCR) repeat frame, < 0: video too slow (behind SCR) drop frame + int d_vpts = video_pts.diff(currentMillis, av_scr); + + final boolean d_apts_off = use_audio && ( -AV_DPTS_MAX > d_apts || d_apts > AV_DPTS_MAX ); + final boolean d_vpts_off = -AV_DPTS_MAX > d_vpts || d_vpts > AV_DPTS_MAX; + if( d_apts_off || d_vpts_off ) { + // Extreme rare off audio/video DPTS + resetSCR( use_audio ? 
audio_pts : video_pts ); + resetModeA = d_apts_off ? 'A' : 'a'; + resetModeV = d_vpts_off ? 'V' : 'v'; if( DEBUG_AVSYNC ) { - System.err.println( "AV*: dT "+(currentTimeMillis-lastTimeMillis)+", "+ - getPerfStringImpl( video_scr, video_pts, d_vpts, audio_scr, audio_pts, audio_pts_lb, d_apts, 0 ) + ", "+nextFrame); + syncModeB = '*'; + logout.println(currentMillis, "AV"+syncModeA+syncModeB+":"+resetModeA+resetModeV+ + ": dT "+(currentMillis-lastMillis)+", "+ + getPerfStringImpl(currentMillis, video_pts, audio_pts, audio_queued_ms, audio_pts_lb)); // + ", "+nextFrame); } } else { - final int dpy_den = displayedFrameCount > 0 ? displayedFrameCount : 1; - final int avg_dpy_duration = ( (int) ( currentTimeMillis - video_scr_t0 ) ) / dpy_den ; // ms/f - final int maxVideoDelay = Math.min(avg_dpy_duration, MAXIMUM_VIDEO_ASYNC); + final int dt_a; + final boolean scr_resynced; + if( use_audio ) { + audio_dpts_count++; + if( droppedFrame ) { + audio_dpts_cum = d_apts * AV_DPTS_COEFF + audio_dpts_cum; // weight on current frame's PTS + } else { + audio_dpts_cum = d_apts + AV_DPTS_COEFF * audio_dpts_cum; + } + dt_a = (int) ( getDPTSAvg(audio_dpts_cum, audio_dpts_count) / playSpeed + 0.5f ); + } else { + dt_a = 0; + } + if( ( dt_a < -MAX_VIDEO_ASYNC && d_apts < 0 ) || ( dt_a > MAX_VIDEO_ASYNC && d_apts > 0 ) ) { + // resync to audio + scr_resynced = true; + syncModeB = '*'; + av_scr.set(audio_pts); + audio_dpts_cum = d_apts * AV_DPTS_COEFF + d_apts; // total weight on current frame's PTS + audio_dpts_count = AV_DPTS_NUM - AV_DPTS_NUM/4; + d_vpts = video_pts.diff(currentMillis, av_scr); + // video_dpts_cum = 0; + // video_dpts_count = 0; + resetModeA = 'A'; + // resetModeV = 'V'; + } else { + scr_resynced = false; + } + final int avg_dpy_duration, maxVideoDelay; + { + final int dpy_den = displayedFrameCount > 0 ? displayedFrameCount : 1; + avg_dpy_duration = ( (int) ( ( currentMillis - av_scr.getSCR() ) * playSpeed + 0.5f ) ) / dpy_den ; // ms/f + maxVideoDelay = Math.min(Math.max(avg_dpy_duration, MIN_VIDEO_ASYNC), MAX_VIDEO_ASYNC); + } video_dpts_count++; - // video_dpts_cum = d_avpts + VIDEO_DPTS_COEFF * video_dpts_cum; - video_dpts_cum = d_vpts + VIDEO_DPTS_COEFF * video_dpts_cum; - final int video_dpts_avg_diff = video_dpts_count >= VIDEO_DPTS_NUM ? getVideoDPTSAvg() : 0; - final int dt = (int) ( video_dpts_avg_diff / playSpeed + 0.5f ); - // final int dt = (int) ( d_vpts / playSpeed + 0.5f ); - // final int dt = (int) ( d_avpts / playSpeed + 0.5f ); + if( droppedFrame || scr_resynced ) { + video_dpts_cum = d_vpts * AV_DPTS_COEFF + video_dpts_cum; // weight on current frame's PTS + } else { + video_dpts_cum = d_vpts + AV_DPTS_COEFF * video_dpts_cum; + } + final int dt_v = (int) ( getDPTSAvg(video_dpts_cum, video_dpts_count) / playSpeed + 0.5f ); final TextureFrame _nextFrame = nextFrame; - char mode_C; - if( dt > maxVideoDelay ) { + if( dt_v > maxVideoDelay && d_vpts >= 0 /** || dt_av > MAXIMUM_AV_ASYNC */ && + ( audio_queued_ms >= audio_dequeued_ms || video_queue_growth > 0 ) ) + { cachedFrame = nextFrame; nextFrame = null; - mode_C = 'c'; - } else if ( !droppedFrame && dt < -maxVideoDelay && null != videoFramesDecoded && videoFramesDecoded.size() > 0 ) { + syncModeB = 'c'; + if( videoFramesFree.isEmpty() && audio_queued_ms < audio_dequeued_ms ) { + growVideoFrameBuffers(gl, video_queue_growth); + syncModeB = 'z'; + } + } else if ( dt_v < -maxVideoDelay && d_vpts < 0 && null != videoFramesDecoded && videoFramesDecoded.size() > 0 ) { // only drop if prev. 
frame has not been dropped and // frame is too late and one decoded frame is already available. dropFrame = true; - mode_C = 'd'; + syncModeB = 'd'; } else if( repeatedFrame > 0 ) { - mode_C = 'r'; + syncModeB = 'r'; } else { - mode_C = '_'; + syncModeB = '_'; } - video_pts_last = video_pts; + video_pts_last.set(video_pts); if( DEBUG_AVSYNC ) { - System.err.println( "AV"+mode_C+": dT "+(currentTimeMillis-lastTimeMillis)+", dt "+dt+"/"+maxVideoDelay+", r"+repeatedFrame+", "+ - getPerfStringImpl( video_scr, video_pts, d_vpts, - audio_scr, audio_pts, audio_pts_lb, d_apts, - video_dpts_avg_diff ) + - ", avg dpy-fps "+avg_dpy_duration+" ms/f, "+_nextFrame); + logout.println(currentMillis, "AV"+syncModeA+syncModeB+":"+resetModeA+resetModeV+ + ": dT "+(currentMillis-lastMillis)+", dt[v "+dt_v+", a "+dt_a+"]/"+maxVideoDelay+", "+ + getPerfStringImpl(currentMillis, video_pts, audio_pts, + audio_queued_ms, audio_pts_lb) + + ", avg dpy-fps "+avg_dpy_duration+" ms/f"); // , "+_nextFrame); } } } // has video frame @@ -1137,7 +1275,8 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { videoFramesFree.putBlocking(_lastFrame); } } - lastTimeMillis = currentTimeMillis; + lastMillis = currentMillis; + audio_queued_last_ms = audio_queued_ms; } while( dropFrame ); } catch (final InterruptedException e) { e.printStackTrace(); @@ -1180,7 +1319,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { pts = getNextTextureImpl(gl, nextFrame); postNextTextureImpl(gl); if( TimeFrameI.INVALID_PTS != pts ) { - newFrameAvailable(nextFrame, Platform.currentTimeMillis()); + newFrameAvailable(nextFrame, Clock.currentMillis()); gotVFrame[0] = true; } else { gotVFrame[0] = false; @@ -1208,17 +1347,6 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { @Override public final AudioSink getAudioSink() { return audioSink; } - /** - * To be called from implementation at 1st PTS after start - * w/ current pts value in milliseconds. - * @param audio_scr_t0 - */ - protected void setFirstAudioPTS2SCR(final int pts) { - if( audioSCR_reset ) { - audio_scr_t0 = Platform.currentTimeMillis() - pts; - audioSCR_reset = false; - } - } private void flushAllVideoFrames() { if( null != videoFramesFree ) { videoFramesFree.resetFull(videoFramesOrig); @@ -1229,34 +1357,43 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { cachedFrame = null; } private void resetAVPTSAndFlush() { - video_dpts_cum = 0; - video_dpts_count = 0; + resetSCR(av_scr); + audio_queued_last_ms = 0; resetAVPTS(); flushAllVideoFrames(); if( null != audioSink ) { audioSink.flush(); } } + private void resetSCR(final PTS pts) { + av_scr.set(pts); + audio_dpts_cum = 0; + audio_dpts_count = 0; + video_dpts_cum = 0; + video_dpts_count = 0; + } private void resetAVPTS() { nullFrameCount = 0; presentedFrameCount = 0; displayedFrameCount = 0; decodedFrameCount = 0; - audioSCR_reset = true; - videoSCR_reset = true; + video_scr_reset = true; + audio_scr_reset = true; } - private final int getVideoDPTSAvg() { - return (int) ( video_dpts_cum * (1.0f - VIDEO_DPTS_COEFF) + 0.5f ); + private static final int getDPTSAvg(final float dpts_cum, final int dpts_count) { + final int dpts_avg = (int) ( dpts_cum * (1.0f - AV_DPTS_COEFF) + 0.5f ); + final int dpts_avg_diff = dpts_count >= AV_DPTS_NUM ? 
dpts_avg : 0; + return dpts_avg_diff; } - private final void newFrameAvailable(final TextureFrame frame, final long currentTimeMillis) { + private final void newFrameAvailable(final TextureFrame frame, final long currentMillis) { decodedFrameCount++; // safe: only written-to either from stream-worker or user thread if( 0 == frame.getDuration() ) { // patch frame duration if not set already frame.setDuration( (int) frame_duration ); } synchronized(eventListenersLock) { for(final Iterator i = eventListeners.iterator(); i.hasNext(); ) { - i.next().newFrameAvailable(this, frame, currentTimeMillis); + i.next().newFrameAvailable(this, frame, currentMillis); } } } @@ -1337,7 +1474,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { return null; } - WorkerThread.StateCallback stateCB = (final WorkerThread self, final WorkerThread.StateCallback.State cause) -> { + private final WorkerThread.StateCallback stateCB = (final WorkerThread self, final WorkerThread.StateCallback.State cause) -> { switch( cause ) { case INIT: break; @@ -1366,7 +1503,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { break; } }; - WorkerThread.Callback action = (final WorkerThread self) -> { + private final WorkerThread.Callback action = (final WorkerThread self) -> { final GL gl; TextureFrame nextFrame = null; try { @@ -1384,10 +1521,14 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { if( STREAM_WORKER_DELAY > 0 ) { java.lang.Thread.sleep(STREAM_WORKER_DELAY); } + final int capacityDelta = videoFramesFree.capacity() - videoFramesDecoded.capacity(); + if( videoFramesDecoded.isFull() && capacityDelta > 0 ) { + videoFramesDecoded.growFullBuffer( capacityDelta ); + } if( !videoFramesDecoded.put(nextFrame) ) { throw new InternalError("XXX: free "+videoFramesFree+", decoded "+videoFramesDecoded+", "+GLMediaPlayerImpl.this); } - newFrameAvailable(nextFrame, Platform.currentTimeMillis()); + newFrameAvailable(nextFrame, Clock.currentMillis()); nextFrame = null; } else { // audio only @@ -1408,7 +1549,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { // state transition incl. 
notification self.pause(false); if( DEBUG || DEBUG_AVSYNC ) { - System.err.println( "AV-EOS (StreamWorker): EOS_PTS "+(TimeFrameI.END_OF_STREAM_PTS == vPTS)+", "+GLMediaPlayerImpl.this); + logout.println( "AV-EOS (StreamWorker): EOS_PTS "+(TimeFrameI.END_OF_STREAM_PTS == vPTS)+", "+GLMediaPlayerImpl.this); } pauseImpl(true, new GLMediaPlayer.EventMask(GLMediaPlayer.EventMask.Bit.EOS)); } @@ -1455,9 +1596,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected final void attributesUpdated(final GLMediaPlayer.EventMask eventMask) { if( !eventMask.isZero() ) { - final long now = Platform.currentTimeMillis(); + final long now = Clock.currentMillis(); if( DEBUG ) { - System.err.println("GLMediaPlayer.AttributesChanged: "+eventMask+", state "+state+", when "+now); + logout.println("GLMediaPlayer.AttributesChanged: "+eventMask+", state "+state+", when "+now); } synchronized(eventListenersLock) { for(final Iterator i = eventListeners.iterator(); i.hasNext(); ) { @@ -1559,7 +1700,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { } if( wasUninitialized ) { if( DEBUG ) { - System.err.println("XXX Initialize @ updateAttributes: "+this); + logout.println("XXX Initialize @ updateAttributes: "+this); } } attributesUpdated(eventMask); @@ -1568,7 +1709,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { protected void setIsGLOriented(final boolean isGLOriented) { if( isInGLOrientation != isGLOriented ) { if( DEBUG ) { - System.err.println("XXX gl-orient "+isInGLOrientation+" -> "+isGLOriented); + logout.println("XXX gl-orient "+isInGLOrientation+" -> "+isGLOriented); } isInGLOrientation = isGLOriented; if( null != videoFramesOrig ) { @@ -1631,9 +1772,9 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { final String loc = ( null != streamLoc ) ? streamLoc.toString() : "" ; final int freeVideoFrames = null != videoFramesFree ? videoFramesFree.size() : 0; final int decVideoFrames = null != videoFramesDecoded ? videoFramesDecoded.size() : 0; - final int video_scr = video_scr_pts + (int) ( ( Platform.currentTimeMillis() - video_scr_t0 ) * playSpeed ); + final int video_scr_ms = av_scr.get(Clock.currentMillis()); final String camPath = null != cameraPath ? 
", camera: "+cameraPath : ""; - return getClass().getSimpleName()+"["+state+", vSCR "+video_scr+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s), z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+ + return getClass().getSimpleName()+"["+state+", vSCR "+video_scr_ms+", frames[p "+presentedFrameCount+", d "+decodedFrameCount+", t "+videoFrames+" ("+tt+" s), z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+ "speed "+playSpeed+", "+bps_stream+" bps, hasSW "+(null!=streamWorker)+ ", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+", tagt "+toHexString(textureTarget)+", ifmt "+toHexString(textureInternalFormat)+", fmt "+toHexString(textureFormat)+", type "+toHexString(textureType)+"], "+ "Video[id "+vid+", <"+vcodec+">, "+width+"x"+height+", glOrient "+isInGLOrientation+", "+fps+" fps, "+frame_duration+" fdur, "+bps_video+" bps], "+ @@ -1642,48 +1783,44 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { @Override public final String getPerfString() { - final long currentTimeMillis = Platform.currentTimeMillis(); - final int video_scr = video_scr_pts + (int) ( ( currentTimeMillis - video_scr_t0 ) * playSpeed ); - final int d_vpts = video_pts_last - video_scr; - final int audio_scr = (int) ( ( currentTimeMillis - audio_scr_t0 ) * playSpeed ); - final int audio_pts = getAudioPTSImpl(); - final int audio_pts_lb = getLastBufferedAudioPTSImpl(); - final int d_apts = audio_pts - audio_scr; - return getPerfStringImpl( video_scr, video_pts_last, d_vpts, audio_scr, audio_pts, audio_pts_lb, d_apts, getVideoDPTSAvg() ); - } - private final String getPerfStringImpl(final int video_scr, final int video_pts, final int d_vpts, - final int audio_scr, final int audio_pts, final int autio_pts_lb, final int d_apts, - final int video_dpts_avg_diff) { + final long currentMillis = Clock.currentMillis(); + final PTS audio_pts = getAudioPTSImpl(); + final int audio_queued_ms = getAudioQueuedDuration(); + final int audio_pts_lb = getLastBufferedAudioPTS(); + return getPerfStringImpl(currentMillis, video_pts_last, audio_pts, audio_queued_ms, audio_pts_lb); + } + private final String getPerfStringImpl(final long currentMillis, final PTS video_pts, final PTS audio_pts, + final int audio_queued_ms, final int autio_pts_lb) { final float tt = getDuration() / 1000.0f; - final String audioSinkInfo; - final AudioSink audioSink = getAudioSink(); - if( false && null != audioSink ) { - audioSinkInfo = String.format("AudioSink[frames [p %d, q %d, f %d, c %d], dur %.1f ms, bytes %d]", - audioSink.getEnqueuedFrameCount(), audioSink.getQueuedFrameCount(), - audioSink.getFreeFrameCount(), audioSink.getFrameCount(), 1000f*audioSink.getQueuedTime(), - audioSink.getQueuedByteCount() - ); + final int audio_dequeued_ms; + if( audio_queued_last_ms > audio_queued_ms ) { + audio_dequeued_ms = audio_queued_last_ms - audio_queued_ms; } else { - audioSinkInfo = ""; + audio_dequeued_ms = 0; } - final int freeVideoFrames, decVideoFrames; - if( null != videoFramesFree ) { - freeVideoFrames = videoFramesFree.size(); - decVideoFrames = videoFramesDecoded.size(); + // d_apts > 0: audio too slow (behind SCR) repeat video frame, < 0: audio too fast (in front of SCR) drop video frame + final int d_apts; + if( audio_pts.isValid() ) { + d_apts = av_scr.diff(currentMillis, audio_pts); } else { - freeVideoFrames = 0; - decVideoFrames = 0; - } - if( false ) { - return state+", frames[(p "+presentedFrameCount+", d "+decodedFrameCount+") / "+videoFrames+", 
"+tt+" s, z "+nullFrameCount+" / "+maxNullFrameCountUntilEOS+"], "+ - "speed " + playSpeed+", dAV "+( d_vpts - d_apts )+", vSCR "+video_scr+", vpts "+video_pts+", dSCR["+d_vpts+", avrg "+video_dpts_avg_diff+"], "+ - "aSCR "+audio_scr+", apts "+audio_pts+" ( lb "+autio_pts_lb+", d "+d_apts+" ), "+audioSinkInfo+ - ", Texture[count "+textureCount+", free "+freeVideoFrames+", dec "+decVideoFrames+"]"; - } else { - return "frames[p "+presentedFrameCount+", d "+decodedFrameCount+"], "+ - "dAV "+( video_pts - audio_pts )+"/"+( d_vpts - d_apts )+", vSCR "+video_scr+", vpts "+video_pts+", dSCR["+d_vpts+", avrg "+video_dpts_avg_diff+"], "+ - "aSCR "+audio_scr+", apts "+audio_pts+" ( lb "+autio_pts_lb+", d "+d_apts+" )]"; + d_apts = 0; } + // d_vpts > 0: video too fast (in front of SCR) repeat frame, < 0: video too slow (behind SCR) drop frame + final int d_vpts = video_pts.getLast() - av_scr.get(currentMillis); // equals: video_pts.diff(currentMillis, av_scr); + + final int video_dpts_avrg = getDPTSAvg(video_dpts_cum, video_dpts_count); + final int audio_dpts_avrg = getDPTSAvg(audio_dpts_cum, audio_dpts_count); + + final int d_avpts0 = video_pts.diff(currentMillis, audio_pts); + final int d_avpts1 = video_dpts_avrg - audio_dpts_avrg; + + final int vFramesQueued = this.videoFramesDecoded.size(); + final int vFramesFree = this.videoFramesFree.size(); + return "frames[p"+presentedFrameCount+" d"+decodedFrameCount+" q"+vFramesQueued+" r"+repeatedFrame+" f"+vFramesFree+"/"+videoFramesOrig.length+"], "+ + "dAV[v-a "+d_avpts0+", avg "+d_avpts1+"], SCR "+av_scr.get(currentMillis)+ + ", vpts "+video_pts.getLast()+", dSCR["+d_vpts+", avg "+video_dpts_avrg+"]"+ + ", apts "+audio_pts.get(currentMillis)+" dSCR["+d_apts+", avg "+audio_dpts_avrg+ + "] (deq "+audio_dequeued_ms+"ms, left "+audio_queued_ms+"ms, lb "+autio_pts_lb+")]"; } @Override @@ -1744,7 +1881,7 @@ public abstract class GLMediaPlayerImpl implements GLMediaPlayer { return Integer.parseInt(val); } catch (final NumberFormatException nfe) { if(DEBUG) { - System.err.println("Not a valid integer for <"+key+">: <"+val+">"); + logout.println("Not a valid integer for <"+key+">: <"+val+">"); } } return 0; diff --git a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java index be440adaf..d616020e7 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java +++ b/src/jogl/classes/jogamp/opengl/util/av/NullGLMediaPlayer.java @@ -34,11 +34,13 @@ import java.nio.ByteBuffer; import com.jogamp.opengl.GL; import com.jogamp.opengl.GLException; import com.jogamp.opengl.GLProfile; - +import com.jogamp.common.av.PTS; import com.jogamp.common.nio.Buffers; +import com.jogamp.common.os.Clock; import com.jogamp.common.os.Platform; import com.jogamp.common.util.IOUtil; import com.jogamp.opengl.util.av.GLMediaPlayer; +import com.jogamp.opengl.util.av.GLMediaPlayer.State; import com.jogamp.opengl.util.texture.Texture; import com.jogamp.opengl.util.texture.TextureData; import com.jogamp.opengl.util.texture.TextureIO; @@ -50,8 +52,7 @@ import com.jogamp.opengl.util.texture.TextureSequence; */ public class NullGLMediaPlayer extends GLMediaPlayerImpl { private TextureData texData = null; - private int pos_ms = 0; - private long pos_start = 0; + private final PTS video_pts = new PTS( () -> { return State.Playing == getState() ? 
getPlaySpeed() : 0f; } ); public NullGLMediaPlayer() { super(); @@ -65,7 +66,7 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl { @Override protected final boolean resumeImpl() { - pos_start = Platform.currentTimeMillis(); + video_pts.setSCR(Clock.currentMillis()); return true; } @@ -80,24 +81,29 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl { @Override protected final int seekImpl(final int msec) { - pos_ms = msec; + video_pts.setPTS(msec); validatePos(); - return pos_ms; + return msec; } @Override protected final int getNextTextureImpl(final GL gl, final TextureFrame nextFrame) { - final int pts = getAudioPTSImpl(); + final int pts = getLastBufferedAudioPTS(); nextFrame.setPTS( pts ); return pts; } @Override - protected final int getAudioPTSImpl() { - pos_ms = (int) ( Platform.currentTimeMillis() - pos_start ); + protected PTS getAudioPTSImpl() { return video_pts; } + @Override + protected final PTS getUpdatedAudioPTS() { validatePos(); - return pos_ms; + return video_pts; } + @Override + protected int getAudioQueuedDuration() { return 0; } + @Override + protected int getLastBufferedAudioPTS() { return video_pts.getLast(); } @Override protected final void destroyImpl() { @@ -176,11 +182,11 @@ public class NullGLMediaPlayer extends GLMediaPlayerImpl { private void validatePos() { boolean considerPausing = false; - if( 0 > pos_ms) { - pos_ms = 0; + if( 0 > video_pts.getLast() ) { + video_pts.setPTS(0); considerPausing = true; - } else if ( pos_ms > getDuration() ) { - pos_ms = getDuration(); + } else if ( video_pts.getLast() > getDuration() ) { + video_pts.setPTS( getDuration() ); considerPausing = true; } if( considerPausing && State.Playing == getState() ) { diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java index c13e3359a..3035fc013 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/FFMPEGMediaPlayer.java @@ -279,7 +279,8 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { private int vPlanes = 0; private int vBitsPerPixel = 0; private int vBytesPerPixelPerPlane = 0; - private int texWidth, texHeight; // overall (stuffing planes in one texture) + private int vWidth = 0, vHeight = 0; + private int texWidth = 0, texHeight = 0; // overall (stuffing planes in one texture) private String singleTexComp = "r"; private final GLPixelStorageModes psm; @@ -343,6 +344,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { if( GLMediaPlayer.STREAM_ID_NONE == aid ) { audioSink = AudioSinkFactory.createNull(); } else { + // audioSink = new jogamp.common.av.JavaSoundAudioSink(); audioSink = AudioSinkFactory.createDefault(FFMPEGMediaPlayer.class.getClassLoader()); } { @@ -457,39 +459,50 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { if( null != gl && STREAM_ID_NONE != getVID() ) { int tf, tif=GL.GL_RGBA; // texture format and internal format final int tt = GL.GL_UNSIGNED_BYTE; + final int texBytesPerPixel; switch(vBytesPerPixelPerPlane) { - case 1: - if( gl.isGL3ES3() ) { - // RED is supported on ES3 and >= GL3 [core]; ALPHA is deprecated on core - tf = GL2ES2.GL_RED; tif=GL2ES2.GL_RED; singleTexComp = "r"; - } else { - // ALPHA is supported on ES2 and GL2, i.e. 
<= GL3 [core] or compatibility - tf = GL.GL_ALPHA; tif=GL.GL_ALPHA; singleTexComp = "a"; - } - break; + case 1: if( gl.isGL3ES3() ) { + // RED is supported on ES3 and >= GL3 [core]; ALPHA is deprecated on core + tf = GL2ES2.GL_RED; tif=GL2ES2.GL_RED; singleTexComp = "r"; + } else { + // ALPHA is supported on ES2 and GL2, i.e. <= GL3 [core] or compatibility + tf = GL.GL_ALPHA; tif=GL.GL_ALPHA; singleTexComp = "a"; + } + texBytesPerPixel = 1; + break; case 2: if( vPixelFmt == VideoPixelFormat.YUYV422 || vPixelFmt == VideoPixelFormat.UYVY422 ) { // YUYV422: // < packed YUV 4:2:2, 2x 16bpp, Y0 Cb Y1 Cr // UYVY422: // < packed YUV 4:2:2, 2x 16bpp, Cb Y0 Cr Y1 // Both stuffed into RGBA half width texture - tf = GL.GL_RGBA; tif=GL.GL_RGBA; break; + tf = GL.GL_RGBA; tif=GL.GL_RGBA; + texBytesPerPixel = 4; } else { - tf = GL2ES2.GL_RG; tif=GL2ES2.GL_RG; break; + tf = GL2ES2.GL_RG; tif=GL2ES2.GL_RG; + texBytesPerPixel = 2; } - case 3: tf = GL.GL_RGB; tif=GL.GL_RGB; break; + break; + + case 3: tf = GL.GL_RGB; tif=GL.GL_RGB; texBytesPerPixel = 3; break; + case 4: if( vPixelFmt == VideoPixelFormat.BGRA ) { - tf = GL.GL_BGRA; tif=GL.GL_RGBA; break; + tf = GL.GL_BGRA; tif=GL.GL_RGBA; } else { - tf = GL.GL_RGBA; tif=GL.GL_RGBA; break; + tf = GL.GL_RGBA; tif=GL.GL_RGBA; } + texBytesPerPixel = 4; + break; + default: throw new RuntimeException("Unsupported bytes-per-pixel / plane "+vBytesPerPixelPerPlane); } setTextureFormat(tif, tf); setTextureType(tt); setIsGLOriented(false); - if(DEBUG) { - System.err.println("initGL: p5: video "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane+ - ", tex "+texWidth+"x"+texHeight); + if(DEBUG || true) { + final float texSizeMB = ( texWidth * texHeight * (float)texBytesPerPixel ) / 1000000f; + System.err.printf("initGL: p5: video %s, bpp %d, %d x %d, %d b/pp, %d planes, tex %d x %d x %d -> %.2fMB%n", + vPixelFmt.toString(), vBitsPerPixel, vWidth, vHeight, vBytesPerPixelPerPlane, vPlanes, + texWidth, texHeight, texBytesPerPixel, texSizeMB); } } } @@ -601,6 +614,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { vPlanes = 0; vBitsPerPixel = 0; vBytesPerPixelPerPlane = 0; + vWidth = 0; vHeight = 0; texWidth = 0; texHeight = 0; final int[] vTexWidth = { 0, 0, 0 }; // per plane @@ -610,6 +624,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { vPlanes = planes; vBitsPerPixel = bitsPerPixel; vBytesPerPixelPerPlane = bytesPerPixelPerPlane; + vWidth = vW; vHeight = vH; vTexWidth[0] = tWd0; vTexWidth[1] = tWd1; vTexWidth[2] = tWd2; switch(vPixelFmt) { @@ -669,7 +684,8 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { if(DEBUG) { System.err.println("audio: id "+aid+", fmt "+aSampleFmt+", "+avChosenAudioFormat+", aFrameSize/fc "+audioSamplesPerFrameAndChannel); - System.err.println("video: id "+vid+", fmt "+vW+"x"+vH+", "+vPixelFmt+", planes "+vPlanes+", bpp "+vBitsPerPixel+"/"+vBytesPerPixelPerPlane); + System.err.printf("video: id %d, %s, bpp %d, %d x %d, %d b/pp, %d planes, tex %d x %d%n", + vid, vPixelFmt.toString(), vBitsPerPixel, vWidth, vHeight, vBytesPerPixelPerPlane, vPlanes, texWidth, texHeight); for(int i=0; i<3; i++) { System.err.println("video: p["+i+"]: "+vTexWidth[i]); } @@ -943,8 +959,7 @@ public class FFMPEGMediaPlayer extends GLMediaPlayerImpl { } final void pushSound(final ByteBuffer sampleData, final int data_size, final int audio_pts) { - setFirstAudioPTS2SCR( audio_pts ); - if( !isAudioMuted() && ( 1.0f == getPlaySpeed() || audioSinkPlaySpeedSet ) ) { + if( audioStreamEnabled() ) { 
audioSink.enqueueData( audio_pts, sampleData, data_size); } } diff --git a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java index 336084734..2c2fc57d0 100644 --- a/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java +++ b/src/jogl/classes/jogamp/opengl/util/av/impl/OMXGLMediaPlayer.java @@ -30,10 +30,13 @@ package jogamp.opengl.util.av.impl; import java.io.IOException; +import com.jogamp.common.av.PTS; +import com.jogamp.common.os.Clock; import com.jogamp.opengl.GL; import com.jogamp.opengl.GLException; import com.jogamp.opengl.egl.EGL; +import com.jogamp.opengl.util.av.GLMediaPlayer.State; import com.jogamp.opengl.util.texture.TextureSequence; import jogamp.opengl.util.av.EGLMediaPlayerImpl; @@ -123,9 +126,21 @@ public class OMXGLMediaPlayer extends EGLMediaPlayerImpl { } @Override - protected int getAudioPTSImpl() { - return 0!=moviePtr ? _getCurrentPosition(moviePtr) : 0; + protected PTS getAudioPTSImpl() { return audio_pts; } + @Override + protected PTS getUpdatedAudioPTS() { + if( 0 != moviePtr ) { + audio_pts.set(Clock.currentMillis(), _getCurrentPosition(moviePtr)); + } else { + audio_pts.set(Clock.currentMillis(), 0); + } + return audio_pts; } + @Override + protected int getAudioQueuedDuration() { return 0; } + @Override + protected int getLastBufferedAudioPTS() { return audio_pts.getLast(); } + private final PTS audio_pts = new PTS( () -> { return State.Playing == getState() ? getPlaySpeed() : 0f; } ); @Override protected boolean setPlaySpeedImpl(final float rate) { -- cgit v1.2.3
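
The patch builds on the SCR-aware PTS class added in the referenced GlueGen commit; its implementation is not part of this diff. As a rough model of the semantics the code above relies on (an assumption for illustration, not GlueGen's actual API), such a clock pairs the last stream timestamp with the system-clock (SCR) time at which it was sampled and extrapolates the current position by the elapsed SCR time scaled by the play speed:

    import java.util.function.Supplier;

    // Hypothetical reduction of an SCR-aware PTS; names are illustrative only.
    final class PTSSketch {
        private final Supplier<Float> speed; // play speed; 0f while paused
        private long scrMs; // system-clock (SCR) time when ptsMs was sampled
        private int ptsMs;  // last known stream PTS in milliseconds

        PTSSketch(final Supplier<Float> speed) { this.speed = speed; }

        void set(final long scrMs, final int ptsMs) { this.scrMs = scrMs; this.ptsMs = ptsMs; }
        int getLast() { return ptsMs; }

        /** Current position: last PTS plus elapsed SCR time scaled by play speed. */
        int get(final long nowMs) { return ptsMs + (int)( ( nowMs - scrMs ) * speed.get() ); }

        /** How far this clock is ahead (>0) or behind (<0) the other one at nowMs. */
        int diff(final long nowMs, final PTSSketch other) { return get(nowMs) - other.get(nowMs); }
    }

This also motivates the recurring constructor pattern new PTS( () -> { return State.Playing == state ? playSpeed : 0f; } ) in the patch: while paused the supplier yields 0, so the extrapolated clock freezes instead of running ahead of the stalled stream.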
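
With such a clock, the core repeat/drop policy of GLMediaPlayerImpl.getNextTexture() reduces to comparing a video frame's PTS against the master clock: the audio PTS when an unmuted audio stream is playing, the plain SCR otherwise. The following is a minimal sketch of that decision using the 22 ms MAX_VIDEO_ASYNC threshold; the cumulative averaging, SCR resync and queue-growth handling of the actual patch are omitted:

    // Per-frame A/V sync decision, reduced to its core; cf. getNextTexture().
    final class AVSyncDecision {
        static final int MAX_VIDEO_ASYNC = 22; // ms, as defined in GLMediaPlayer

        enum Action { PRESENT, REPEAT_PREVIOUS, DROP_AND_FETCH_NEXT }

        static Action decide(final int videoPtsMs, final int masterClockMs) {
            // d_vpts > 0: video ahead of the master clock; < 0: video behind it
            final int d_vpts = videoPtsMs - masterClockMs;
            if( d_vpts > MAX_VIDEO_ASYNC ) {
                return Action.REPEAT_PREVIOUS;     // too early: cache frame, repeat last
            } else if( d_vpts < -MAX_VIDEO_ASYNC ) {
                return Action.DROP_AND_FETCH_NEXT; // too late: drop, fetch next
            }
            return Action.PRESENT;                 // within the A/V granularity
        }
    }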
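
The dt_v/dt_a values driving that decision are exponential moving averages: each frame folds the current PTS distance d into a running sum cum = d + AV_DPTS_COEFF * cum, and getDPTSAvg() normalizes it via cum * (1 - AV_DPTS_COEFF) once at least AV_DPTS_NUM samples have been seen. The coefficient 0.7943282 = exp(ln(0.01)/20) is chosen so that a sample's weight decays to 1% after AV_DPTS_NUM = 20 frames. A small self-contained check of the normalization:

    // Feeding a constant 10 ms diff must converge to an average of ~10 ms,
    // since the geometric weights 1 + c + c^2 + ... sum to 1/(1-c).
    public class DptsAvgDemo {
        public static void main(final String[] args) {
            final float c = (float) Math.exp(Math.log(0.01) / 20); // 0.7943282
            float cum = 0f;
            for(int i = 0; i < 100; ++i) {
                cum = 10f + c * cum; // as in: video_dpts_cum = d_vpts + AV_DPTS_COEFF * video_dpts_cum
            }
            System.out.println( cum * (1f - c) ); // prints ~10.0, the true average
        }
    }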
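
The time span quoted in the new getVideoPTS()/getAudioPTS()/getDuration() javadoc follows directly from the integer storage: Integer.MAX_VALUE = 2'147'483'647 ms, and 2'147'483'647 / 1000 ≈ 2'147'483.6 s, which divided by 86'400 s/day gives ≈ 24.855 days.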