Commit e7309f3b authored by Thomas Guillem, committed by Jean-Baptiste Kempf

audiotrack: increase audio latency

Queue more than one audio buffer at a time (up to 1 second of audio). TimeGet
will report a larger delay, but the module becomes more stable and more
efficient.

AudioTrack delay is now calculated by JNIThread when a buffer is written.
Overall delay is calculated in TimeGet by adding AudioTrack delay and queued
delay.
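
As a rough illustration (sketch only, not part of the patch; the helper name
below is made up), the reported delay now reduces to the sum of two counters
converted from frames to microseconds:

    /* Sketch only: how TimeGet derives its result in the new design.
     * i_samples_queued counts frames still waiting in the command queue and
     * i_audiotrack_delay is the AudioTrack-side delay measured by JNIThread
     * after each write; both are in frames and protected by p_sys->mutex. */
    static mtime_t DelaySketch( aout_sys_t *p_sys )
    {
        uint32_t i_frames = p_sys->i_samples_queued + p_sys->i_audiotrack_delay;
        return (mtime_t)i_frames * CLOCK_FREQ / p_sys->fmt.i_rate; /* frames -> us */
    }
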
Signed-off-by: Jean-Baptiste Kempf <jb@videolan.org>
parent a39f80a6
@@ -28,15 +28,25 @@
 #include <assert.h>
 #include <jni.h>
 #include <dlfcn.h>
+#include <stdbool.h>
+#include <sys/queue.h>
 
+#include <vlc_atomic.h>
 #include <vlc_common.h>
 #include <vlc_plugin.h>
 #include <vlc_aout.h>
 #include <vlc_threads.h>
 
+/* Maximum VLC buffers queued by the internal queue in microseconds. This delay
+ * doesn't include audiotrack delay */
+#define MAX_QUEUE_US INT64_C(1000000) // 1000ms
+
 static int  Open( vlc_object_t * );
 static void Close( vlc_object_t * );
 
+struct thread_cmd;
+typedef TAILQ_HEAD(, thread_cmd) THREAD_CMD_QUEUE;
+
 struct aout_sys_t {
     /* sw gain */
     float soft_gain;
@@ -44,19 +54,24 @@ struct aout_sys_t {
     /* Owned by JNIThread */
     jobject p_audiotrack; /* AudioTrack ref */
+    jobject p_audioTimestamp; /* AudioTimestamp ref */
     jbyteArray p_bytearray; /* ByteArray ref */
     size_t i_bytearray_size; /* size of the ByteArray */
     audio_sample_format_t fmt; /* fmt setup by Start */
-    uint32_t i_samples_written; /* samples written since start/flush */
-    uint32_t i_dsp_initial; /* initial delay of the dsp */
-    int i_bytes_per_frame; /* byte per frame */
+    uint32_t i_pos_initial; /* initial position set by getPlaybackHeadPosition */
+    uint32_t i_samples_written; /* number of samples written since last flush */
+    mtime_t i_play_time; /* time when play was called */
 
     /* JNIThread control */
     vlc_mutex_t mutex;
     vlc_cond_t cond;
     vlc_thread_t thread;
+
+    /* Shared between two threads, must be locked */
     bool b_thread_run; /* is thread alive */
-    struct thread_cmd *p_cmd; /* actual cmd process by JNIThread */
+    THREAD_CMD_QUEUE thread_cmd_queue; /* thread cmd queue */
+    uint32_t i_samples_queued; /* number of samples queued */
+    uint32_t i_audiotrack_delay; /* audiotrack delay in samples */
 };
 
 /* Soft volume helper */
@@ -75,14 +90,15 @@ vlc_module_begin ()
     set_callbacks( Open, Close )
 vlc_module_end ()
 
-struct thread_cmd {
+struct thread_cmd
+{
+    TAILQ_ENTRY(thread_cmd) next;
     enum {
         CMD_START,
         CMD_STOP,
         CMD_PLAY,
         CMD_PAUSE,
         CMD_FLUSH,
-        CMD_TIME_GET,
         CMD_DONE,
     } id;
     union {
@@ -105,11 +121,8 @@ struct thread_cmd {
             int i_ret;
             audio_sample_format_t *p_fmt;
         } start;
-        struct {
-            int i_ret;
-            mtime_t i_delay;
-        } time_get;
     } out;
+    void ( *pf_destroy )( struct thread_cmd * );
 };
 
 #define THREAD_NAME "android_audiotrack"
@@ -130,6 +143,7 @@ static struct
         jmethodID pause;
         jmethodID write;
         jmethodID getPlaybackHeadPosition;
+        jmethodID getTimestamp;
         jmethodID getMinBufferSize;
         jint MODE_STREAM;
         jint ERROR;
@@ -149,6 +163,12 @@ static struct
         bool has_ERROR_DEAD_OBJECT;
         jint STREAM_MUSIC;
     } AudioManager;
+    struct {
+        jclass clazz;
+        jmethodID ctor;
+        jfieldID framePosition;
+        jfieldID nanoTime;
+    } AudioTimestamp;
 } jfields;
 
 /* init all jni fields.
@@ -210,6 +230,7 @@ InitJNIFields( audio_output_t *p_aout )
         } \
     } while( 0 )
 
+    /* AudioTrack class init */
     GET_CLASS( "android/media/AudioTrack", true );
     jfields.AudioTrack.clazz = (jclass) (*env)->NewGlobalRef( env, clazz );
     CHECK_EXCEPTION( "NewGlobalRef", true );
@@ -221,8 +242,12 @@ InitJNIFields( audio_output_t *p_aout )
     GET_ID( GetMethodID, AudioTrack.flush, "flush", "()V", true );
     GET_ID( GetMethodID, AudioTrack.pause, "pause", "()V", true );
     GET_ID( GetMethodID, AudioTrack.write, "write", "([BII)I", true );
+    GET_ID( GetMethodID, AudioTrack.getTimestamp,
+            "getTimestamp", "(Landroid/media/AudioTimestamp;)Z", false );
     GET_ID( GetMethodID, AudioTrack.getPlaybackHeadPosition,
             "getPlaybackHeadPosition", "()I", true );
     GET_ID( GetStaticMethodID, AudioTrack.getMinBufferSize, "getMinBufferSize",
             "(III)I", true );
     GET_CONST_INT( AudioTrack.MODE_STREAM, "MODE_STREAM", true );
@@ -231,6 +256,28 @@ InitJNIFields( audio_output_t *p_aout )
     GET_CONST_INT( AudioTrack.ERROR_INVALID_OPERATION ,
                    "ERROR_INVALID_OPERATION", true );
 
+    /* AudioTimestamp class init (if any) */
+    if( jfields.AudioTrack.getTimestamp )
+    {
+        GET_CLASS( "android/media/AudioTimestamp", true );
+        jfields.AudioTimestamp.clazz = (jclass) (*env)->NewGlobalRef( env,
+                                                                      clazz );
+        CHECK_EXCEPTION( "NewGlobalRef", true );
+        GET_ID( GetMethodID, AudioTimestamp.ctor, "<init>", "()V", true );
+        GET_ID( GetFieldID, AudioTimestamp.framePosition,
+                "framePosition", "J", true );
+        GET_ID( GetFieldID, AudioTimestamp.nanoTime,
+                "nanoTime", "J", true );
+    } else
+    {
+        jfields.AudioTimestamp.clazz = NULL;
+        jfields.AudioTimestamp.ctor = NULL;
+        jfields.AudioTimestamp.framePosition = NULL;
+        jfields.AudioTimestamp.nanoTime = NULL;
+    }
+
+    /* AudioFormat class init */
     GET_CLASS( "android/media/AudioFormat", true );
     GET_CONST_INT( AudioFormat.ENCODING_PCM_8BIT, "ENCODING_PCM_8BIT", true );
     GET_CONST_INT( AudioFormat.ENCODING_PCM_16BIT, "ENCODING_PCM_16BIT", true );
@@ -244,6 +291,7 @@ InitJNIFields( audio_output_t *p_aout )
     GET_CONST_INT( AudioFormat.CHANNEL_OUT_MONO, "CHANNEL_OUT_MONO", true );
     GET_CONST_INT( AudioFormat.CHANNEL_OUT_STEREO, "CHANNEL_OUT_STEREO", true );
 
+    /* AudioManager class init */
     GET_CLASS( "android/media/AudioManager", true );
     GET_CONST_INT( AudioManager.ERROR_DEAD_OBJECT, "ERROR_DEAD_OBJECT", false );
     jfields.AudioManager.has_ERROR_DEAD_OBJECT = field != NULL;
@@ -283,48 +331,161 @@ check_exception( JNIEnv *env, bool *p_error, audio_output_t *p_aout,
 #define JNI_CALL( what, obj, method, ... ) (*env)->what( env, obj, method, ##__VA_ARGS__ )
 
 #define JNI_CALL_INT( obj, method, ... ) JNI_CALL( CallIntMethod, obj, method, ##__VA_ARGS__ )
+#define JNI_CALL_BOOL( obj, method, ... ) JNI_CALL( CallBooleanMethod, obj, method, ##__VA_ARGS__ )
 #define JNI_CALL_VOID( obj, method, ... ) JNI_CALL( CallVoidMethod, obj, method, ##__VA_ARGS__ )
 #define JNI_CALL_STATIC_INT( clazz, method, ... ) JNI_CALL( CallStaticIntMethod, clazz, method, ##__VA_ARGS__ )
 
 #define JNI_AT_NEW( ... ) JNI_CALL( NewObject, jfields.AudioTrack.clazz, jfields.AudioTrack.ctor, ##__VA_ARGS__ )
 #define JNI_AT_CALL_INT( method, ... ) JNI_CALL_INT( p_sys->p_audiotrack, jfields.AudioTrack.method, ##__VA_ARGS__ )
+#define JNI_AT_CALL_BOOL( method, ... ) JNI_CALL_BOOL( p_sys->p_audiotrack, jfields.AudioTrack.method, ##__VA_ARGS__ )
 #define JNI_AT_CALL_VOID( method, ... ) JNI_CALL_VOID( p_sys->p_audiotrack, jfields.AudioTrack.method, ##__VA_ARGS__ )
 #define JNI_AT_CALL_STATIC_INT( method, ... ) JNI_CALL( CallStaticIntMethod, jfields.AudioTrack.clazz, jfields.AudioTrack.method, ##__VA_ARGS__ )
 
-static int
-JNIThread_TimeGet( JNIEnv *env, bool *p_error, audio_output_t *p_aout,
-                   mtime_t *p_delay )
-{
-    VLC_UNUSED( p_error );
-    aout_sys_t *p_sys = p_aout->sys;
-    uint32_t dsp;
-
-    /* Android doc:
-     * getPlaybackHeadPosition: Returns the playback head position expressed in
-     * frames. Though the "int" type is signed 32-bits, the value should be
-     * reinterpreted as if it is unsigned 32-bits. That is, the next position
-     * after 0x7FFFFFFF is (int) 0x80000000. This is a continuously advancing
-     * counter. It will wrap (overflow) periodically, for example approximately
-     * once every 27:03:11 hours:minutes:seconds at 44.1 kHz. It is reset to
-     * zero by flush(), reload(), and stop().
-     */
-    dsp = (uint32_t )JNI_AT_CALL_INT( getPlaybackHeadPosition );
-
-    if( p_sys->i_samples_written == 0 ) {
-        p_sys->i_dsp_initial = dsp;
-        return -1;
-    }
-
-    dsp -= p_sys->i_dsp_initial;
-    if( dsp == 0 )
-        return -1;
-
-    if( p_delay )
-        *p_delay = ((mtime_t)p_sys->i_samples_written - dsp) *
-                        CLOCK_FREQ / p_sys->fmt.i_rate;
-
-    return 0;
-}
+#define JNI_AUDIOTIMESTAMP_GET_LONG( field ) JNI_CALL( GetLongField, p_sys->p_audioTimestamp, jfields.AudioTimestamp.field )
+
+static inline mtime_t
+frames_to_us( aout_sys_t *p_sys, uint32_t i_nb_frames )
+{
+    return i_nb_frames * CLOCK_FREQ / p_sys->fmt.i_rate;
+}
+#define FRAMES_TO_US(x) frames_to_us( p_sys, (x) )
+
+static struct thread_cmd *
+ThreadCmd_New( int id )
+{
+    struct thread_cmd *p_cmd = calloc( 1, sizeof(struct thread_cmd) );
+
+    if( p_cmd )
+        p_cmd->id = id;
+
+    return p_cmd;
+}
+
+static void
+ThreadCmd_InsertHead( aout_sys_t *p_sys, struct thread_cmd *p_cmd )
+{
+    TAILQ_INSERT_HEAD( &p_sys->thread_cmd_queue, p_cmd, next);
+    vlc_cond_signal( &p_sys->cond );
+}
+
+static void
+ThreadCmd_InsertTail( aout_sys_t *p_sys, struct thread_cmd *p_cmd )
+{
+    TAILQ_INSERT_TAIL( &p_sys->thread_cmd_queue, p_cmd, next);
+    vlc_cond_signal( &p_sys->cond );
+}
+
+static bool
+ThreadCmd_Wait( aout_sys_t *p_sys, struct thread_cmd *p_cmd )
+{
+    while( p_cmd->id != CMD_DONE && p_sys->b_thread_run )
+        vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
+
+    return p_cmd->id == CMD_DONE;
+}
+
+static void
+ThreadCmd_FlushQueue( aout_sys_t *p_sys )
+{
+    struct thread_cmd *p_cmd, *p_cmd_next;
+
+    for ( p_cmd = TAILQ_FIRST( &p_sys->thread_cmd_queue );
+          p_cmd != NULL; p_cmd = p_cmd_next )
+    {
+        p_cmd_next = TAILQ_NEXT( p_cmd, next );
+        TAILQ_REMOVE( &p_sys->thread_cmd_queue, p_cmd, next );
+        if( p_cmd->pf_destroy )
+            p_cmd->pf_destroy( p_cmd );
+    }
+}
+
+static void
+JNIThread_InitDelay( JNIEnv *env, audio_output_t *p_aout, uint32_t *p_delay )
+{
+    aout_sys_t *p_sys = p_aout->sys;
+
+    p_sys->i_pos_initial = JNI_AT_CALL_INT( getPlaybackHeadPosition );
+
+    /* HACK: On some broken devices, head position is still moving after a
+     * flush or a stop. So, wait for the head position to be stabilized. */
+    if( unlikely( p_sys->i_pos_initial != 0 ) )
+    {
+        uint32_t i_last_pos;
+        do {
+            i_last_pos = p_sys->i_pos_initial;
+            msleep( 50000 );
+            p_sys->i_pos_initial = JNI_AT_CALL_INT( getPlaybackHeadPosition );
+        } while( p_sys->i_pos_initial != i_last_pos );
+    }
+
+    p_sys->i_samples_written = 0;
+    *p_delay = 0;
+}
+
+static void
+JNIThread_SetDelay( JNIEnv *env, audio_output_t *p_aout, uint32_t *p_delay )
+{
+    aout_sys_t *p_sys = p_aout->sys;
+    bool b_frame_delay_set = false;
+    jlong i_frame_pos;
+    mtime_t i_current_time = mdate();
+
+    if( p_sys->p_audioTimestamp )
+    {
+        /* Android doc:
+         * getTimestamp: Poll for a timestamp on demand.
+         *
+         * If you need to track timestamps during initial warmup or after a
+         * routing or mode change, you should request a new timestamp once per
+         * second until the reported timestamps show that the audio clock is
+         * stable. Thereafter, query for a new timestamp approximately once
+         * every 10 seconds to once per minute. Calling this method more often
+         * is inefficient. It is also counter-productive to call this method
+         * more often than recommended, because the short-term differences
+         * between successive timestamp reports are not meaningful. If you need
+         * a high-resolution mapping between frame position and presentation
+         * time, consider implementing that at application level, based on
+         * low-resolution timestamps.
+         */
+        if( JNI_AT_CALL_BOOL( getTimestamp, p_sys->p_audioTimestamp ) )
+        {
+            jlong i_frame_time = JNI_AUDIOTIMESTAMP_GET_LONG( nanoTime ) / 1000;
+            /* frame time should be after last play time
+             * frame time shouldn't be in the future
+             * frame time should be less than 10 seconds old */
+            if( i_frame_time > p_sys->i_play_time
+                && i_current_time > i_frame_time
+                && ( i_current_time - i_frame_time ) <= INT64_C(10000000) )
+            {
+                jlong i_time_diff = i_current_time - i_frame_time;
+                jlong i_frames_diff = i_time_diff * p_sys->fmt.i_rate
+                                      / CLOCK_FREQ;
+                i_frame_pos = JNI_AUDIOTIMESTAMP_GET_LONG( framePosition )
+                              + i_frames_diff;
+                b_frame_delay_set = true;
+            }
+        }
+    }
+    if( !b_frame_delay_set )
+    {
+        /* Android doc:
+         * getPlaybackHeadPosition: Returns the playback head position
+         * expressed in frames. Though the "int" type is signed 32-bits, the
+         * value should be reinterpreted as if it is unsigned 32-bits. That is,
+         * the next position after 0x7FFFFFFF is (int) 0x80000000. This is a
+         * continuously advancing counter. It will wrap (overflow)
+         * periodically, for example approximately once every 27:03:11
+         * hours:minutes:seconds at 44.1 kHz. It is reset to zero by flush(),
+         * reload(), and stop().
+         */
+        uint32_t i_head_pos = JNI_AT_CALL_INT( getPlaybackHeadPosition );
+        i_frame_pos = i_head_pos - p_sys->i_pos_initial;
+        b_frame_delay_set = true;
+    }
+
+    if( b_frame_delay_set && p_sys->i_samples_written > i_frame_pos )
+        *p_delay = p_sys->i_samples_written - i_frame_pos;
+}
 
 static int
@@ -393,26 +554,46 @@ JNIThread_Start( JNIEnv *env, bool *p_error, audio_output_t *p_aout )
     i_size = i_min_buffer_size * 2; // double buffering
 
+    /* create AudioTrack object */
     p_audiotrack = JNI_AT_NEW( jfields.AudioManager.STREAM_MUSIC, i_rate,
                                i_channel_config, i_format, i_size,
                                jfields.AudioTrack.MODE_STREAM );
-    if( CHECK_EXCEPTION( "<init>" ) || !p_audiotrack )
+    if( CHECK_EXCEPTION( "AudioTrack<init>" ) || !p_audiotrack )
         return VLC_EGENERIC;
     p_sys->p_audiotrack = (*env)->NewGlobalRef( env, p_audiotrack );
     (*env)->DeleteLocalRef( env, p_audiotrack );
     if( !p_sys->p_audiotrack )
         return VLC_EGENERIC;
 
-    p_sys->fmt.i_rate = i_rate;
-    p_sys->i_samples_written = 0;
-    p_sys->i_bytes_per_frame = i_nb_channels * i_format_size;
+    if( jfields.AudioTimestamp.clazz )
+    {
+        /* create AudioTimestamp object */
+        jobject p_audioTimestamp = JNI_CALL( NewObject,
+                                             jfields.AudioTimestamp.clazz,
+                                             jfields.AudioTimestamp.ctor );
+        if( CHECK_EXCEPTION( "AudioTimestamp<init>" ) || !p_audioTimestamp )
+            goto error;
+        p_sys->p_audioTimestamp = (*env)->NewGlobalRef( env, p_audioTimestamp );
+        (*env)->DeleteLocalRef( env, p_audioTimestamp );
+        if( !p_sys->p_audioTimestamp )
+            goto error;
+    }
 
-    /* Gets the initial value of DAC samples counter */
-    JNIThread_TimeGet( env, p_error, p_aout, NULL );
+    p_sys->fmt.i_rate = i_rate;
 
     JNI_AT_CALL_VOID( play );
+    CHECK_EXCEPTION( "play" );
+    p_sys->i_play_time = mdate();
 
     return VLC_SUCCESS;
+
+error:
+    if( p_sys->p_audiotrack )
+    {
+        JNI_AT_CALL_VOID( release );
+        (*env)->DeleteGlobalRef( env, p_sys->p_audiotrack );
+        p_sys->p_audiotrack = NULL;
+    }
+    return VLC_EGENERIC;
 }
 
 static void
@@ -497,9 +678,9 @@ JNIThread_Play( JNIEnv *env, bool *p_error, audio_output_t *p_aout,
             break;
         }
 
-        p_sys->i_samples_written += i_ret / p_sys->i_bytes_per_frame;
         i_offset += i_ret;
     }
+    p_sys->i_samples_written += p_buffer->i_nb_samples;
 }
 
 static void
@@ -518,6 +699,7 @@ JNIThread_Pause( JNIEnv *env, bool *p_error, audio_output_t *p_aout,
     {
         JNI_AT_CALL_VOID( play );
         CHECK_EXCEPTION( "play" );
+        p_sys->i_play_time = mdate();
     }
 }
@@ -538,8 +720,6 @@ JNIThread_Flush( JNIEnv *env, bool *p_error, audio_output_t *p_aout,
      * that has not been played back will be discarded. No-op if not stopped
      * or paused, or if the track's creation mode is not MODE_STREAM.
      */
-    if( !p_sys->i_samples_written )
-        return;
     if( b_wait )
     {
         JNI_AT_CALL_VOID( stop );
@@ -547,15 +727,14 @@ JNIThread_Flush( JNIEnv *env, bool *p_error, audio_output_t *p_aout,
             return;
     } else
     {
         JNI_AT_CALL_VOID( pause );
         if( CHECK_EXCEPTION( "pause" ) )
             return;
         JNI_AT_CALL_VOID( flush );
     }
-    p_sys->i_samples_written = 0;
 
     JNI_AT_CALL_VOID( play );
     CHECK_EXCEPTION( "play" );
+    p_sys->i_play_time = mdate();
 }
 
 static void *
@@ -564,6 +743,8 @@ JNIThread( void *data )
     audio_output_t *p_aout = data;
     aout_sys_t *p_sys = p_aout->sys;
     bool b_error = false;
+    bool b_paused = false;
+    uint32_t i_audiotrack_delay = 0;
     JNIEnv* env;
 
     jni_attach_thread( &env, THREAD_NAME );
@@ -574,49 +755,77 @@ JNIThread( void *data )
     while( p_sys->b_thread_run )
     {
+        struct thread_cmd *p_cmd;
+
         /* wait to process a command */
-        while( p_sys->b_thread_run && p_sys->p_cmd == NULL )
+        while( ( p_cmd = TAILQ_FIRST( &p_sys->thread_cmd_queue ) ) == NULL
+               && p_sys->b_thread_run )
             vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
-        if( !p_sys->b_thread_run || p_sys->p_cmd == NULL )
+
+        if( !p_sys->b_thread_run || p_cmd == NULL )
             break;
 
+        if( b_paused && p_cmd->id == CMD_PLAY )
+        {
+            vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
+            continue;
+        }
+
+        TAILQ_REMOVE( &p_sys->thread_cmd_queue, p_cmd, next );
+        if( p_cmd->id == CMD_PLAY )
+        {
+            p_sys->i_samples_queued -= p_cmd->in.play.p_buffer->i_nb_samples;
+            vlc_cond_signal( &p_sys->cond );
+        }
+
+        vlc_mutex_unlock( &p_sys->mutex );
+
         /* process a command */
-        switch( p_sys->p_cmd->id )
+        switch( p_cmd->id )
         {
             case CMD_START:
-                p_sys->fmt = *p_sys->p_cmd->in.start.p_fmt;
-                p_sys->p_cmd->out.start.i_ret =
+                p_sys->fmt = *p_cmd->in.start.p_fmt;
+                p_cmd->out.start.i_ret =
                         JNIThread_Start( env, &b_error, p_aout );
-                p_sys->p_cmd->out.start.p_fmt = &p_sys->fmt;
+                JNIThread_InitDelay( env, p_aout, &i_audiotrack_delay );
+                p_cmd->out.start.p_fmt = &p_sys->fmt;
+                b_paused = false;
                 break;
             case CMD_STOP:
                 JNIThread_Stop( env, &b_error, p_aout );
+                b_paused = false;
                 break;
             case CMD_PLAY:
                 JNIThread_Play( env, &b_error, p_aout,
-                                p_sys->p_cmd->in.play.p_buffer );
+                                p_cmd->in.play.p_buffer );
+                JNIThread_SetDelay( env, p_aout, &i_audiotrack_delay );
                 break;
            case CMD_PAUSE:
                 JNIThread_Pause( env, &b_error, p_aout,
-                                 p_sys->p_cmd->in.pause.b_pause,
-                                 p_sys->p_cmd->in.pause.i_date );
+                                 p_cmd->in.pause.b_pause,
+                                 p_cmd->in.pause.i_date );
+                b_paused = p_cmd->in.pause.b_pause;
                 break;
             case CMD_FLUSH:
                 JNIThread_Flush( env, &b_error, p_aout,
-                                 p_sys->p_cmd->in.flush.b_wait );
-                break;
-            case CMD_TIME_GET:
-                p_sys->p_cmd->out.time_get.i_ret =
-                        JNIThread_TimeGet( env, &b_error, p_aout,
-                                           &p_sys->p_cmd->out.time_get.i_delay );
+                                 p_cmd->in.flush.b_wait );
+                JNIThread_InitDelay( env, p_aout, &i_audiotrack_delay );
                 break;
             default:
                 vlc_assert_unreachable();
         }
 
+        vlc_mutex_lock( &p_sys->mutex );
+
+        p_sys->i_audiotrack_delay = i_audiotrack_delay;
+
+        p_cmd->id = CMD_DONE;
+        if( p_cmd->pf_destroy )
+            p_cmd->pf_destroy( p_cmd );
+
         if( b_error )
             p_sys->b_thread_run = false;
-        p_sys->p_cmd->id = CMD_DONE;
-        p_sys->p_cmd = NULL;
 
         /* signal that command is processed */
         vlc_cond_signal( &p_sys->cond );
     }
@@ -636,13 +845,13 @@ end:
 static int
 Start( audio_output_t *p_aout, audio_sample_format_t *restrict p_fmt )
 {
-    int i_ret;
-    struct thread_cmd cmd;
+    int i_ret = VLC_EGENERIC;
+    struct thread_cmd *p_cmd;
     aout_sys_t *p_sys = p_aout->sys;
 
     vlc_mutex_lock( &p_sys->mutex );
 
-    assert( !p_sys->b_thread_run && p_sys->p_cmd == NULL );
+    assert( !p_sys->b_thread_run );
 
     /* create JNIThread */
     p_sys->b_thread_run = true;
@@ -654,25 +863,25 @@ Start( audio_output_t *p_aout, audio_sample_format_t *restrict p_fmt )
         return VLC_EGENERIC;
     }
 
-    /* ask the thread to process the Start command */
-    cmd.id = CMD_START;
-    cmd.in.start.p_fmt = p_fmt;
-    p_sys->p_cmd = &cmd;
-    vlc_cond_signal( &p_sys->cond );
-
-    /* wait for the thread */
-    while( cmd.id != CMD_DONE && p_sys->b_thread_run )
-        vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
+    p_cmd = ThreadCmd_New( CMD_START );
+    if( p_cmd )
+    {
+        /* ask the thread to process the Start command */
+        p_cmd->in.start.p_fmt = p_fmt;
+
+        ThreadCmd_InsertHead( p_sys, p_cmd );
+        if( ThreadCmd_Wait( p_sys, p_cmd ) )
+        {
+            i_ret = p_cmd->out.start.i_ret;
+            if( i_ret == VLC_SUCCESS )
+                *p_fmt = *p_cmd->out.start.p_fmt;
+        }
+        free( p_cmd );
+    }
 
     vlc_mutex_unlock( &p_sys->mutex );
 
-    /* retrieve results */
-    i_ret = cmd.out.start.i_ret;
     if( i_ret == VLC_SUCCESS )
-    {
-        *p_fmt = *cmd.out.start.p_fmt;
         aout_SoftVolumeStart( p_aout );
-    }
 
     return i_ret;
 }
@@ -684,21 +893,22 @@ Stop( audio_output_t *p_aout )
     vlc_mutex_lock( &p_sys->mutex );
 
-    assert( p_sys->p_cmd == NULL );
     if( p_sys->b_thread_run )
     {
-        struct thread_cmd cmd;
+        struct thread_cmd *p_cmd;
 
-        /* ask the thread to process the Stop command */
-        cmd.id = CMD_STOP;
-        p_sys->p_cmd = &cmd;
-        vlc_cond_signal( &p_sys->cond );
+        p_sys->i_samples_queued = 0;
+        ThreadCmd_FlushQueue( p_sys );
 
-        /* wait for the thread */
-        while( cmd.id != CMD_DONE && p_sys->b_thread_run )
-            vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
+        p_cmd = ThreadCmd_New( CMD_STOP );
+        if( p_cmd )
+        {
+            /* ask the thread to process the Stop command */
+            ThreadCmd_InsertHead( p_sys, p_cmd );
+            ThreadCmd_Wait( p_sys, p_cmd );
+            free( p_cmd );
+        }
 
         /* kill the thread */
         p_sys->b_thread_run = false;
         vlc_cond_signal( &p_sys->cond );
@@ -708,32 +918,44 @@ Stop( audio_output_t *p_aout )
     vlc_join( p_sys->thread, NULL );
 }
 
+static void
+PlayCmd_Destroy( struct thread_cmd *p_cmd )
+{
+    block_Release( p_cmd->in.play.p_buffer );
+    free( p_cmd );
+}
+
 static void
 Play( audio_output_t *p_aout, block_t *p_buffer )
 {
     aout_sys_t *p_sys = p_aout->sys;
 
     vlc_mutex_lock( &p_sys->mutex );
-
-    assert( p_sys->p_cmd == NULL );
     if( p_sys->b_thread_run )
     {
-        struct thread_cmd cmd;
+        struct thread_cmd *p_cmd;
 
-        /* ask the thread to process the Play command */
-        cmd.id = CMD_PLAY;
-        cmd.in.play.p_buffer = p_buffer;
-        p_sys->p_cmd = &cmd;
-        vlc_cond_signal( &p_sys->cond );
-
-        /* wait for the thread */
-        while( cmd.id != CMD_DONE && p_sys->b_thread_run )
+        while( p_sys->i_samples_queued != 0 && p_sys->b_thread_run
+               && FRAMES_TO_US( p_sys->i_samples_queued +
+                                p_buffer->i_nb_samples ) >= MAX_QUEUE_US )
             vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
+
+        p_cmd = ThreadCmd_New( CMD_PLAY );
+        if( p_cmd )
+        {
+            /* ask the thread to process the Play command */
+            p_cmd->in.play.p_buffer = p_buffer;
+            p_cmd->pf_destroy = PlayCmd_Destroy;
+
+            ThreadCmd_InsertTail( p_sys, p_cmd );
+
+            p_sys->i_samples_queued += p_buffer->i_nb_samples;
+        } else
+            block_Release( p_cmd->in.play.p_buffer );
    }
     vlc_mutex_unlock( &p_sys->mutex );
-
-    block_Release( p_buffer );
 }
 
 static void
@@ -743,82 +965,68 @@ Pause( audio_output_t *p_aout, bool b_pause, mtime_t i_date )
     vlc_mutex_lock( &p_sys->mutex );
 
-    assert( p_sys->p_cmd == NULL );
     if( p_sys->b_thread_run )
     {
-        struct thread_cmd cmd;
+        struct thread_cmd *p_cmd = ThreadCmd_New( CMD_PAUSE );
 
-        /* ask the thread to process the Pause command */
-        cmd.id = CMD_PAUSE;
-        cmd.in.pause.b_pause = b_pause;
-        cmd.in.pause.i_date = i_date;
-        p_sys->p_cmd = &cmd;
-        vlc_cond_signal( &p_sys->cond );
-
-        /* wait for the thread */
-        while( cmd.id != CMD_DONE && p_sys->b_thread_run )
-            vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
+        if( p_cmd )
+        {
+            /* ask the thread to process the Pause command */
+            p_cmd->in.pause.b_pause = b_pause;
+            p_cmd->in.pause.i_date = i_date;
+
+            ThreadCmd_InsertHead( p_sys, p_cmd );
+            ThreadCmd_Wait( p_sys, p_cmd );
+
+            free( p_cmd );
+        }
     }
 
     vlc_mutex_unlock( &p_sys->mutex );
 }
 
 static void
-Flush ( audio_output_t *p_aout, bool b_wait )
+Flush( audio_output_t *p_aout, bool b_wait )
 {
     aout_sys_t *p_sys = p_aout->sys;
 
     vlc_mutex_lock( &p_sys->mutex );
 
-    assert( p_sys->p_cmd == NULL );
     if( p_sys->b_thread_run )
     {
-        struct thread_cmd cmd;
+        struct thread_cmd *p_cmd;
 
-        /* ask the thread to process the Flush command */
-        cmd.id = CMD_FLUSH;
-        cmd.in.flush.b_wait = b_wait;
-        p_sys->p_cmd = &cmd;
-        vlc_cond_signal( &p_sys->cond );
+        p_sys->i_samples_queued = 0;
+        ThreadCmd_FlushQueue( p_sys );
 
-        /* wait for the thread */
-        while( cmd.id != CMD_DONE && p_sys->b_thread_run )
-            vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
-    }
+        p_cmd = ThreadCmd_New( CMD_FLUSH );
+        if( p_cmd)
+        {
+            /* ask the thread to process the Flush command */
+            p_cmd->in.flush.b_wait = b_wait;
+
+            ThreadCmd_InsertHead( p_sys, p_cmd );
+            ThreadCmd_Wait( p_sys, p_cmd );
+
+            free( p_cmd );
+        }
+    }
     vlc_mutex_unlock( &p_sys->mutex );
 }
 
 static int
 TimeGet( audio_output_t *p_aout, mtime_t *restrict p_delay )
 {
+    int i_ret = -1;
     aout_sys_t *p_sys = p_aout->sys;
-    int i_ret;
 
     vlc_mutex_lock( &p_sys->mutex );
 
-    assert( p_sys->p_cmd == NULL );
-    if( p_sys->b_thread_run )
+    if( p_sys->i_samples_queued != 0 )
     {
-        struct thread_cmd cmd;
-
-        /* ask the thread to process the TimeGet */
-        cmd.id = CMD_TIME_GET;
-        p_sys->p_cmd = &cmd;
-        vlc_cond_signal( &p_sys->cond );
-
-        /* wait for the thread */
-        while( cmd.id != CMD_DONE && p_sys->b_thread_run )
-            vlc_cond_wait( &p_sys->cond, &p_sys->mutex );
-
-        /* retrieve results */
-        i_ret = cmd.out.time_get.i_ret;
-        *p_delay = cmd.out.time_get.i_delay;
-    }
+        *p_delay = FRAMES_TO_US( p_sys->i_samples_queued +
+                                 p_sys->i_audiotrack_delay );
+        i_ret = 0;
+    } else
+        i_ret = -1;
+
     vlc_mutex_unlock( &p_sys->mutex );
 
     return i_ret;
@@ -841,6 +1049,7 @@ Open( vlc_object_t *obj )
 
     vlc_mutex_init( &p_sys->mutex );
     vlc_cond_init( &p_sys->cond );
+    TAILQ_INIT( &p_sys->thread_cmd_queue );
 
     p_aout->sys = p_sys;
     p_aout->start = Start;
......
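
As a quick sanity check on the new bound (sketch only, not part of the patch;
the helper name is made up): MAX_QUEUE_US is one second, so Play() starts
blocking once roughly one second of audio, i.e. i_rate frames, is pending.

    /* Sketch only: the largest number of frames Play() lets pile up,
     * given MAX_QUEUE_US and CLOCK_FREQ from the patch above. */
    static uint32_t MaxQueuedFrames( const aout_sys_t *p_sys )
    {
        /* 1000000 us * rate / 1000000 us per second = rate frames,
         * e.g. 48000 frames at 48 kHz, 44100 frames at 44.1 kHz. */
        return (uint32_t)( MAX_QUEUE_US * p_sys->fmt.i_rate / CLOCK_FREQ );
    }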