Commit a58d7c76 authored by Thomas Guillem

audiotrack: add AUDIOTRACK_HW_LATENCY define

If defined, getTimestamp() (since Android 4.4) or getPlaybackHeadPosition() +
AudioSystem.getOutputLatency() (since Android 4.3) will be used to get the AudioTrack position.
parent ce58d9dd
@@ -106,8 +106,7 @@ struct aout_sys_t {
 // Don't use Float for now since 5.1/7.1 Float is down sampled to Stereo Float
 //#define AUDIOTRACK_USE_FLOAT
-// TODO: activate getTimestamp for new android versions
-//#define AUDIOTRACK_USE_TIMESTAMP
+//#define AUDIOTRACK_HW_LATENCY
 
 #define AUDIO_CHAN_TEXT N_("Audio output channels")
 #define AUDIO_CHAN_LONGTEXT N_("Channels available for audio output. " \
@@ -271,8 +270,10 @@ InitJNIFields( audio_output_t *p_aout )
     } else
         GET_ID( GetMethodID, AudioTrack.write, "write", "([BII)I", true );
+#ifdef AUDIOTRACK_HW_LATENCY
     GET_ID( GetMethodID, AudioTrack.getTimestamp,
             "getTimestamp", "(Landroid/media/AudioTimestamp;)Z", false );
+#endif
     GET_ID( GetMethodID, AudioTrack.getPlaybackHeadPosition,
             "getPlaybackHeadPosition", "()I", true );
@@ -302,6 +303,7 @@ InitJNIFields( audio_output_t *p_aout )
                 "nanoTime", "J", true );
     }
+#ifdef AUDIOTRACK_HW_LATENCY
     /* AudioSystem class init */
     GET_CLASS( "android/media/AudioSystem", false );
     if( clazz )
@@ -310,6 +312,7 @@ InitJNIFields( audio_output_t *p_aout )
         GET_ID( GetStaticMethodID, AudioSystem.getOutputLatency,
                 "getOutputLatency", "(I)I", false );
     }
+#endif
     /* AudioFormat class init */
     GET_CLASS( "android/media/AudioFormat", true );
@@ -973,7 +976,6 @@ Start( audio_output_t *p_aout, audio_sample_format_t *restrict p_fmt )
     }
     p_sys->i_max_audiotrack_samples = BYTES_TO_FRAMES( i_audiotrack_size );
-#ifdef AUDIOTRACK_USE_TIMESTAMP
     if( jfields.AudioTimestamp.clazz )
     {
         /* create AudioTimestamp object */
@@ -990,7 +992,6 @@ Start( audio_output_t *p_aout, audio_sample_format_t *restrict p_fmt )
             return VLC_EGENERIC;
         }
     }
-#endif
     if( p_sys->fmt.i_format == VLC_CODEC_FL32 )
     {