Commit c79648fe authored by Christophe Massiot

* Optimized the video_parser; define VPAR_OPTIM_LEVEL in config.h

  to tune the compilation time;
* Removed a printf from the idct and a useless function.
parent 060c6b7a
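
For readers skimming the diff: the point of VPAR_OPTIM_LEVEL is that the specialized vpar_PictureData2* parsers are only compiled in (and dispatched to) when the level is high enough; otherwise everything falls back to vpar_PictureDataGENERIC. Below is a minimal sketch of that pattern, assuming the project's usual headers and types; the helper name SelectPictureData is hypothetical, the actual commit uses a static lookup table inside PictureHeader() as shown further down.

```c
#include "config.h"           /* provides VPAR_OPTIM_LEVEL */
#include "video_parser.h"     /* provides f_picture_data_t and the parsers */

/* Hypothetical helper (not in the commit) illustrating how VPAR_OPTIM_LEVEL
 * selects between the generic parser and a compile-time specialized one. */
static f_picture_data_t SelectPictureData( int i_structure, int i_coding_type )
{
#if (VPAR_OPTIM_LEVEL > 0)
    /* Specialized MPEG-2 4:2:0 frame-picture parsers are compiled in. */
    if( i_structure == FRAME_STRUCTURE && i_coding_type == I_CODING_TYPE )
    {
        return vpar_PictureData2I420F0;
    }
#endif
    /* Always-available fallback; also used for weird streams (non-4:2:0,
     * MPEG-1, data-partitioned or very tall pictures). */
    return vpar_PictureDataGENERIC;
}
```

Raising VPAR_OPTIM_LEVEL to 2 additionally compiles the field-picture variants (vpar_PictureData2*TZ / *BZ), at the cost of a noticeably longer build, which is why 1 is suggested as the default in config.h.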
......@@ -288,6 +288,10 @@
/* Time to sleep when waiting for a buffer (from vout or the video fifo). */
#define VPAR_OUTMEM_SLEEP 50000
/* Optimization level, from 0 to 2 - 1 is generally a good compromise. Remember
* that raising this level dramatically lengthens the compilation time. */
#define VPAR_OPTIM_LEVEL 1
/* The following directives only apply if you define VDEC_SMP below. */
/* Number of macroblock buffers available. It should be always greater than
......
......@@ -148,7 +148,6 @@ typedef void (*f_idct_t)( struct vdec_thread_s *, dctelem_t*, int );
/*****************************************************************************
* Prototypes
*****************************************************************************/
void vdec_DummyIDCT( struct vdec_thread_s *, dctelem_t*, int );
void vdec_InitIDCT (struct vdec_thread_s * p_vdec);
void vdec_SparseIDCT( struct vdec_thread_s *, dctelem_t*, int );
void vdec_IDCT( struct vdec_thread_s *, dctelem_t*, int );
......@@ -150,6 +150,21 @@ void vpar_DestroyThread ( vpar_thread_t *p_vpar /*, int *pi_stat
/* Dynamic thread settings */
/* ?? */
/*****************************************************************************
* NextStartCode : Find the next start code
*****************************************************************************/
static __inline__ void NextStartCode( vpar_thread_t * p_vpar )
{
/* Re-align the buffer on an 8-bit boundary */
RealignBits( &p_vpar->bit_stream );
while( ShowBits( &p_vpar->bit_stream, 24 ) != 0x01L && !p_vpar->b_die )
{
RemoveBits( &p_vpar->bit_stream, 8 );
}
}
/*****************************************************************************
* LoadQuantizerScale
*****************************************************************************
......
......@@ -141,53 +141,17 @@ void vpar_InitPMBType( struct vpar_thread_s * p_vpar );
void vpar_InitBMBType( struct vpar_thread_s * p_vpar );
void vpar_InitCodedPattern( struct vpar_thread_s * p_vpar );
void vpar_InitDCTTables( struct vpar_thread_s * p_vpar );
void vpar_ParseMacroblockGENERIC( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2I420F0( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2P420F0( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2B420F0( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2I420T0( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2P420T0( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2B420T0( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2I420B1( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2P420B1( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_ParseMacroblock2B420B1( struct vpar_thread_s* p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field );
void vpar_PictureDataGENERIC( struct vpar_thread_s * p_vpar, int i_mb_base );
#if (VPAR_OPTIM_LEVEL > 0)
void vpar_PictureData2I420F0( struct vpar_thread_s * p_vpar, int i_mb_base );
void vpar_PictureData2P420F0( struct vpar_thread_s * p_vpar, int i_mb_base );
void vpar_PictureData2B420F0( struct vpar_thread_s * p_vpar, int i_mb_base );
#endif
#if (VPAR_OPTIM_LEVEL > 1)
void vpar_PictureData2I420TZ( struct vpar_thread_s * p_vpar, int i_mb_base );
void vpar_PictureData2P420TZ( struct vpar_thread_s * p_vpar, int i_mb_base );
void vpar_PictureData2B420TZ( struct vpar_thread_s * p_vpar, int i_mb_base );
void vpar_PictureData2I420BZ( struct vpar_thread_s * p_vpar, int i_mb_base );
void vpar_PictureData2P420BZ( struct vpar_thread_s * p_vpar, int i_mb_base );
void vpar_PictureData2B420BZ( struct vpar_thread_s * p_vpar, int i_mb_base );
#endif
\ No newline at end of file
......@@ -15,13 +15,6 @@
* "video_fifo.h"
*****************************************************************************/
/*****************************************************************************
* Function pointers
*****************************************************************************/
struct vpar_thread_s;
typedef void (*f_slice_header_t)( struct vpar_thread_s*, int*, int, u32);
/*****************************************************************************
* quant_matrix_t : Quantization Matrix
*****************************************************************************/
......@@ -44,7 +37,6 @@ typedef struct sequence_s
boolean_t b_mpeg2;
boolean_t b_progressive;
unsigned int i_scalable_mode;
f_slice_header_t pf_slice_header;
quant_matrix_t intra_quant, nonintra_quant;
quant_matrix_t chroma_intra_quant, chroma_nonintra_quant;
......@@ -154,7 +146,6 @@ typedef struct slice_parsing_s
#define BOTTOM_FIELD 2
#define FRAME_STRUCTURE 3
/*****************************************************************************
* Prototypes
*****************************************************************************/
......
......@@ -46,20 +46,12 @@
* an MMX DCT in the future. */
/*****************************************************************************
* vdec_DummyIDCT : dummy function that does nothing
*****************************************************************************/
void vdec_DummyIDCT( vdec_thread_t * p_vdec, dctelem_t * p_block,
int i_idontcare )
{
}
/*****************************************************************************
* init_SparseIDCT : initialize datas for vdec_SparceIDCT
* vdec_InitIDCT : initialize datas for vdec_SparceIDCT
* vdec_SparseIDCT : IDCT function for sparse matrices
*****************************************************************************/
void vdec_InitIDCT (vdec_thread_t * p_vdec)
{
{
int i;
dctelem_t * p_pre = p_vdec->p_pre_idct;
......@@ -86,7 +78,7 @@ void vdec_SparseIDCT (vdec_thread_t * p_vdec, dctelem_t * p_block,
/* If DC Coefficient. */
if ( i_sparse_pos == 0 )
{
dp=(int *)p_block;
dp=(int *)p_block;
val=RIGHT_SHIFT((*p_block + 4), 3);
/* Compute int to assign. This speeds things up a bit */
v = ((val & 0xffff) | (val << 16));
......@@ -372,7 +364,6 @@ void vdec_IDCT( vdec_thread_t * p_vdec, dctelem_t * p_block, int i_idontcare )
dataptr = p_block;
fprintf( stderr, "normal dct" );
for (rowctr = DCTSIZE-1; rowctr >= 0; rowctr--)
{
/* Due to quantization, we will usually find that many of the input
......
......@@ -37,15 +37,6 @@
#include "video_parser.h"
#include "video_fifo.h"
/*
* Local prototypes
*/
typedef void (*f_decode_block_t)( vpar_thread_t *, macroblock_t *, int );
static void vpar_DecodeMPEG1Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, int i_b );
static void vpar_DecodeMPEG1Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb, int i_b );
static void vpar_DecodeMPEG2Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, int i_b );
static void vpar_DecodeMPEG2Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb, int i_b );
/*
* Welcome to vpar_blocks.c ! Here's where the heavy processor-critical parsing
* task is done. This file is divided in several parts :
......@@ -53,7 +44,8 @@ static void vpar_DecodeMPEG2Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb,
* - Decoding of coded blocks
* - Decoding of motion vectors
* - Decoding of the other macroblock structures
* It's a pretty long file. Good luck !
* - Picture data parsing management (slices and error handling)
* It's a pretty long file. Good luck and have a nice day.
*/
......@@ -580,9 +572,11 @@ void vpar_InitDCTTables( vpar_thread_t * p_vpar )
*/
/*****************************************************************************
* vpar_DecodeMPEG1Non : decode MPEG-1 non-intra blocks
* DecodeMPEG1NonIntra : decode MPEG-1 non-intra blocks
*****************************************************************************/
static void vpar_DecodeMPEG1Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, int i_b )
static __inline__ void DecodeMPEG1NonIntra( vpar_thread_t * p_vpar,
macroblock_t * p_mb, int i_b,
int i_chroma_format )
{
if( p_vpar->picture.i_coding_type == D_CODING_TYPE )
......@@ -594,9 +588,11 @@ static void vpar_DecodeMPEG1Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, in
}
/*****************************************************************************
* vpar_DecodeMPEG1Intra : decode MPEG-1 intra blocks
* DecodeMPEG1Intra : decode MPEG-1 intra blocks
*****************************************************************************/
static void vpar_DecodeMPEG1Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb, int i_b )
static __inline__ void DecodeMPEG1Intra( vpar_thread_t * p_vpar,
macroblock_t * p_mb, int i_b ,
int i_chroma_format )
{
if( p_vpar->picture.i_coding_type == D_CODING_TYPE )
......@@ -608,9 +604,11 @@ static void vpar_DecodeMPEG1Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb,
}
/*****************************************************************************
* vpar_DecodeMPEG2Non : decode MPEG-2 non-intra blocks
* DecodeMPEG2NonIntra : decode MPEG-2 non-intra blocks
*****************************************************************************/
static void vpar_DecodeMPEG2Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, int i_b )
static __inline__ void DecodeMPEG2NonIntra( vpar_thread_t * p_vpar,
macroblock_t * p_mb, int i_b,
int i_chroma_format )
{
int i_parse;
int i_nc;
......@@ -622,9 +620,8 @@ static void vpar_DecodeMPEG2Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, in
int i_pos;
int i_run;
int i_level;
int i_quant_type;
boolean_t b_sign;
int * ppi_quant[2];
int * pi_quant;
/* Lookup Table for the chromatic component */
static int pi_cc_index[12] = { 0, 0, 0, 0, 1, 2, 1, 2, 1, 2 };
......@@ -633,11 +630,16 @@ static void vpar_DecodeMPEG2Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, in
/* Determine whether it is luminance or not (chrominance) */
i_type = ( i_cc + 1 ) >> 1;
i_quant_type = (!i_type) || (p_vpar->sequence.i_chroma_format == CHROMA_420);
/* Give a pointer to the quantization matrices for intra blocks */
ppi_quant[1] = p_vpar->sequence.nonintra_quant.pi_matrix;
ppi_quant[0] = p_vpar->sequence.chroma_nonintra_quant.pi_matrix;
if( (i_chroma_format == CHROMA_420) || (!i_type) )
{
pi_quant = p_vpar->sequence.nonintra_quant.pi_matrix;
}
else
{
pi_quant = p_vpar->sequence.chroma_nonintra_quant.pi_matrix;
}
/* Decoding of the AC coefficients */
......@@ -712,7 +714,7 @@ static void vpar_DecodeMPEG2Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, in
i_pos = pi_scan[p_vpar->picture.b_alternate_scan][i_parse];
i_level = ( ((i_level << 1) + 1) * p_vpar->slice.i_quantizer_scale
* ppi_quant[i_quant_type][i_pos] ) >> 5;
* pi_quant[i_pos] ) >> 5;
p_mb->ppi_blocks[i_b][i_pos] = b_sign ? -i_level : i_level;
}
......@@ -721,15 +723,17 @@ static void vpar_DecodeMPEG2Non( vpar_thread_t * p_vpar, macroblock_t * p_mb, in
}
/*****************************************************************************
* vpar_DecodeMPEG2Intra : decode MPEG-2 intra blocks
* DecodeMPEG2Intra : decode MPEG-2 intra blocks
*****************************************************************************/
static void vpar_DecodeMPEG2Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb, int i_b )
static __inline__ void DecodeMPEG2Intra( vpar_thread_t * p_vpar,
macroblock_t * p_mb, int i_b,
int i_chroma_format )
{
int i_parse;
int i_nc;
int i_cc;
int i_coef;
int i_type, i_quant_type;
int i_type;
int i_code;
int i_length;
int i_pos;
......@@ -739,7 +743,7 @@ static void vpar_DecodeMPEG2Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb,
int i_level;
boolean_t b_vlc_intra;
boolean_t b_sign;
int * ppi_quant[2];
int * pi_quant;
/* Lookup Table for the chromatic component */
static int pi_cc_index[12] = { 0, 0, 0, 0, 1, 2, 1, 2, 1, 2 };
......@@ -747,11 +751,16 @@ static void vpar_DecodeMPEG2Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb,
/* Determine whether it is luminance or not (chrominance) */
i_type = ( i_cc + 1 ) >> 1;
i_quant_type = (!i_type) | (p_vpar->sequence.i_chroma_format == CHROMA_420);
/* Give a pointer to the quantization matrices for intra blocks */
ppi_quant[1] = p_vpar->sequence.intra_quant.pi_matrix;
ppi_quant[0] = p_vpar->sequence.chroma_intra_quant.pi_matrix;
if( (i_chroma_format == CHROMA_420) || (!i_type) )
{
pi_quant = p_vpar->sequence.intra_quant.pi_matrix;
}
else
{
pi_quant = p_vpar->sequence.chroma_intra_quant.pi_matrix;
}
#if 0
/* Decoding of the DC intra coefficient */
......@@ -947,7 +956,7 @@ static void vpar_DecodeMPEG2Intra( vpar_thread_t * p_vpar, macroblock_t * p_mb,
i_pos = pi_scan[p_vpar->picture.b_alternate_scan][i_parse];
i_level = ( i_level *
p_vpar->slice.i_quantizer_scale *
ppi_quant[i_quant_type][i_pos] ) >> 4;
pi_quant[i_pos] ) >> 4;
p_mb->ppi_blocks[i_b][i_pos] = b_sign ? -i_level : i_level;
}
......@@ -1316,10 +1325,11 @@ static __inline__ int CodedPattern444( vpar_thread_t * p_vpar )
*****************************************************************************/
static __inline__ void InitMacroblock( vpar_thread_t * p_vpar,
macroblock_t * p_mb, int i_coding_type,
int i_chroma_format,
int i_structure,
boolean_t b_second_field )
{
p_mb->i_chroma_nb_blocks = p_vpar->sequence.i_chroma_nb_blocks;
p_mb->i_chroma_nb_blocks = 1 << i_chroma_format;
p_mb->p_picture = p_vpar->picture.p_picture;
if( i_coding_type == B_CODING_TYPE )
......@@ -1394,7 +1404,8 @@ static __inline__ void SkippedMacroblock( vpar_thread_t * p_vpar, int i_mb,
p_vpar->picture.pp_mb[i_mb_base + i_mb] = p_mb;
#endif
InitMacroblock( p_vpar, p_mb, i_coding_type, i_structure, b_second_field );
InitMacroblock( p_vpar, p_mb, i_coding_type, i_chroma_format,
i_structure, b_second_field );
/* Motion type is picture structure. */
p_mb->pf_motion = pf_motion_skipped[i_chroma_format]
......@@ -1447,7 +1458,8 @@ static __inline__ void MacroblockModes( vpar_thread_t * p_vpar,
* has to be dropped, take care if you use scalable streams. */
/* RemoveBits( &p_vpar->bit_stream, 2 ); */
if( p_mb->i_mb_type & (MB_MOTION_FORWARD | MB_MOTION_BACKWARD) )
if( (i_coding_type == P_CODING_TYPE || i_coding_type == B_CODING_TYPE)
&& (p_mb->i_mb_type & (MB_MOTION_FORWARD | MB_MOTION_BACKWARD)) )
{
if( !(i_structure == FRAME_STRUCTURE
&& p_vpar->picture.b_frame_pred_frame_dct) )
......@@ -1458,14 +1470,14 @@ static __inline__ void MacroblockModes( vpar_thread_t * p_vpar,
{
p_vpar->mb.i_motion_type = MOTION_FRAME;
}
}
/* ???? */
p_vpar->mb.i_mv_count = ppi_mv_count[i_structure == FRAME_STRUCTURE]
[p_vpar->mb.i_motion_type];
p_vpar->mb.i_mv_format = ppi_mv_format[i_structure == FRAME_STRUCTURE]
[p_vpar->mb.i_motion_type];
p_vpar->mb.b_dmv = p_vpar->mb.i_motion_type == MOTION_DMV;
/* ???? */
p_vpar->mb.i_mv_count = ppi_mv_count[i_structure == FRAME_STRUCTURE]
[p_vpar->mb.i_motion_type];
p_vpar->mb.i_mv_format = ppi_mv_format[i_structure == FRAME_STRUCTURE]
[p_vpar->mb.i_motion_type];
p_vpar->mb.b_dmv = p_vpar->mb.i_motion_type == MOTION_DMV;
}
p_vpar->mb.b_dct_type = 0;
if( (i_structure == FRAME_STRUCTURE) &&
......@@ -1491,6 +1503,73 @@ static __inline__ void MacroblockModes( vpar_thread_t * p_vpar,
/*****************************************************************************
* ParseMacroblock : Parse the next macroblock
*****************************************************************************/
#define PARSEBLOCKS( MPEG1FUNC, MPEG2FUNC ) \
{ \
i_mask = 1 << (3 + (1 << i_chroma_format)); \
\
/* luminance */ \
p_data1 = p_mb->p_picture->p_y \
+ p_mb->i_l_x + p_vpar->mb.i_l_y*(p_vpar->sequence.i_width); \
\
for( i_b = 0 ; i_b < 4 ; i_b++, i_mask >>= 1 ) \
{ \
if( p_mb->i_coded_block_pattern & i_mask ) \
{ \
memset( p_mb->ppi_blocks[i_b], 0, 64*sizeof(dctelem_t) ); \
if( b_mpeg2 ) \
MPEG2FUNC( p_vpar, p_mb, i_b, i_chroma_format ); \
else \
MPEG1FUNC( p_vpar, p_mb, i_b, i_chroma_format ); \
\
/* Calculate block coordinates. */ \
p_mb->p_data[i_b] = p_data1 \
+ pi_y[p_vpar->mb.b_dct_type][i_b] \
* p_vpar->sequence.i_width \
+ pi_x[i_b]; \
} \
} \
\
if( p_vpar->picture.b_error ) \
{ \
/* Mark this block as skipped (better than green blocks), and \
* go to the next slice. */ \
(*pi_mb_address)--; \
vpar_DestroyMacroblock( &p_vpar->vfifo, p_mb ); \
return; \
} \
\
/* chrominance */ \
p_data1 = p_mb->p_picture->p_u \
+ p_mb->i_c_x \
+ p_vpar->mb.i_c_y \
* (p_vpar->sequence.i_chroma_width); \
p_data2 = p_mb->p_picture->p_v \
+ p_mb->i_c_x \
+ p_vpar->mb.i_c_y \
* (p_vpar->sequence.i_chroma_width); \
\
for( i_b = 4; i_b < 4 + (1 << i_chroma_format); \
i_b++, i_mask >>= 1 ) \
{ \
yuv_data_t * pp_data[2] = {p_data1, p_data2}; \
\
if( p_mb->i_coded_block_pattern & i_mask ) \
{ \
memset( p_mb->ppi_blocks[i_b], 0, 64*sizeof(dctelem_t) ); \
if( b_mpeg2 ) \
MPEG2FUNC( p_vpar, p_mb, i_b, i_chroma_format ); \
else \
MPEG1FUNC( p_vpar, p_mb, i_b, i_chroma_format ); \
\
/* Calculate block coordinates. */ \
p_mb->p_data[i_b] = pp_data[i_b & 1] \
+ pi_y[p_vpar->mb.b_dct_type][i_b] \
* p_vpar->sequence.i_chroma_width \
+ pi_x[i_b]; \
} \
} \
}
static __inline__ void ParseMacroblock(
vpar_thread_t * p_vpar,
int * pi_mb_address, /* previous address to be
......@@ -1527,13 +1606,9 @@ static __inline__ void ParseMacroblock(
vdec_MotionFrameDMV444}
}
};
static f_decode_block_t pppf_decode_block[2][2] =
{ {vpar_DecodeMPEG1Non, vpar_DecodeMPEG1Intra},
{vpar_DecodeMPEG2Non, vpar_DecodeMPEG2Intra} };
static int pi_x[12] = {0,8,0,8,0,0,0,0,8,8,8,8};
static int pi_y[2][12] = { {0,0,8,8,0,0,8,8,0,0,8,8},
{0,0,1,1,0,0,1,1,0,0,1,1} };
static int pi_dc_dct_reinit[4] = {128,256,512,1024};
int i_mb, i_b, i_mask;
macroblock_t * p_mb;
......@@ -1549,7 +1624,7 @@ static __inline__ void ParseMacroblock(
/* Reset DC predictors (7.2.1). */
p_vpar->slice.pi_dc_dct_pred[0] = p_vpar->slice.pi_dc_dct_pred[1]
= p_vpar->slice.pi_dc_dct_pred[2]
= pi_dc_dct_reinit[p_vpar->picture.i_intra_dc_precision];
= 1 << (7 + p_vpar->picture.i_intra_dc_precision);
if( i_coding_type == P_CODING_TYPE )
{
......@@ -1574,7 +1649,8 @@ static __inline__ void ParseMacroblock(
p_vpar->picture.pp_mb[i_mb_base + *pi_mb_address] = p_mb;
#endif
InitMacroblock( p_vpar, p_mb, i_coding_type, i_structure, b_second_field );
InitMacroblock( p_vpar, p_mb, i_coding_type, i_chroma_format,
i_structure, b_second_field );
/* Parse off macroblock_modes structure. */
MacroblockModes( p_vpar, p_mb, i_chroma_format, i_coding_type,
......@@ -1614,12 +1690,12 @@ static __inline__ void ParseMacroblock(
p_mb->ppi_field_select[0][0] = (i_structure == BOTTOM_FIELD);
}
if( !(p_mb->i_mb_type & MB_INTRA) )
if( (i_coding_type != I_CODING_TYPE) && !(p_mb->i_mb_type & MB_INTRA) )
{
/* Reset DC predictors (7.2.1). */
p_vpar->slice.pi_dc_dct_pred[0] = p_vpar->slice.pi_dc_dct_pred[1]
= p_vpar->slice.pi_dc_dct_pred[2]
= pi_dc_dct_reinit[p_vpar->picture.i_intra_dc_precision];
= 1 << (7 + p_vpar->picture.i_intra_dc_precision);
/* Motion function pointer. */
p_mb->pf_motion = pppf_motion[i_chroma_format]
......@@ -1644,6 +1720,11 @@ static __inline__ void ParseMacroblock(
{
p_mb->i_coded_block_pattern = 0;
}
/*
* Effectively decode blocks.
*/
PARSEBLOCKS( DecodeMPEG1NonIntra, DecodeMPEG2NonIntra )
}
else
{
......@@ -1678,104 +1759,153 @@ static __inline__ void ParseMacroblock(
else
{
p_mb->i_coded_block_pattern =
(1 << (4 + p_mb->i_chroma_nb_blocks)) - 1;
(1 << (4 + (1 << i_chroma_format))) - 1;
}
/*
* Effectively decode blocks.
*/
PARSEBLOCKS( DecodeMPEG1Intra, DecodeMPEG2Intra )
}
if( p_vpar->picture.b_error )
if( !p_vpar->picture.b_error )
{
UpdateContext( p_vpar, i_structure );
#ifndef VDEC_SMP
/* Decode the macroblock NOW ! */
vdec_DecodeMacroblock( p_vpar->pp_vdec[0], p_mb );
#endif
}
else
{
/* Mark this block as skipped (better than green blocks), and go
* to the next slice. */
(*pi_mb_address)--;
vpar_DestroyMacroblock( &p_vpar->vfifo, p_mb );
return;
}
}
/*
* Effectively decode blocks.
*/
/*
* Picture data parsing management
*/
i_mask = 1 << (3 + p_mb->i_chroma_nb_blocks);
/*****************************************************************************
* SliceHeader : Parse the next slice structure
*****************************************************************************/
static __inline__ void SliceHeader( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code, boolean_t b_high,
boolean_t b_dp_scalable,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
{
int i_mb_address_save = *pi_mb_address;
/* luminance */
p_data1 = p_mb->p_picture->p_y
+ p_mb->i_l_x + p_vpar->mb.i_l_y*(p_vpar->sequence.i_width);
p_vpar->picture.b_error = 0;
for( i_b = 0 ; i_b < 4 ; i_b++, i_mask >>= 1 )
if( b_high )
{
if( p_mb->i_coded_block_pattern & i_mask )
/* Picture with more than 2800 lines. */
i_vert_code += GetBits( &p_vpar->bit_stream, 3 ) << 7;
}
if( b_dp_scalable )
{
/* DATA_PARTITIONING scalability. */
RemoveBits( &p_vpar->bit_stream, 7 ); /* priority_breakpoint */
}
LoadQuantizerScale( p_vpar );
if( GetBits( &p_vpar->bit_stream, 1 ) )
{
/* intra_slice, slice_id */
RemoveBits( &p_vpar->bit_stream, 8 );
/* extra_information_slice */
while( GetBits( &p_vpar->bit_stream, 1 ) )
{
memset( p_mb->ppi_blocks[i_b], 0, 64*sizeof(dctelem_t) );
(*pppf_decode_block[b_mpeg2]
[p_mb->i_mb_type & MB_INTRA])
( p_vpar, p_mb, i_b );
/* Calculate block coordinates. */
p_mb->p_data[i_b] = p_data1
+ pi_y[p_vpar->mb.b_dct_type][i_b]
* p_vpar->sequence.i_width
+ pi_x[i_b];
RemoveBits( &p_vpar->bit_stream, 8 );
}
}
*pi_mb_address = (i_vert_code - 1)*p_vpar->sequence.i_mb_width;
/* Reset DC coefficients predictors (ISO/IEC 13818-2 7.2.1). */
p_vpar->slice.pi_dc_dct_pred[0] = p_vpar->slice.pi_dc_dct_pred[1]
= p_vpar->slice.pi_dc_dct_pred[2]
= 1 << (7 + p_vpar->picture.i_intra_dc_precision);
if( p_vpar->picture.b_error )
/* Reset motion vector predictors (ISO/IEC 13818-2 7.6.3.4). */
memset( p_vpar->slice.pppi_pmv, 0, 8*sizeof(int) );
do
{
/* Mark this block as skipped (better than green blocks), and go
* to the next slice. */
(*pi_mb_address)--;
vpar_DestroyMacroblock( &p_vpar->vfifo, p_mb );
return;
ParseMacroblock( p_vpar, pi_mb_address, i_mb_address_save,
i_mb_base, b_mpeg2, i_coding_type,
i_chroma_format, i_structure,
b_second_field );
i_mb_address_save = *pi_mb_address;
}
while( ShowBits( &p_vpar->bit_stream, 23 ) && !p_vpar->picture.b_error
&& !p_vpar->b_die );
NextStartCode( p_vpar );
}
/* chrominance */
p_data1 = p_mb->p_picture->p_u
+ p_mb->i_c_x
+ p_vpar->mb.i_c_y
* (p_vpar->sequence.i_chroma_width);
p_data2 = p_mb->p_picture->p_v
+ p_mb->i_c_x
+ p_vpar->mb.i_c_y
* (p_vpar->sequence.i_chroma_width);
for( i_b = 4; i_b < 4 + p_mb->i_chroma_nb_blocks;
i_b++, i_mask >>= 1 )
{
yuv_data_t * pp_data[2] = {p_data1, p_data2};
/*****************************************************************************
* PictureData : Parse off all macroblocks (ISO/IEC 13818-2 6.2.3.7)
*****************************************************************************/
static __inline__ void PictureData( vpar_thread_t * p_vpar, int i_mb_base,
boolean_t b_high, boolean_t b_dp_scalable,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
{
int i_mb_address = 0;
u32 i_dummy;
if( p_mb->i_coded_block_pattern & i_mask )
NextStartCode( p_vpar );
while( i_mb_address+i_mb_base < p_vpar->sequence.i_mb_size
&& !p_vpar->b_die )
{
if( ((i_dummy = ShowBits( &p_vpar->bit_stream, 32 ))
< SLICE_START_CODE_MIN) ||
(i_dummy > SLICE_START_CODE_MAX) )
{
memset( p_mb->ppi_blocks[i_b], 0, 64*sizeof(dctelem_t) );
(*pppf_decode_block[b_mpeg2]
[p_mb->i_mb_type & MB_INTRA])
( p_vpar, p_mb, i_b );
/* Calculate block coordinates. */
p_mb->p_data[i_b] = pp_data[i_b & 1]
+ pi_y[p_vpar->mb.b_dct_type][i_b]
* p_vpar->sequence.i_chroma_width
+ pi_x[i_b];
intf_DbgMsg("vpar debug: premature end of picture\n");
p_vpar->picture.b_error = 1;
break;
}
RemoveBits32( &p_vpar->bit_stream );
/* Decode slice data. */
SliceHeader( p_vpar, &i_mb_address, i_mb_base, i_dummy & 255,
b_high, b_dp_scalable, b_mpeg2, i_coding_type,
i_chroma_format, i_structure, b_second_field );
}
if( !p_vpar->picture.b_error )
/* Try to recover from error. If we missed less than half the
* number of macroblocks of the picture, mark the missed ones
* as skipped. */
if( p_vpar->picture.b_error &&
( (i_mb_address-i_mb_base) > (p_vpar->sequence.i_mb_size >> 1)
|| (i_structure != FRAME_STRUCTURE
&& (i_mb_address-i_mb_base) > (p_vpar->sequence.i_mb_size >> 2) ) ) )
{
UpdateContext( p_vpar, i_structure );
#ifndef VDEC_SMP
/* Decode the macroblock NOW ! */
vdec_DecodeMacroblock( p_vpar->pp_vdec[0], p_mb );
#endif
}
else
{
/* Mark this block as skipped (better than green blocks), and go
* to the next slice. */
(*pi_mb_address)--;
vpar_DestroyMacroblock( &p_vpar->vfifo, p_mb );
int i_mb;
p_vpar->picture.b_error = 0;
for( i_mb = i_mb_address + 1;
i_mb < (p_vpar->sequence.i_mb_size
<< (i_structure != FRAME_STRUCTURE));
i_mb++ )
{
SkippedMacroblock( p_vpar, i_mb, i_mb_base, i_coding_type,
i_chroma_format, i_structure, b_second_field );
}
}
}
/*****************************************************************************
* vpar_ParseMacroblockVWXYZ : Parse the next macroblock ; specific functions
* vpar_PictureDataVWXYZ : Parse the next macroblock ; specific functions
*****************************************************************************
* V = MPEG2 ?
* W = coding type ?
......@@ -1783,102 +1913,79 @@ static __inline__ void ParseMacroblock(
* Y = structure ?
* Z = second field ?
*****************************************************************************/
void vpar_ParseMacroblock2I420F0( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
void vpar_PictureDataGENERIC( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, 0, 1,
I_CODING_TYPE, CHROMA_420, FRAME_STRUCTURE, 0 );
PictureData( p_vpar, i_mb_base, (p_vpar->sequence.i_height > 2800),
(p_vpar->sequence.i_scalable_mode == SC_DP),
p_vpar->sequence.b_mpeg2, p_vpar->picture.i_coding_type,
p_vpar->sequence.i_chroma_format,
p_vpar->picture.i_structure,
(p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure) );
}
void vpar_ParseMacroblock2P420F0( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
#if (VPAR_OPTIM_LEVEL > 0)
/* Optimizations for frame pictures */
void vpar_PictureData2I420F0( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, 0, 1,
P_CODING_TYPE, CHROMA_420, FRAME_STRUCTURE, 0 );
PictureData( p_vpar, 0, 0, 0, 1, I_CODING_TYPE, CHROMA_420,
FRAME_STRUCTURE, 0 );
}
void vpar_ParseMacroblock2B420F0( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
void vpar_PictureData2P420F0( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, 0, 1,
B_CODING_TYPE, CHROMA_420, FRAME_STRUCTURE, 0 );
PictureData( p_vpar, 0, 0, 0, 1, P_CODING_TYPE, CHROMA_420,
FRAME_STRUCTURE, 0 );
}
void vpar_ParseMacroblock2I420T0( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
void vpar_PictureData2B420F0( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, 0, 1,
I_CODING_TYPE, CHROMA_420, TOP_FIELD, 0 );
PictureData( p_vpar, 0, 0, 0, 1, B_CODING_TYPE, CHROMA_420,
FRAME_STRUCTURE, 0 );
}
#endif
void vpar_ParseMacroblock2P420T0( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
#if (VPAR_OPTIM_LEVEL > 1)
/* Optimizations for field pictures */
void vpar_PictureData2I420TZ( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, 0, 1,
P_CODING_TYPE, CHROMA_420, TOP_FIELD, 0 );
PictureData( p_vpar, i_mb_base, 0, 0, 1, I_CODING_TYPE, CHROMA_420,
TOP_FIELD, (p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure) );
}
void vpar_ParseMacroblock2B420T0( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
void vpar_PictureData2P420TZ( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, 0, 1,
B_CODING_TYPE, CHROMA_420, TOP_FIELD, 0 );
PictureData( p_vpar, i_mb_base, 0, 0, 1, P_CODING_TYPE, CHROMA_420,
TOP_FIELD, (p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure) );
}
void vpar_ParseMacroblock2I420B1( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
void vpar_PictureData2B420TZ( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, i_mb_base, 1,
I_CODING_TYPE, CHROMA_420, BOTTOM_FIELD, 1 );
PictureData( p_vpar, i_mb_base, 0, 0, 1, B_CODING_TYPE, CHROMA_420,
TOP_FIELD, (p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure) );
}
void vpar_ParseMacroblock2P420B1( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
void vpar_PictureData2I420BZ( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, i_mb_base, 1,
P_CODING_TYPE, CHROMA_420, BOTTOM_FIELD, 1 );
PictureData( p_vpar, i_mb_base, 0, 0, 1, I_CODING_TYPE, CHROMA_420,
BOTTOM_FIELD, (p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure) );
}
void vpar_ParseMacroblock2B420B1( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
void vpar_PictureData2P420BZ( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, i_mb_base, 1,
B_CODING_TYPE, CHROMA_420, BOTTOM_FIELD, 1 );
PictureData( p_vpar, i_mb_base, 0, 0, 1, P_CODING_TYPE, CHROMA_420,
BOTTOM_FIELD, (p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure) );
}
void vpar_ParseMacroblockGENERIC( vpar_thread_t * p_vpar, int * pi_mb_address,
int i_mb_previous, int i_mb_base,
boolean_t b_mpeg2, int i_coding_type,
int i_chroma_format, int i_structure,
boolean_t b_second_field )
void vpar_PictureData2B420BZ( vpar_thread_t * p_vpar, int i_mb_base )
{
ParseMacroblock( p_vpar, pi_mb_address, i_mb_previous, i_mb_base, b_mpeg2,
i_coding_type, i_chroma_format, i_structure, b_second_field );
}
\ No newline at end of file
PictureData( p_vpar, i_mb_base, 0, 0, 1, B_CODING_TYPE, CHROMA_420,
BOTTOM_FIELD, (p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure) );
}
#endif
......@@ -38,6 +38,11 @@
#include "vpar_motion.h"
#include "video_fifo.h"
/*
* Function pointer
*/
typedef void (*f_picture_data_t)( vpar_thread_t*, int );
/*
* Local prototypes
*/
......@@ -45,21 +50,6 @@ static __inline__ void NextStartCode( vpar_thread_t * p_vpar );
static void SequenceHeader( vpar_thread_t * p_vpar );
static void GroupHeader( vpar_thread_t * p_vpar );
static void PictureHeader( vpar_thread_t * p_vpar );
static void SliceHeader00( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code );
static void SliceHeader01( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code );
static void SliceHeader10( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code );
static void SliceHeader11( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code );
static __inline__ void SliceHeader( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code );
static void ExtensionAndUserData( vpar_thread_t * p_vpar );
static void QuantMatrixExtension( vpar_thread_t * p_vpar );
static void SequenceScalableExtension( vpar_thread_t * p_vpar );
......@@ -154,20 +144,6 @@ u8 pi_scan[2][64] =
* Local inline functions.
*/
/*****************************************************************************
* NextStartCode : Find the next start code
*****************************************************************************/
static __inline__ void NextStartCode( vpar_thread_t * p_vpar )
{
/* Re-align the buffer on an 8-bit boundary */
RealignBits( &p_vpar->bit_stream );
while( ShowBits( &p_vpar->bit_stream, 24 ) != 0x01L && !p_vpar->b_die )
{
RemoveBits( &p_vpar->bit_stream, 8 );
}
}
/*****************************************************************************
* ReferenceUpdate : Update the reference pointers when we have a new picture
*****************************************************************************/
......@@ -430,7 +406,7 @@ static void SequenceHeader( vpar_thread_t * p_vpar )
p_vpar->sequence.i_size = p_vpar->sequence.i_width
* p_vpar->sequence.i_height;
/* Update chromatic information */
/* Update chromatic information. */
switch( p_vpar->sequence.i_chroma_format )
{
case CHROMA_420:
......@@ -454,35 +430,16 @@ static void SequenceHeader( vpar_thread_t * p_vpar )
p_vpar->sequence.i_chroma_mb_height = 16;
}
/* Slice Header functions */
if( p_vpar->sequence.i_height <= 2800 )
{
if( p_vpar->sequence.i_scalable_mode != SC_DP )
{
p_vpar->sequence.pf_slice_header = SliceHeader00;
}
else
{
p_vpar->sequence.pf_slice_header = SliceHeader01;
}
}
else
{
if( p_vpar->sequence.i_scalable_mode != SC_DP )
{
p_vpar->sequence.pf_slice_header = SliceHeader10;
}
else
{
p_vpar->sequence.pf_slice_header = SliceHeader11;
}
}
/* Reset scalable_mode. */
p_vpar->sequence.i_scalable_mode = SC_NONE;
#if 0
if( p_vpar->sequence.i_width != i_width_save
|| p_vpar->sequence.i_height != i_height_save )
{
/* What do we do in case of a size change ??? */
}
#endif
/* Extension and User data */
ExtensionAndUserData( p_vpar );
......@@ -503,38 +460,47 @@ static void GroupHeader( vpar_thread_t * p_vpar )
*****************************************************************************/
static void PictureHeader( vpar_thread_t * p_vpar )
{
static f_parse_mb_t ppf_parse_mb[4][4][2] =
/* Table of optimized PictureData functions. */
static f_picture_data_t ppf_picture_data[4][4] =
{
{
{NULL, NULL}, {NULL, NULL}, {NULL, NULL}, {NULL, NULL}
NULL, NULL, NULL, NULL
},
{
/* I_CODING_TYPE */
{NULL, NULL},
{vpar_ParseMacroblock2I420T0, vpar_ParseMacroblockGENERIC},
{vpar_ParseMacroblockGENERIC, vpar_ParseMacroblock2I420B1},
{vpar_ParseMacroblock2I420F0, vpar_ParseMacroblock2I420F0}
/* TOP_FIELD */
#if (VPAR_OPTIM_LEVEL > 1)
NULL, vpar_PictureData2I420TZ, vpar_PictureData2P420TZ,
vpar_PictureData2B420TZ
#else
NULL, vpar_PictureDataGENERIC, vpar_PictureDataGENERIC,
vpar_PictureDataGENERIC
#endif
},
{
/* P_CODING_TYPE */
{NULL, NULL},
{vpar_ParseMacroblock2P420T0, vpar_ParseMacroblockGENERIC},
{vpar_ParseMacroblockGENERIC, vpar_ParseMacroblock2P420B1},
{vpar_ParseMacroblock2P420F0, vpar_ParseMacroblock2P420F0}
/* BOTTOM_FIELD */
#if (VPAR_OPTIM_LEVEL > 1)
NULL, vpar_PictureData2I420BZ, vpar_PictureData2P420BZ,
vpar_PictureData2B420BZ
#else
NULL, vpar_PictureDataGENERIC, vpar_PictureDataGENERIC,
vpar_PictureDataGENERIC
#endif
},
{
/* B_CODING_TYPE */
{NULL, NULL},
{vpar_ParseMacroblock2B420T0, vpar_ParseMacroblockGENERIC},
{vpar_ParseMacroblockGENERIC, vpar_ParseMacroblock2B420B1},
{vpar_ParseMacroblock2B420F0, vpar_ParseMacroblock2B420F0}
/* FRAME_PICTURE */
#if (VPAR_OPTIM_LEVEL > 0)
NULL, vpar_PictureData2I420F0, vpar_PictureData2P420F0,
vpar_PictureData2B420F0
#else
NULL, vpar_PictureDataGENERIC, vpar_PictureDataGENERIC,
vpar_PictureDataGENERIC
#endif
}
};
int i_structure;
int i_mb_address, i_mb_base;
int i_mb_base;
boolean_t b_parsable;
u32 i_dummy;
#ifdef VDEC_SMP
int i_mb;
#endif
......@@ -694,7 +660,7 @@ static void PictureHeader( vpar_thread_t * p_vpar )
p_vpar->sequence.i_height ) )
== NULL )
{
intf_DbgMsg("vpar debug: allocation error in vout_CreatePicture\n");
intf_DbgMsg("vpar debug: allocation error in vout_CreatePicture, delaying\n");
if( p_vpar->b_die || p_vpar->b_error )
{
return;
......@@ -757,44 +723,24 @@ static void PictureHeader( vpar_thread_t * p_vpar )
i_mb_base = 0;
p_vpar->mb.i_l_y = p_vpar->mb.i_c_y = 0;
}
i_mb_address = 0;
p_vpar->mb.i_l_x = p_vpar->mb.i_c_x = 0;
/* Extension and User data. */
ExtensionAndUserData( p_vpar );
/* Macroblock parsing function. */
/* Picture data (ISO/IEC 13818-2 6.2.3.7). */
if( p_vpar->sequence.i_chroma_format != CHROMA_420
|| !p_vpar->sequence.b_mpeg2 )
|| !p_vpar->sequence.b_mpeg2 || p_vpar->sequence.i_height > 2800
|| p_vpar->sequence.i_scalable_mode == SC_DP )
{
p_vpar->picture.pf_parse_mb = vpar_ParseMacroblockGENERIC;
/* Weird stream. Use the slower generic function. */
vpar_PictureDataGENERIC( p_vpar, i_mb_base );
}
else
{
p_vpar->picture.pf_parse_mb =
ppf_parse_mb[p_vpar->picture.i_coding_type]
[p_vpar->picture.i_structure]
[(p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure)];
}
/* Picture data (ISO/IEC 13818-2 6.2.3.7). */
NextStartCode( p_vpar );
while( i_mb_address+i_mb_base < p_vpar->sequence.i_mb_size
&& !p_vpar->b_die )
{
if( ((i_dummy = ShowBits( &p_vpar->bit_stream, 32 ))
< SLICE_START_CODE_MIN) ||
(i_dummy > SLICE_START_CODE_MAX) )
{
intf_DbgMsg("vpar debug: premature end of picture\n");
p_vpar->picture.b_error = 1;
break;
}
RemoveBits32( &p_vpar->bit_stream );
/* Decode slice data. */
p_vpar->sequence.pf_slice_header( p_vpar, &i_mb_address, i_mb_base, i_dummy & 255 );
/* Try to find an optimized function. */
ppf_picture_data[p_vpar->picture.i_structure]
[p_vpar->picture.i_coding_type]( p_vpar, i_mb_base );
}
if( p_vpar->b_die || p_vpar->b_error )
......@@ -848,99 +794,6 @@ static void PictureHeader( vpar_thread_t * p_vpar )
#undef P_picture
}
/*****************************************************************************
* SliceHeader : Parse the next slice structure
*****************************************************************************/
static __inline__ void SliceHeader( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code )
{
/* DC predictors initialization table */
static int pi_dc_dct_reinit[4] = {128,256,512,1024};
int i_mb_address_save = *pi_mb_address;
p_vpar->picture.b_error = 0;
/* slice_vertical_position_extension and priority_breakpoint already done */
LoadQuantizerScale( p_vpar );
if( GetBits( &p_vpar->bit_stream, 1 ) )
{
/* intra_slice, slice_id */
RemoveBits( &p_vpar->bit_stream, 8 );
/* extra_information_slice */
while( GetBits( &p_vpar->bit_stream, 1 ) )
{
RemoveBits( &p_vpar->bit_stream, 8 );
}
}
*pi_mb_address = (i_vert_code - 1)*p_vpar->sequence.i_mb_width;
/* Reset DC coefficients predictors (ISO/IEC 13818-2 7.2.1). Why
* does the reference decoder put 0 instead of the normative values ? */
p_vpar->slice.pi_dc_dct_pred[0] = p_vpar->slice.pi_dc_dct_pred[1]
= p_vpar->slice.pi_dc_dct_pred[2]
= pi_dc_dct_reinit[p_vpar->picture.i_intra_dc_precision];
/* Reset motion vector predictors (ISO/IEC 13818-2 7.6.3.4). */
memset( p_vpar->slice.pppi_pmv, 0, 8*sizeof(int) );
do
{
p_vpar->picture.pf_parse_mb( p_vpar, pi_mb_address,
i_mb_address_save, i_mb_base,
p_vpar->sequence.b_mpeg2,
p_vpar->picture.i_coding_type,
p_vpar->sequence.i_chroma_format,
p_vpar->picture.i_structure,
(p_vpar->picture.i_structure !=
p_vpar->picture.i_current_structure) );
i_mb_address_save = *pi_mb_address;
}
while( ShowBits( &p_vpar->bit_stream, 23 ) && !p_vpar->picture.b_error
&& !p_vpar->b_die );
NextStartCode( p_vpar );
}
/*****************************************************************************
* SliceHeaderXY : Parse the next slice structure
*****************************************************************************
* X = i_height > 2800 ?
* Y = scalable_mode == SC_DP ?
*****************************************************************************/
static void SliceHeader00( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code )
{
SliceHeader( p_vpar, pi_mb_address, i_mb_base, i_vert_code );
}
static void SliceHeader01( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code )
{
RemoveBits( &p_vpar->bit_stream, 7 ); /* priority_breakpoint */
SliceHeader( p_vpar, pi_mb_address, i_mb_base, i_vert_code );
}
static void SliceHeader10( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code )
{
i_vert_code += GetBits( &p_vpar->bit_stream, 3 ) << 7;
SliceHeader( p_vpar, pi_mb_address, i_mb_base, i_vert_code );
}
static void SliceHeader11( vpar_thread_t * p_vpar,
int * pi_mb_address, int i_mb_base,
u32 i_vert_code )
{
i_vert_code += GetBits( &p_vpar->bit_stream, 3 ) << 7;
RemoveBits( &p_vpar->bit_stream, 7 ); /* priority_breakpoint */
SliceHeader( p_vpar, pi_mb_address, i_mb_base, i_vert_code );
}
/*****************************************************************************
* ExtensionAndUserData : Parse the extension_and_user_data structure
*****************************************************************************/
......