avcodec/av1dec: Change bit_depth to int
Suggested-by: James Almer <jamrial@gmail.com>
Signed-off-by: Michael Niedermayer <michael@niedermayer.cc>
(cherry picked from commit 69b4d9736b0d0ad01c41fcae2d66eaa534b76969)
Signed-off-by: Michael Niedermayer <michael@niedermayer.cc>
parent: c5671e9de9
commit: 8170914a34
@@ -468,7 +468,7 @@ static int get_tiles_info(AVCodecContext *avctx, const AV1RawTileGroup *tile_gro
 
 static enum AVPixelFormat get_sw_pixel_format(void *logctx,
                                               const AV1RawSequenceHeader *seq)
 {
-    uint8_t bit_depth;
+    int bit_depth;
     enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
 
     if (seq->seq_profile == 2 && seq->color_config.high_bitdepth)
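
For context, a minimal sketch of the pattern this hunk touches. It is not the actual av1dec.c code: the flag names passed in (high_bitdepth, twelve_bit) mirror the sequence-header fields used in the diff context, but the helper and its signature are illustrative assumptions. The point of the change is that bit_depth is a small derived local that gets compared against constants and logged, so a plain int avoids needless narrowing and matches a %d conversion directly.

    /* Illustrative sketch only, not the FFmpeg implementation. */
    #include <stdio.h>

    static int pick_bit_depth(int seq_profile, int high_bitdepth, int twelve_bit)
    {
        int bit_depth;                        /* was uint8_t before this commit */

        /* Profile 2 with high_bitdepth set allows 10- or 12-bit content;
         * otherwise high_bitdepth alone selects 10-bit, else 8-bit. */
        if (seq_profile == 2 && high_bitdepth)
            bit_depth = twelve_bit ? 12 : 10;
        else
            bit_depth = high_bitdepth ? 10 : 8;

        printf("bit depth: %d\n", bit_depth); /* int matches %d without a cast */
        return bit_depth;
    }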