This repository has been archived by the owner on Oct 23, 2023. It is now read-only.

Fix low latency #849

Merged
merged 4 commits on Jun 29, 2018

2 changes: 1 addition & 1 deletion configure.ac
@@ -8,7 +8,7 @@ m4_define([yami_api_major_version], 0)
# update this for every release when micro version large than zero
m4_define([yami_api_minor_version], 5)
# change this for any api change
m4_define([yami_api_micro_version], 4)
m4_define([yami_api_micro_version], 5)
m4_define([yami_api_version],
[yami_api_major_version.yami_api_minor_version.yami_api_micro_version])

96 changes: 96 additions & 0 deletions decoder/DecoderApi_unittest.cpp
@@ -106,6 +106,49 @@ static FrameData g_jpegdata[] = {
g_EOF,
};

static FrameData g_h264data_lowlatency[] = {
g_avc8x8I,
g_avc8x8P,
g_avc8x16,
g_avc16x16,
g_avc8x8I,
g_avc8x8P,
g_avc8x8I,
g_avc8x8P,
g_EOF,
};

static FrameData g_vp8data_lowlatency[] = {
g_vp8_8x8I,
g_vp8_8x8P1,
g_vp8_8x8P2,
g_vp8_16x16,
g_vp8_8x8I,
g_vp8_8x8P1,
g_vp8_8x8P2,
g_EOF,
};

static FrameData g_vp9data_lowlatency[] = {
g_vp9_8x8I,
g_vp9_8x8P1,
g_vp9_8x8P2,
g_vp9_16x16,
g_vp9_8x8I,
g_vp9_8x8P1,
g_vp9_8x8P2,
g_EOF,
};

static FrameData g_jpegdata_lowlatency[] = {
g_jpeg1_8x8,
g_jpeg2_8x8,
g_jpeg_16x16,
g_jpeg1_8x8,
g_jpeg2_8x8,
g_EOF,
};

static const std::string getFullTestName()
{
const ::testing::TestInfo* const info = ::testing::UnitTest::GetInstance()->current_test_info();
@@ -218,6 +261,11 @@ class DecodeApiTest
public ::testing::WithParamInterface<TestDecodeFrames::Shared> {
};

class DecodeApiTestLowlatency
: public ::testing::Test,
public ::testing::WithParamInterface<TestDecodeFrames::Shared> {
};

bool checkOutput(SharedPtr<IVideoDecoder>& decoder, std::deque<VideoFormatInfo>& formats)
{
SharedPtr<VideoFrame> output(decoder->getOutput());
@@ -408,6 +456,46 @@ TEST_P(DecodeApiTest, Flush)
EXPECT_EQ(outFrames, size);
}

TEST_P(DecodeApiTestLowlatency, Format_Change)
{
SharedPtr<IVideoDecoder> decoder;
TestDecodeFrames frames = *GetParam();
decoder.reset(createVideoDecoder(frames.getMime()), releaseVideoDecoder);
ASSERT_TRUE(bool(decoder));

VideoConfigBuffer config;
memset(&config, 0, sizeof(config));
config.enableLowLatency = true;
ASSERT_EQ(YAMI_SUCCESS, decoder->start(&config));

VideoDecodeBuffer buffer;
memset(&buffer, 0, sizeof(buffer));
FrameInfo info;
uint32_t inFrames = 0;
uint32_t outFrames = 0;
//keep previous resolution

while (frames.getFrame(buffer, info)) {
buffer.flag = VIDEO_DECODE_BUFFER_FLAG_FRAME_END;
YamiStatus status = decoder->decode(&buffer);
if (status == YAMI_DECODE_FORMAT_CHANGE) {

//send buffer again
status = decoder->decode(&buffer);
if (YAMI_UNSUPPORTED == status) {
RecordProperty("skipped", true);
std::cout << "[ SKIPPED ] " << getFullTestName()
<< " Hw does not support this decoder." << std::endl;
return;
}
}
inFrames++;
while (bool(decoder->getOutput()))
outFrames++;
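//in low-latency mode every input frame should be output before the next input is sent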
EXPECT_EQ(inFrames, outFrames);
}
}

/** Teach Google Test how to print a TestDecodeFrames::Shared object */
void PrintTo(const TestDecodeFrames::Shared& t, std::ostream* os)
{
@@ -422,4 +510,12 @@ INSTANTIATE_TEST_CASE_P(
TestDecodeFrames::create(g_vp8data, YAMI_MIME_VP8),
TestDecodeFrames::create(g_vp9data, YAMI_MIME_VP9),
TestDecodeFrames::create(g_jpegdata, YAMI_MIME_JPEG)));

INSTANTIATE_TEST_CASE_P(
VaapiDecoderLowlatency, DecodeApiTestLowlatency,
::testing::Values(
TestDecodeFrames::create(g_h264data_lowlatency, YAMI_MIME_H264),
TestDecodeFrames::create(g_vp8data_lowlatency, YAMI_MIME_VP8),
TestDecodeFrames::create(g_vp9data_lowlatency, YAMI_MIME_VP9),
TestDecodeFrames::create(g_jpegdata_lowlatency, YAMI_MIME_JPEG)));
}
26 changes: 8 additions & 18 deletions decoder/vaapidecoder_h264.cpp
@@ -336,8 +336,6 @@ VaapiDecoderH264::DPB::DPB(OutputCallback output)
, m_maxFrameNum(0)
, m_maxNumRefFrames(0)
, m_maxDecFrameBuffering(H264_MAX_REFRENCE_SURFACE_NUMBER)
, m_isOutputStarted(false)
, m_lastOutputPoc(0)
{
}

@@ -1021,7 +1019,6 @@ bool VaapiDecoderH264::DPB::output(const PicturePtr& picture)
picture->m_picOutputFlag = false;

DEBUG("DPB: output picture(Poc:%d)", picture->m_poc);
m_lastOutputPoc = picture->m_poc;
return m_output(picture) == YAMI_SUCCESS;
}

@@ -1106,24 +1103,15 @@ bool VaapiDecoderH264::DPB::add(const PicturePtr& picture)
}

if (m_isLowLatencymode)
return outputReadyFrames();
forEach(bind(&VaapiDecoderH264::DPB::outputReadyFrame, this, _1));

return true;
}

bool VaapiDecoderH264::DPB::outputReadyFrames()
void VaapiDecoderH264::DPB::outputReadyFrame(const PicturePtr& picture)
{
PictureList::iterator it;
while(true) {
it = find_if(m_pictures.begin(), m_pictures.end(), isOutputNeeded);
if (it == m_pictures.end())
return true;
if (isFrame(*it) && (!m_isOutputStarted || ((*it)->m_poc -m_lastOutputPoc == 1))) {
output(*it);
m_isOutputStarted = true;
} else {
return true;
}
if (isOutputNeeded(picture)) {
output(picture);
}
}

@@ -1133,8 +1121,6 @@ void VaapiDecoderH264::DPB::flush()
clearRefSet();
m_pictures.clear();
m_prevPicture.reset();
m_isOutputStarted = false;
m_lastOutputPoc = 0;
}

VaapiDecoderH264::VaapiDecoderH264()
@@ -1845,6 +1831,10 @@ YamiStatus VaapiDecoderH264::decode(VideoDecodeBuffer* buffer)
return status;
}
}
if (buffer->flag & VIDEO_DECODE_BUFFER_FLAG_FRAME_END) {
//send current buffer to libva
decodeCurrent();
}
return lastError;
}

4 changes: 1 addition & 3 deletions decoder/vaapidecoder_h264.h
@@ -67,7 +67,7 @@ class VaapiDecoderH264 : public VaapiDecoderBase {
bool newStream, bool contextChanged,
uint32_t maxDecFrameBuffering);
bool add(const PicturePtr&);
bool outputReadyFrames();
void outputReadyFrame(const PicturePtr&);
void initReference(const PicturePtr&, const SliceHeader* const);
void flush();

@@ -127,8 +127,6 @@ class VaapiDecoderH264 : public VaapiDecoderBase {
uint32_t m_maxNumRefFrames;
uint32_t m_maxDecFrameBuffering;
YamiParser::H264::DecRefPicMarking m_decRefPicMarking;
bool m_isOutputStarted;
int32_t m_lastOutputPoc;
};

YamiStatus decodeNalu(NalUnit*);
5 changes: 5 additions & 0 deletions decoder/vaapidecoder_h265.cpp
@@ -1187,6 +1187,11 @@ YamiStatus VaapiDecoderH265::decode(VideoDecodeBuffer* buffer)
}
}
}

if (buffer->flag & VIDEO_DECODE_BUFFER_FLAG_FRAME_END) {
//send current buffer to libva
decodeCurrent();
}
return lastError;
}

7 changes: 7 additions & 0 deletions interface/VideoDecoderDefs.h
@@ -35,6 +35,13 @@ typedef enum {
HAS_VA_PROFILE = 0x08,
} VIDEO_BUFFER_FLAG;

typedef enum {
//This tells the decoder that the buffer holds a whole frame,
//so it can decode immediately; otherwise it may have to wait
//for the next frame boundary.
VIDEO_DECODE_BUFFER_FLAG_FRAME_END = 0x1,
} VIDEO_DECODE_BUFFER_FLAG;

typedef struct {
uint8_t *data;
size_t size;
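
Below is a minimal caller-side sketch of the intended low-latency flow, mirroring the DecodeApiTestLowlatency test above: enable enableLowLatency in the start config, mark each complete input frame with VIDEO_DECODE_BUFFER_FLAG_FRAME_END, and drain getOutput() after every decode() call. It is an illustration only, not code from this PR; the VideoDecoderHost.h header, the YamiMediaCodec namespace, and the helper name decodeOneFrameLowLatency are assumptions.

//Caller-side sketch (illustration only, see note above).
#include <string.h>
#include <VideoDecoderHost.h> //assumed host header providing createVideoDecoder()/releaseVideoDecoder()

using namespace YamiMediaCodec; //assumed namespace

void decodeOneFrameLowLatency(uint8_t* frameData, size_t frameSize)
{
    SharedPtr<IVideoDecoder> decoder;
    decoder.reset(createVideoDecoder(YAMI_MIME_H264), releaseVideoDecoder);
    if (!decoder)
        return;

    VideoConfigBuffer config;
    memset(&config, 0, sizeof(config));
    config.enableLowLatency = true; //opt in to the low-latency path
    if (decoder->start(&config) != YAMI_SUCCESS)
        return;

    VideoDecodeBuffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.data = frameData;
    buffer.size = frameSize;
    buffer.flag = VIDEO_DECODE_BUFFER_FLAG_FRAME_END; //the buffer holds one whole frame

    YamiStatus status = decoder->decode(&buffer);
    if (status == YAMI_DECODE_FORMAT_CHANGE)
        status = decoder->decode(&buffer); //resend the same buffer after a format change

    //with FRAME_END set, output should be available without waiting for the next frame
    SharedPtr<VideoFrame> frame;
    while (bool(frame = decoder->getOutput())) {
        //consume the decoded frame here
    }

    decoder->stop();
}

In low-latency mode the DPB outputs each ready frame as soon as it is added, so this loop is expected to return one frame per input frame, which is exactly what the new test asserts.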