Set delay after decoder creation

commit 772db74bfc
parent 213a7acdd0
Author: MaranBr
Date: 2025-07-23 16:04:23 -04:00
3 changed files with 4 additions and 4 deletions

@@ -41,7 +41,7 @@ void Decoder::Decode() {
     if (!frame.get()) {
         LOG_ERROR(HW_GPU,
-                  "Nvdec {} dailed to decode interlaced frame for top 0x{:X} bottom 0x{:X}", id,
+                  "Nvdec {} failed to decode interlaced frame for top 0x{:X} bottom 0x{:X}", id,
                   luma_top, luma_bottom);
     }
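
Note: the message above uses fmt-style placeholders (yuzu's logging macros format through fmt); "0x{:X}" prints the offsets as uppercase hexadecimal. A minimal standalone sketch of the same formatting, outside the LOG_ERROR macro:

#include <fmt/core.h>

int main() {
    const int id = 0;
    const unsigned long long luma_top = 0x1000, luma_bottom = 0x2000;
    // Same format string as the fixed log line: {} for the decoder id,
    // 0x{:X} for the offsets as uppercase hex.
    fmt::print("Nvdec {} failed to decode interlaced frame for top 0x{:X} bottom 0x{:X}\n",
               id, luma_top, luma_bottom);
}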

@@ -282,10 +282,10 @@ bool DecodeApi::Initialize(Tegra::Host1x::NvdecCommon::VideoCodec codec) {
     m_decoder_context.emplace(*m_decoder);

     // Enable GPU decoding if requested.
-    //if (Settings::values.nvdec_emulation.GetValue() == Settings::NvdecEmulation::Gpu) {
+    if (Settings::values.nvdec_emulation.GetValue() == Settings::NvdecEmulation::Gpu) {
         m_hardware_context.emplace();
         m_hardware_context->InitializeForDecoder(*m_decoder_context, *m_decoder);
-    //}
+    }

     // Open the decoder context.
     if (!m_decoder_context->OpenContext(*m_decoder)) {
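
Note: this hunk restores the runtime gate around GPU-accelerated decoding. m_hardware_context appears to be an optional-like member (it is emplace()d, then dereferenced) that is only constructed when the nvdec_emulation setting selects GPU. A minimal sketch of that gating pattern, using hypothetical stand-in names rather than the actual yuzu types:

#include <optional>

enum class NvdecEmulation { Cpu, Gpu };

struct HardwareContext {
    void InitializeForDecoder() { /* acquire GPU-side decoding resources */ }
};

// Hypothetical stand-ins: the real members live on DecodeApi.
std::optional<HardwareContext> hardware_context;

void Initialize(NvdecEmulation setting) {
    // The optional stays empty unless GPU decoding is requested,
    // so the CPU path allocates no GPU-side state.
    if (setting == NvdecEmulation::Gpu) {
        hardware_context.emplace();
        hardware_context->InitializeForDecoder();
    }
}

int main() {
    Initialize(NvdecEmulation::Gpu); // constructs and initializes the context
}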

@@ -45,7 +45,7 @@ private:
     NvdecCommon::NvdecRegisters regs{};
     std::unique_ptr<Decoder> decoder;
-    bool wait_needed{false};
+    bool wait_needed{true};
 };

 } // namespace Host1x
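
Note: this header change is what gives the commit its title: wait_needed now starts out true, so the first wait happens right after the decoder is created rather than only after some later trigger sets the flag. The consumer of the flag is not part of this diff; the sketch below shows one plausible one-shot-delay pattern, again with hypothetical names:

#include <chrono>
#include <thread>

class Host1xNvdec {
public:
    // Hypothetical consumer: the first submission after construction sees
    // wait_needed == true and inserts a one-shot settling delay.
    void Submit() {
        if (wait_needed) {
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
            wait_needed = false; // delay only once
        }
        // ... forward the work to the decoder ...
    }

private:
    // Defaulting to true extends the delay to the window immediately
    // after decoder creation, matching the commit title.
    bool wait_needed{true};
};

int main() {
    Host1xNvdec nvdec;
    nvdec.Submit(); // waits once
    nvdec.Submit(); // no further delay
}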