diff --git a/NVEncCore/rgy_output_avcodec.cpp b/NVEncCore/rgy_output_avcodec.cpp
index 5843f664..726667d4 100644
--- a/NVEncCore/rgy_output_avcodec.cpp
+++ b/NVEncCore/rgy_output_avcodec.cpp
@@ -2900,9 +2900,9 @@ RGY_ERR RGYOutputAvcodec::WriteNextFrameInternal(RGYBitstream *bitstream, int64_
 
         //initialize dts generation
         //how many frames back to start from depends on the b-pyramid, so use the encoder's information whenever possible
+        const auto srcTimebase = (ENCODER_QSV) ? HW_NATIVE_TIMEBASE : m_Mux.video.bitstreamTimebase;
 #if ENCODER_QSV
         if (bitstream->dts() != MFX_TIMESTAMP_UNKNOWN) {
-            const auto srcTimebase = (ENCODER_QSV) ? HW_NATIVE_TIMEBASE : m_Mux.video.bitstreamTimebase;
             m_VideoOutputInfo.videoDelay = (m_VideoOutputInfo.codec == RGY_CODEC_AV1 && AV1_TIMESTAMP_OVERRIDE) ? 0 : -1 * (int)av_rescale_q(bitstream->dts() - bitstream->pts(), srcTimebase, av_inv_q(m_Mux.video.outputFps));
         }
 #endif
@@ -2911,8 +2911,9 @@ RGY_ERR RGYOutputAvcodec::WriteNextFrameInternal(RGYBitstream *bitstream, int64_
 
         const AVRational fpsTimebase = (m_Mux.video.afs) ? av_inv_q(av_mul_q(m_Mux.video.outputFps, av_make_q(4, 5))) : av_inv_q(m_Mux.video.outputFps);
         const AVRational streamTimebase = m_Mux.video.streamOut->time_base;
+        const auto firstPacketPts = av_rescale_q(bitstream->pts(), srcTimebase, streamTimebase);
         for (int i = m_Mux.video.fpsBaseNextDts; i < 0; i++) {
-            m_Mux.video.timestampList.add(bitstream->pts() + av_rescale_q(i, fpsTimebase, streamTimebase));
+            m_Mux.video.timestampList.add(firstPacketPts + av_rescale_q(i, fpsTimebase, streamTimebase));
         }
     }
 
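
Reviewer note (not part of the patch): the change fixes a timebase mismatch when seeding the dts timestamp list. `bitstream->pts()` is still in the source timebase (`HW_NATIVE_TIMEBASE` on QSV, `m_Mux.video.bitstreamTimebase` otherwise), while the `av_rescale_q(i, fpsTimebase, streamTimebase)` offsets are in the muxer's stream timebase, so the old code summed values in two different units. Hoisting `srcTimebase` out of the `#if ENCODER_QSV` block also makes it available to the new rescale. Below is a minimal standalone sketch of the arithmetic; the concrete timebase and pts values are assumptions for illustration, not taken from the patch.

// Sketch of the unit mismatch the patch fixes. All numeric values here are
// assumed for illustration only.
extern "C" {
#include <libavutil/rational.h>
#include <libavutil/mathematics.h>
}
#include <cstdio>

int main() {
    const AVRational srcTimebase    = av_make_q(1, 90000);   // e.g. a 90 kHz HW timestamp clock (assumed)
    const AVRational streamTimebase = av_make_q(1, 12800);   // hypothetical muxer stream time_base
    const AVRational fpsTimebase    = av_inv_q(av_make_q(30000, 1001)); // one frame at 29.97 fps

    const int64_t pts = 3003; // first packet pts, still in srcTimebase units (assumed value)

    // Old code: pts (srcTimebase units) plus an offset (streamTimebase units) -> mixed units.
    const int64_t buggy = pts + av_rescale_q(-2, fpsTimebase, streamTimebase);

    // Patched code: rescale the pts into streamTimebase first, then add the offset.
    const int64_t firstPacketPts = av_rescale_q(pts, srcTimebase, streamTimebase);
    const int64_t fixed = firstPacketPts + av_rescale_q(-2, fpsTimebase, streamTimebase);

    printf("buggy=%lld fixed=%lld\n", (long long)buggy, (long long)fixed);
    return 0;
}

With these assumed values, the pre-patch expression yields an entry that is off by the unrescaled pts (3003 source ticks instead of 427 stream ticks), which skews every pre-seeded dts in `timestampList` by the same amount.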