Synchronize video frame delay / skip with the audio playback progress.

This commit is contained in:
John Preston 2016-07-10 22:44:55 +03:00
parent 1fee0822fb
commit 41cd427834
12 changed files with 152 additions and 52 deletions

View file

@ -1530,6 +1530,7 @@ void MainWidget::ui_autoplayMediaInlineAsync(qint32 channelId, qint32 msgId) {
void MainWidget::audioPlayProgress(const AudioMsgId &audioId) {
if (audioId.type() == AudioMsgId::Type::Video) {
audioPlayer()->videoSoundProgress(audioId);
return;
}

View file

@ -482,7 +482,7 @@ void AudioPlayer::play(const AudioMsgId &audio, int64 position) {
if (stopped) emit updated(stopped);
}
void AudioPlayer::playFromVideo(const AudioMsgId &audio, int64 position, std_::unique_ptr<VideoSoundData> &&data) {
void AudioPlayer::playFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position) {
t_assert(audio.type() == AudioMsgId::Type::Video);
auto type = audio.type();
@ -502,8 +502,15 @@ void AudioPlayer::playFromVideo(const AudioMsgId &audio, int64 position, std_::u
emit faderOnTimer();
current->clear();
current->audio = audio;
current->videoPlayId = videoPlayId;
current->videoData = std_::move(data);
_loader->startFromVideo(current->videoData->videoPlayId);
{
QMutexLocker videoLock(&_lastVideoMutex);
_lastVideoPlayId = current->videoPlayId;
_lastVideoPlaybackWhen = 0;
_lastVideoPlaybackCorrectedMs = 0;
}
_loader->startFromVideo(current->videoPlayId);
current->playbackState.state = AudioPlayerPlaying;
current->loading = true;
@ -516,6 +523,36 @@ void AudioPlayer::feedFromVideo(VideoSoundPart &&part) {
_loader->feedFromVideo(std_::move(part));
}
// Maps a system timestamp onto the video timeline using the last reported
// audio playback position for this play id. If no progress report matches
// (different play id, or no report yet) the system time is returned as-is.
int64 AudioPlayer::getVideoCorrectedTime(uint64 playId, uint64 systemMs) {
QMutexLocker videoLock(&_lastVideoMutex);
// No usable audio progress for this video: fall back to the raw time.
if (_lastVideoPlayId != playId || _lastVideoPlaybackWhen == 0) {
return systemMs;
}
// Start from the audio position observed at the last report and advance
// it by the wall-clock time elapsed since that report was taken.
auto corrected = static_cast<int64>(_lastVideoPlaybackCorrectedMs);
if (systemMs > _lastVideoPlaybackWhen) {
corrected += static_cast<int64>(systemMs - _lastVideoPlaybackWhen);
}
return corrected;
}
// Records the current audio playback position of a video's sound track
// (together with the wall-clock time of the observation) so that
// getVideoCorrectedTime() can later extrapolate the video timeline from it.
void AudioPlayer::videoSoundProgress(const AudioMsgId &audio) {
auto type = audio.type();
t_assert(type == AudioMsgId::Type::Video);
// Lock order: playerMutex first, then _lastVideoMutex — the same order
// used in playFromVideo(), which takes _lastVideoMutex while holding the
// player lock; keep this order everywhere to avoid deadlock.
QMutexLocker lock(&playerMutex);
QMutexLocker videoLock(&_lastVideoMutex);
auto current = dataForType(type);
t_assert(current != nullptr);
// Only record progress for the currently tracked play id. frequency == 0
// would divide by zero in the position -> ms conversion, so skip it too.
if (current->videoPlayId == _lastVideoPlayId && current->playbackState.frequency) {
_lastVideoPlaybackWhen = getms(); // time of this observation
_lastVideoPlaybackCorrectedMs = (current->playbackState.position * 1000ULL) / current->playbackState.frequency;
}
}
bool AudioPlayer::checkCurrentALError(AudioMsgId::Type type) {
if (_checkALError()) return true;

View file

@ -67,9 +67,10 @@ public:
void stop(AudioMsgId::Type type);
// Video player audio stream interface.
void playFromVideo(const AudioMsgId &audio, int64 position, std_::unique_ptr<VideoSoundData> &&data);
void playFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position);
void feedFromVideo(VideoSoundPart &&part);
AudioPlaybackState getStateForVideo(uint64 playId);
int64 getVideoCorrectedTime(uint64 playId, uint64 systemMs);
void videoSoundProgress(const AudioMsgId &audio);
void stopAndClear();
@ -122,6 +123,7 @@ private:
uint32 buffers[3] = { 0 };
int64 samplesCount[3] = { 0 };
uint64 videoPlayId = 0;
std_::unique_ptr<VideoSoundData> videoData;
private:
@ -147,8 +149,10 @@ private:
AudioMsg _songData[AudioSimultaneousLimit];
AudioMsg _videoData;
uint64 _lastVideoPlayId;
AudioPlaybackState _lastVideoPlaybackState;
uint64 _lastVideoPlayId = 0;
uint64 _lastVideoPlaybackWhen = 0;
uint64 _lastVideoPlaybackCorrectedMs = 0;
QMutex _lastVideoMutex;
QMutex _mutex;

View file

@ -361,7 +361,7 @@ AudioPlayerLoader *AudioPlayerLoaders::setupLoader(const AudioMsgId &audio, Setu
LOG(("Audio Error: video sound data not ready"));
return nullptr;
}
_videoLoader = std_::make_unique<ChildFFMpegLoader>(std_::move(data->videoData));
_videoLoader = std_::make_unique<ChildFFMpegLoader>(data->videoPlayId, std_::move(data->videoData));
l = _videoLoader.get();
} else {
*loader = std_::make_unique<FFMpegLoader>(data->file, data->data);

View file

@ -33,7 +33,8 @@ VideoSoundData::~VideoSoundData() {
}
}
ChildFFMpegLoader::ChildFFMpegLoader(std_::unique_ptr<VideoSoundData> &&data) : AudioPlayerLoader(FileLocation(), QByteArray())
// Takes ownership of the decoded video's sound data; videoPlayId identifies
// which video playback this audio loader belongs to (see playId()).
ChildFFMpegLoader::ChildFFMpegLoader(uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data) : AudioPlayerLoader(FileLocation(), QByteArray())
, _videoPlayId(videoPlayId)
, _parentData(std_::move(data)) {
_frame = av_frame_alloc(); // NOTE(review): presumably released in the destructor — not visible here, confirm.
}

View file

@ -32,7 +32,6 @@ extern "C" {
#include <AL/al.h>
struct VideoSoundData {
uint64 videoPlayId = 0;
AVCodecContext *context = nullptr;
int32 frequency = AudioVoiceMsgFrequency;
int64 length = 0;
@ -64,7 +63,7 @@ inline void freePacket(AVPacket *packet) {
class ChildFFMpegLoader : public AudioPlayerLoader {
public:
ChildFFMpegLoader(std_::unique_ptr<VideoSoundData> &&data);
ChildFFMpegLoader(uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data);
bool open(qint64 position = 0) override;
@ -88,7 +87,7 @@ public:
void enqueuePackets(QQueue<AVPacket> &packets);
uint64 playId() const {
return _parentData->videoPlayId;
return _videoPlayId;
}
bool eofReached() const {
return _eofReached;
@ -106,6 +105,7 @@ private:
int32 _maxResampleSamples = 1024;
uint8_t **_dstSamplesData = nullptr;
uint64 _videoPlayId = 0;
std_::unique_ptr<VideoSoundData> _parentData;
AVSampleFormat _inputFormat;
AVFrame *_frame = nullptr;

View file

@ -103,6 +103,7 @@ bool FFMpegReaderImplementation::readNextFrame() {
_frameMs = frameMs;
_hadFrame = _frameRead = true;
_frameTime += _currentFrameDelay;
return true;
}
@ -133,6 +134,44 @@ bool FFMpegReaderImplementation::readNextFrame() {
return false;
}
// Advances the current frame until its presentation time passes the
// requested position |ms|. With an audio stream present the target is
// corrected by the audio playback progress (video follows audio); without
// one the reader decodes at most two frames per call and otherwise jumps
// its frame time forward to keep up. Returns false on read failure / EOF.
bool FFMpegReaderImplementation::readFramesTill(int64 ms) {
if (_audioStreamId >= 0) { // sync by audio stream
auto correctMs = audioPlayer()->getVideoCorrectedTime(_playId, ms);
// NOTE(review): ms == -1 (the "read first frame" request) becomes a huge
// value when converted to the uint64 parameter of getVideoCorrectedTime();
// confirm that path is only hit before any audio progress is recorded.
// Ensure at least one frame has been decoded.
if (!_frameRead && !readNextFrame()) {
return false;
}
// Drop frames that are already behind the audio-corrected position.
while (_frameTime <= correctMs) {
if (!readNextFrame()) {
return false;
}
}
// Remember the offset between the caller's clock and the corrected one
// so framePresentationTime() reports times the caller can schedule by.
_frameTimeCorrection = ms - correctMs;
return true;
} else { // just keep up
if (_frameRead && _frameTime > ms) {
return true;
}
if (!readNextFrame()) {
return false;
}
if (_frameTime > ms) {
return true;
}
// One more frame at most; if still behind, jump the frame time forward
// instead of decoding further ("keep up" heuristic).
if (!readNextFrame()) {
return false;
}
if (_frameTime <= ms) {
_frameTime = ms + 5; // keep up
}
return true;
}
}
// Current frame's presentation time shifted onto the caller's clock by the
// audio-sync correction, clamped so it never goes negative.
uint64 FFMpegReaderImplementation::framePresentationTime() const {
auto shifted = _frameTime + _frameTimeCorrection;
return static_cast<uint64>(qMax(shifted, 0LL));
}
bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) {
t_assert(_frameRead);
_frameRead = false;
@ -184,10 +223,6 @@ bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const Q
return true;
}
int FFMpegReaderImplementation::nextFrameDelay() {
return _currentFrameDelay;
}
bool FFMpegReaderImplementation::start(Mode mode) {
_mode = mode;
@ -276,8 +311,8 @@ bool FFMpegReaderImplementation::start(Mode mode) {
} else {
soundData->length = (_fmtContext->streams[_audioStreamId]->duration * soundData->frequency * _fmtContext->streams[_audioStreamId]->time_base.num) / _fmtContext->streams[_audioStreamId]->time_base.den;
}
soundData->videoPlayId = _playId = rand_value<uint64>();
audioPlayer()->playFromVideo(AudioMsgId(AudioMsgId::Type::Video), 0, std_::move(soundData));
_playId = rand_value<uint64>();
audioPlayer()->playFromVideo(AudioMsgId(AudioMsgId::Type::Video), _playId, std_::move(soundData), 0);
}
return true;

View file

@ -37,9 +37,9 @@ class FFMpegReaderImplementation : public ReaderImplementation {
public:
FFMpegReaderImplementation(FileLocation *location, QByteArray *data);
bool readNextFrame() override;
bool readFramesTill(int64 ms) override;
uint64 framePresentationTime() const override;
bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
int nextFrameDelay() override;
bool start(Mode mode) override;
int duration() const;
@ -48,6 +48,8 @@ public:
~FFMpegReaderImplementation();
private:
bool readNextFrame();
enum class PacketResult {
Ok,
EndOfFile,
@ -93,6 +95,9 @@ private:
int _nextFrameDelay = 0;
int _currentFrameDelay = 0;
int64 _frameTime = 0;
int64 _frameTimeCorrection = 0;
};
} // namespace internal

View file

@ -28,7 +28,6 @@ namespace internal {
class ReaderImplementation {
public:
ReaderImplementation(FileLocation *location, QByteArray *data)
: _location(location)
, _data(data) {
@ -38,9 +37,16 @@ public:
Silent,
Normal,
};
virtual bool readNextFrame() = 0;
// Read frames till current frame will have presentation time > ms.
virtual bool readFramesTill(int64 ms) = 0;
// Get current frame presentation time.
virtual uint64 framePresentationTime() const = 0;
// Render current frame to an image with specific size.
virtual bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) = 0;
virtual int nextFrameDelay() = 0;
virtual bool start(Mode mode) = 0;
virtual ~ReaderImplementation() {
}

View file

@ -28,6 +28,29 @@ namespace internal {
QtGifReaderImplementation::QtGifReaderImplementation(FileLocation *location, QByteArray *data) : ReaderImplementation(location, data) {
}
// Reads frames until the current frame's time passes the requested position
// |ms|, decoding at most two frames per call; if still behind after that,
// the frame time is jumped forward ("keep up") instead of decoding more.
// Returns false when a frame could not be read.
bool QtGifReaderImplementation::readFramesTill(int64 ms) {
// Current frame is still in the future — nothing to do.
if (!_frame.isNull() && _frameTime > ms) {
return true;
}
if (!readNextFrame()) {
return false;
}
if (_frameTime > ms) {
return true;
}
// Second (and last) frame decoded this call.
if (!readNextFrame()) {
return false;
}
if (_frameTime <= ms) {
_frameTime = ms + 5; // keep up
}
return true;
}
// Current frame's presentation time; negative values (before the first
// frame is read) are reported as zero.
uint64 QtGifReaderImplementation::framePresentationTime() const {
auto clamped = qMax(_frameTime, 0LL);
return static_cast<uint64>(clamped);
}
bool QtGifReaderImplementation::readNextFrame() {
if (_reader) _frameDelay = _reader->nextImageDelay();
if (_framesLeft < 1 && !jumpToStart()) {
@ -39,6 +62,7 @@ bool QtGifReaderImplementation::readNextFrame() {
return false;
}
--_framesLeft;
_frameTime += _frameDelay;
return true;
}
@ -66,10 +90,6 @@ bool QtGifReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QS
return true;
}
int QtGifReaderImplementation::nextFrameDelay() {
return _frameDelay;
}
bool QtGifReaderImplementation::start(Mode mode) {
if (mode == Mode::OnlyGifv) return false;
return jumpToStart();

View file

@ -31,18 +31,20 @@ public:
QtGifReaderImplementation(FileLocation *location, QByteArray *data);
bool readNextFrame() override;
bool readFramesTill(int64 ms) override;
uint64 framePresentationTime() const override;
bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
int nextFrameDelay() override;
bool start(Mode mode) override;
~QtGifReaderImplementation();
private:
bool jumpToStart();
bool readNextFrame();
QImageReader *_reader = nullptr;
int _framesLeft = 0;
int64 _frameTime = 0;
int _frameDelay = 0;
QImage _frame;

View file

@ -303,7 +303,7 @@ public:
return error();
}
if (frame() && frame()->original.isNull()) {
if (!_implementation->readNextFrame()) {
if (!_implementation->readFramesTill(-1)) { // Read the first frame.
return error();
}
if (!_implementation->renderFrame(frame()->original, frame()->alpha, QSize())) {
@ -330,34 +330,17 @@ public:
}
ProcessResult finishProcess(uint64 ms) {
if (!readNextFrame()) {
return error();
}
if (ms >= _nextFrameWhen && !readNextFrame(true)) {
if (!_implementation->readFramesTill(ms - _animationStarted)) {
return error();
}
_nextFrameWhen = _animationStarted + _implementation->framePresentationTime();
if (!renderFrame()) {
return error();
}
return ProcessResult::CopyFrame;
}
uint64 nextFrameDelay() {
int32 delay = _implementation->nextFrameDelay();
return qMax(delay, 5);
}
bool readNextFrame(bool keepup = false) {
if (!_implementation->readNextFrame()) {
return false;
}
_nextFrameWhen += nextFrameDelay();
if (keepup) {
_nextFrameWhen = qMax(_nextFrameWhen, getms());
}
return true;
}
bool renderFrame() {
t_assert(frame() != 0 && _request.valid());
if (!_implementation->renderFrame(frame()->original, frame()->alpha, QSize(_request.framew, _request.frameh))) {
@ -394,6 +377,10 @@ public:
return _implementation->start(implementationMode());
}
// Marks playback as started at |ms|: the first frame is due immediately.
void startedAt(uint64 ms) {
_animationStarted = ms;
_nextFrameWhen = ms;
}
ProcessResult error() {
stop();
_state = State::Error;
@ -447,6 +434,7 @@ private:
int _width = 0;
int _height = 0;
uint64 _animationStarted = 0;
uint64 _nextFrameWhen = 0;
bool _paused = false;
@ -541,7 +529,8 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u
if (result == ProcessResult::Started) {
_loadLevel.fetchAndAddRelaxed(reader->_width * reader->_height - AverageGifSize);
}
if (!reader->_paused && result == ProcessResult::Repaint) {
// See if we need to pause GIF because it is not displayed right now.
if (!reader->_paused && reader->_mode == Reader::Mode::Gif && result == ProcessResult::Repaint) {
int32 ishowing, iprevious;
Reader::Frame *showing = it.key()->frameToShow(&ishowing), *previous = it.key()->frameToWriteNext(false, &iprevious);
t_assert(previous != 0 && showing != 0 && ishowing >= 0 && iprevious >= 0);
@ -561,7 +550,7 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u
frame->original = reader->frame()->original;
frame->displayed.storeRelease(0);
if (result == ProcessResult::Started) {
reader->_nextFrameWhen = ms;
reader->startedAt(ms);
it.key()->moveToNextWrite();
emit callback(it.key(), it.key()->threadIndex(), NotificationReinit);
}
@ -701,7 +690,7 @@ MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data
auto reader = std_::make_unique<internal::FFMpegReaderImplementation>(&localloc, &localdata);
if (reader->start(internal::ReaderImplementation::Mode::OnlyGifv)) {
bool hasAlpha = false;
if (reader->readNextFrame() && reader->renderFrame(cover, hasAlpha, QSize())) {
if (reader->readFramesTill(-1) && reader->renderFrame(cover, hasAlpha, QSize())) {
if (cover.width() > 0 && cover.height() > 0 && cover.width() < cover.height() * 10 && cover.height() < cover.width() * 10) {
if (hasAlpha) {
QImage cacheForResize;