-rw-r--r-- | engines/mohawk/myst_stacks/dni.cpp     |  2
-rw-r--r-- | engines/mohawk/riven.cpp               |  2
-rw-r--r-- | engines/mohawk/riven_external.cpp      |  4
-rw-r--r-- | engines/mohawk/video.cpp               |  6
-rw-r--r-- | engines/mohawk/video.h                 |  2
-rw-r--r-- | engines/sword1/animation.cpp           |  4
-rw-r--r-- | engines/sword1/animation.h             |  2
-rw-r--r-- | engines/sword2/animation.cpp           |  4
-rw-r--r-- | engines/sword2/animation.h             |  2
-rw-r--r-- | engines/sword25/fmv/movieplayer.cpp    |  2
-rw-r--r-- | engines/sword25/fmv/theora_decoder.cpp |  6
-rw-r--r-- | engines/sword25/fmv/theora_decoder.h   |  2
-rw-r--r-- | video/avi_decoder.cpp                  |  4
-rw-r--r-- | video/avi_decoder.h                    |  2
-rw-r--r-- | video/bink_decoder.cpp                 |  2
-rw-r--r-- | video/bink_decoder.h                   |  2
-rw-r--r-- | video/psx_decoder.cpp                  |  6
-rw-r--r-- | video/psx_decoder.h                    |  2
-rw-r--r-- | video/qt_decoder.cpp                   |  6
-rw-r--r-- | video/qt_decoder.h                     |  2
-rw-r--r-- | video/smk_decoder.cpp                  |  4
-rw-r--r-- | video/smk_decoder.h                    |  2
-rw-r--r-- | video/video_decoder.cpp                |  4
-rw-r--r-- | video/video_decoder.h                  | 15
24 files changed, 47 insertions, 42 deletions
diff --git a/engines/mohawk/myst_stacks/dni.cpp b/engines/mohawk/myst_stacks/dni.cpp
index 2ced265f02..cae165ccf0 100644
--- a/engines/mohawk/myst_stacks/dni.cpp
+++ b/engines/mohawk/myst_stacks/dni.cpp
@@ -103,7 +103,7 @@ void Dni::o_handPage(uint16 op, uint16 var, uint16 argc, uint16 *argv) {
 	VideoHandle atrus = _vm->_video->findVideoHandle(_video);
 
 	// Good ending and Atrus asked to give page
-	if (_globals.ending == 1 && _vm->_video->getElapsedTime(atrus) > (uint)Audio::Timestamp(0, 6801, 600).msecs()) {
+	if (_globals.ending == 1 && _vm->_video->getTime(atrus) > (uint)Audio::Timestamp(0, 6801, 600).msecs()) {
 		_globals.ending = 2;
 		_globals.heldPage = 0;
 		_vm->setMainCursor(kDefaultMystCursor);
diff --git a/engines/mohawk/riven.cpp b/engines/mohawk/riven.cpp
index 95a8313536..07b1b59929 100644
--- a/engines/mohawk/riven.cpp
+++ b/engines/mohawk/riven.cpp
@@ -979,7 +979,7 @@ void MohawkEngine_Riven::doVideoTimer(VideoHandle handle, bool force) {
 		return;
 
 	// Run the opcode if we can at this point
-	if (force || _video->getElapsedTime(handle) >= _scriptMan->getStoredMovieOpcodeTime())
+	if (force || _video->getTime(handle) >= _scriptMan->getStoredMovieOpcodeTime())
 		_scriptMan->runStoredMovieOpcode();
 }
 
diff --git a/engines/mohawk/riven_external.cpp b/engines/mohawk/riven_external.cpp
index 8dfc74ebf0..337a57e3e1 100644
--- a/engines/mohawk/riven_external.cpp
+++ b/engines/mohawk/riven_external.cpp
@@ -2059,7 +2059,7 @@ void RivenExternal::xbookclick(uint16 argc, uint16 *argv) {
 	debug(0, "\tHotspot = %d -> %d", argv[3], hotspotMap[argv[3] - 1]);
 
 	// Just let the video play while we wait until Gehn opens the trap book for us
-	while (_vm->_video->getElapsedTime(video) < startTime && !_vm->shouldQuit()) {
+	while (_vm->_video->getTime(video) < startTime && !_vm->shouldQuit()) {
 		if (_vm->_video->updateMovies())
 			_vm->_system->updateScreen();
 
@@ -2084,7 +2084,7 @@ void RivenExternal::xbookclick(uint16 argc, uint16 *argv) {
 
 	// OK, Gehn has opened the trap book and has asked us to go in. Let's watch
 	// and see what the player will do...
-	while (_vm->_video->getElapsedTime(video) < endTime && !_vm->shouldQuit()) {
+	while (_vm->_video->getTime(video) < endTime && !_vm->shouldQuit()) {
 		bool updateScreen = _vm->_video->updateMovies();
 
 		Common::Event event;
diff --git a/engines/mohawk/video.cpp b/engines/mohawk/video.cpp
index 80fa4bf9a0..83fca9ac35 100644
--- a/engines/mohawk/video.cpp
+++ b/engines/mohawk/video.cpp
@@ -49,7 +49,7 @@ void VideoEntry::clear() {
 }
 
 bool VideoEntry::endOfVideo() {
-	return !video || video->endOfVideo() || video->getElapsedTime() >= (uint)end.msecs();
+	return !video || video->endOfVideo() || video->getTime() >= (uint)end.msecs();
 }
 
 VideoManager::VideoManager(MohawkEngine* vm) : _vm(vm) {
@@ -481,9 +481,9 @@ uint32 VideoManager::getFrameCount(VideoHandle handle) {
 	return _videoStreams[handle]->getFrameCount();
 }
 
-uint32 VideoManager::getElapsedTime(VideoHandle handle) {
+uint32 VideoManager::getTime(VideoHandle handle) {
 	assert(handle != NULL_VID_HANDLE);
-	return _videoStreams[handle]->getElapsedTime();
+	return _videoStreams[handle]->getTime();
 }
 
 uint32 VideoManager::getDuration(VideoHandle handle) {
diff --git a/engines/mohawk/video.h b/engines/mohawk/video.h
index 34c287497f..8736782d7a 100644
--- a/engines/mohawk/video.h
+++ b/engines/mohawk/video.h
@@ -100,7 +100,7 @@ public:
 	VideoHandle findVideoHandle(const Common::String &filename);
 	int32 getCurFrame(VideoHandle handle);
 	uint32 getFrameCount(VideoHandle handle);
-	uint32 getElapsedTime(VideoHandle handle);
+	uint32 getTime(VideoHandle handle);
 	uint32 getDuration(VideoHandle videoHandle);
 	bool endOfVideo(VideoHandle handle);
 	void setVideoBounds(VideoHandle handle, Audio::Timestamp start, Audio::Timestamp end);
diff --git a/engines/sword1/animation.cpp b/engines/sword1/animation.cpp
index 1e2964054a..a70ca960ba 100644
--- a/engines/sword1/animation.cpp
+++ b/engines/sword1/animation.cpp
@@ -510,11 +510,11 @@ DXADecoderWithSound::DXADecoderWithSound(Audio::Mixer *mixer, Audio::SoundHandle
 	: _mixer(mixer), _bgSoundHandle(bgSoundHandle) {
 }
 
-uint32 DXADecoderWithSound::getElapsedTime() const {
+uint32 DXADecoderWithSound::getTime() const {
 	if (_mixer->isSoundHandleActive(*_bgSoundHandle))
 		return _mixer->getSoundElapsedTime(*_bgSoundHandle);
 
-	return DXADecoder::getElapsedTime();
+	return DXADecoder::getTime();
 }
 
 ///////////////////////////////////////////////////////////////////////////////
diff --git a/engines/sword1/animation.h b/engines/sword1/animation.h
index f64b03dd1b..c2ed86a1a3 100644
--- a/engines/sword1/animation.h
+++ b/engines/sword1/animation.h
@@ -60,7 +60,7 @@ public:
 	DXADecoderWithSound(Audio::Mixer *mixer, Audio::SoundHandle *bgSoundHandle);
 	~DXADecoderWithSound() {}
 
-	uint32 getElapsedTime() const;
+	uint32 getTime() const;
 
 private:
 	Audio::Mixer *_mixer;
diff --git a/engines/sword2/animation.cpp b/engines/sword2/animation.cpp
index e77ae98163..90ee7375ab 100644
--- a/engines/sword2/animation.cpp
+++ b/engines/sword2/animation.cpp
@@ -410,11 +410,11 @@ DXADecoderWithSound::DXADecoderWithSound(Audio::Mixer *mixer, Audio::SoundHandle
 	: _mixer(mixer), _bgSoundHandle(bgSoundHandle) {
 }
 
-uint32 DXADecoderWithSound::getElapsedTime() const {
+uint32 DXADecoderWithSound::getTime() const {
 	if (_mixer->isSoundHandleActive(*_bgSoundHandle))
 		return _mixer->getSoundElapsedTime(*_bgSoundHandle);
 
-	return DXADecoder::getElapsedTime();
+	return DXADecoder::getTime();
 }
 
 ///////////////////////////////////////////////////////////////////////////////
diff --git a/engines/sword2/animation.h b/engines/sword2/animation.h
index 3ef8dac754..3d5c42b7f7 100644
--- a/engines/sword2/animation.h
+++ b/engines/sword2/animation.h
@@ -60,7 +60,7 @@ public:
 	DXADecoderWithSound(Audio::Mixer *mixer, Audio::SoundHandle *bgSoundHandle);
 	~DXADecoderWithSound() {}
 
-	uint32 getElapsedTime() const;
+	uint32 getTime() const;
 private:
 	Audio::Mixer *_mixer;
 	Audio::SoundHandle *_bgSoundHandle;
diff --git a/engines/sword25/fmv/movieplayer.cpp b/engines/sword25/fmv/movieplayer.cpp
index 1acb366b3a..4609565223 100644
--- a/engines/sword25/fmv/movieplayer.cpp
+++ b/engines/sword25/fmv/movieplayer.cpp
@@ -167,7 +167,7 @@ void MoviePlayer::setScaleFactor(float scaleFactor) {
 }
 
 double MoviePlayer::getTime() {
-	return _decoder.getElapsedTime() / 1000.0;
+	return _decoder.getTime() / 1000.0;
 }
 
 #else // USE_THEORADEC
diff --git a/engines/sword25/fmv/theora_decoder.cpp b/engines/sword25/fmv/theora_decoder.cpp
index a7ebb5df8c..082c569fda 100644
--- a/engines/sword25/fmv/theora_decoder.cpp
+++ b/engines/sword25/fmv/theora_decoder.cpp
@@ -507,7 +507,7 @@ uint32 TheoraDecoder::getTimeToNextFrame() const {
 	if (endOfVideo() || _curFrame < 0)
 		return 0;
 
-	uint32 elapsedTime = getElapsedTime();
+	uint32 elapsedTime = getTime();
 	uint32 nextFrameStartTime = (uint32)(_nextFrameStartTime * 1000);
 
 	if (nextFrameStartTime <= elapsedTime)
@@ -516,11 +516,11 @@ uint32 TheoraDecoder::getTimeToNextFrame() const {
 	return nextFrameStartTime - elapsedTime;
 }
 
-uint32 TheoraDecoder::getElapsedTime() const {
+uint32 TheoraDecoder::getTime() const {
 	if (_audStream)
 		return g_system->getMixer()->getSoundElapsedTime(*_audHandle);
 
-	return VideoDecoder::getElapsedTime();
+	return VideoDecoder::getTime();
 }
 
 void TheoraDecoder::pauseVideoIntern(bool pause) {
diff --git a/engines/sword25/fmv/theora_decoder.h b/engines/sword25/fmv/theora_decoder.h
index e8cc5ab8b9..4fd7cc0f03 100644
--- a/engines/sword25/fmv/theora_decoder.h
+++ b/engines/sword25/fmv/theora_decoder.h
@@ -81,7 +81,7 @@ public:
 	}
 	Graphics::PixelFormat getPixelFormat() const { return _displaySurface.format; }
 
-	uint32 getElapsedTime() const;
+	uint32 getTime() const;
 	uint32 getTimeToNextFrame() const;
 
 	bool endOfVideo() const;
diff --git a/video/avi_decoder.cpp b/video/avi_decoder.cpp
index 9685952304..28fa712d4f 100644
--- a/video/avi_decoder.cpp
+++ b/video/avi_decoder.cpp
@@ -305,11 +305,11 @@ void AviDecoder::close() {
 	reset();
 }
 
-uint32 AviDecoder::getElapsedTime() const {
+uint32 AviDecoder::getTime() const {
 	if (_audStream)
 		return _mixer->getSoundElapsedTime(*_audHandle);
 
-	return FixedRateVideoDecoder::getElapsedTime();
+	return FixedRateVideoDecoder::getTime();
 }
 
 const Graphics::Surface *AviDecoder::decodeNextFrame() {
diff --git a/video/avi_decoder.h b/video/avi_decoder.h
index 508760ec89..edd08c42a0 100644
--- a/video/avi_decoder.h
+++ b/video/avi_decoder.h
@@ -195,7 +195,7 @@ public:
 	uint16 getWidth() const { return _header.width; }
 	uint16 getHeight() const { return _header.height; }
 	uint32 getFrameCount() const { return _header.totalFrames; }
-	uint32 getElapsedTime() const;
+	uint32 getTime() const;
 	const Graphics::Surface *decodeNextFrame();
 	Graphics::PixelFormat getPixelFormat() const;
 	const byte *getPalette() { _dirtyPalette = false; return _palette; }
diff --git a/video/bink_decoder.cpp b/video/bink_decoder.cpp
index 884ca69f17..4738c3c8c0 100644
--- a/video/bink_decoder.cpp
+++ b/video/bink_decoder.cpp
@@ -181,7 +181,7 @@ void BinkDecoder::close() {
 	_frames.clear();
 }
 
-uint32 BinkDecoder::getElapsedTime() const {
+uint32 BinkDecoder::getTime() const {
 	if (_audioStream && g_system->getMixer()->isSoundHandleActive(_audioHandle))
 		return g_system->getMixer()->getSoundElapsedTime(_audioHandle) + _audioStartOffset;
 
diff --git a/video/bink_decoder.h b/video/bink_decoder.h
index 3d5e882dd7..f1eadc6f17 100644
--- a/video/bink_decoder.h
+++ b/video/bink_decoder.h
@@ -70,7 +70,7 @@ public:
 	uint16 getHeight() const { return _surface.h; }
 	Graphics::PixelFormat getPixelFormat() const { return _surface.format; }
 	uint32 getFrameCount() const { return _frames.size(); }
-	uint32 getElapsedTime() const;
+	uint32 getTime() const;
 	const Graphics::Surface *decodeNextFrame();
 
 	// FixedRateVideoDecoder
diff --git a/video/psx_decoder.cpp b/video/psx_decoder.cpp
index 7c04b7f041..74f740f614 100644
--- a/video/psx_decoder.cpp
+++ b/video/psx_decoder.cpp
@@ -236,13 +236,13 @@ void PSXStreamDecoder::close() {
 	reset();
 }
 
-uint32 PSXStreamDecoder::getElapsedTime() const {
+uint32 PSXStreamDecoder::getTime() const {
 	// TODO: Currently, the audio is always after the video so using this
 	// can often lead to gaps in the audio...
 	//if (_audStream)
 	//	return _mixer->getSoundElapsedTime(_audHandle);
 
-	return VideoDecoder::getElapsedTime();
+	return VideoDecoder::getTime();
 }
 
 uint32 PSXStreamDecoder::getTimeToNextFrame() const {
@@ -250,7 +250,7 @@ uint32 PSXStreamDecoder::getTimeToNextFrame() const {
 		return 0;
 
 	uint32 nextTimeMillis = _nextFrameStartTime.msecs();
-	uint32 elapsedTime = getElapsedTime();
+	uint32 elapsedTime = getTime();
 
 	if (elapsedTime > nextTimeMillis)
 		return 0;
diff --git a/video/psx_decoder.h b/video/psx_decoder.h
index c8ad92c45a..3695cb0f42 100644
--- a/video/psx_decoder.h
+++ b/video/psx_decoder.h
@@ -75,7 +75,7 @@ public:
 	uint16 getWidth() const { return _surface->w; }
 	uint16 getHeight() const { return _surface->h; }
 	uint32 getFrameCount() const { return _frameCount; }
-	uint32 getElapsedTime() const;
+	uint32 getTime() const;
 	uint32 getTimeToNextFrame() const;
 	const Graphics::Surface *decodeNextFrame();
 	Graphics::PixelFormat getPixelFormat() const { return _surface->format; }
diff --git a/video/qt_decoder.cpp b/video/qt_decoder.cpp
index 0d80c93a1f..5c841b30fb 100644
--- a/video/qt_decoder.cpp
+++ b/video/qt_decoder.cpp
@@ -185,7 +185,7 @@ bool QuickTimeDecoder::endOfVideo() const {
 	return true;
 }
 
-uint32 QuickTimeDecoder::getElapsedTime() const {
+uint32 QuickTimeDecoder::getTime() const {
 	// Try to base sync off an active audio track
 	for (uint32 i = 0; i < _audioHandles.size(); i++) {
 		if (g_system->getMixer()->isSoundHandleActive(_audioHandles[i])) {
@@ -196,7 +196,7 @@ uint32 QuickTimeDecoder::getElapsedTime() const {
 	}
 
 	// Just use time elapsed since the beginning
-	return SeekableVideoDecoder::getElapsedTime();
+	return SeekableVideoDecoder::getTime();
 }
 
 uint32 QuickTimeDecoder::getTimeToNextFrame() const {
@@ -211,7 +211,7 @@ uint32 QuickTimeDecoder::getTimeToNextFrame() const {
 
 	// TODO: Add support for rate modification
 
-	uint32 elapsedTime = getElapsedTime();
+	uint32 elapsedTime = getTime();
 
 	if (elapsedTime < nextFrameStartTime)
 		return nextFrameStartTime - elapsedTime;
diff --git a/video/qt_decoder.h b/video/qt_decoder.h
index 583b4b44b5..7f2d32e515 100644
--- a/video/qt_decoder.h
+++ b/video/qt_decoder.h
@@ -106,7 +106,7 @@ public:
 	bool isVideoLoaded() const { return isOpen(); }
 	const Graphics::Surface *decodeNextFrame();
 	bool endOfVideo() const;
-	uint32 getElapsedTime() const;
+	uint32 getTime() const;
 	uint32 getTimeToNextFrame() const;
 	Graphics::PixelFormat getPixelFormat() const;
 
diff --git a/video/smk_decoder.cpp b/video/smk_decoder.cpp
index 084028300d..439fe9027d 100644
--- a/video/smk_decoder.cpp
+++ b/video/smk_decoder.cpp
@@ -289,11 +289,11 @@ SmackerDecoder::~SmackerDecoder() {
 	close();
 }
 
-uint32 SmackerDecoder::getElapsedTime() const {
+uint32 SmackerDecoder::getTime() const {
 	if (_audioStream && _audioStarted)
 		return _mixer->getSoundElapsedTime(_audioHandle);
 
-	return FixedRateVideoDecoder::getElapsedTime();
+	return FixedRateVideoDecoder::getTime();
 }
 
 bool SmackerDecoder::loadStream(Common::SeekableReadStream *stream) {
diff --git a/video/smk_decoder.h b/video/smk_decoder.h
index 72cd32a222..fd5d658bdd 100644
--- a/video/smk_decoder.h
+++ b/video/smk_decoder.h
@@ -69,7 +69,7 @@ public:
 	uint16 getWidth() const { return _surface->w; }
 	uint16 getHeight() const { return _surface->h; }
 	uint32 getFrameCount() const { return _frameCount; }
-	uint32 getElapsedTime() const;
+	uint32 getTime() const;
 	const Graphics::Surface *decodeNextFrame();
 	Graphics::PixelFormat getPixelFormat() const { return Graphics::PixelFormat::createFormatCLUT8(); }
 	const byte *getPalette() { _dirtyPalette = false; return _palette; }
diff --git a/video/video_decoder.cpp b/video/video_decoder.cpp
index e1122132a8..ae82a3374c 100644
--- a/video/video_decoder.cpp
+++ b/video/video_decoder.cpp
@@ -45,7 +45,7 @@ bool VideoDecoder::loadFile(const Common::String &filename) {
 	return loadStream(file);
 }
 
-uint32 VideoDecoder::getElapsedTime() const {
+uint32 VideoDecoder::getTime() const {
 	return g_system->getMillis() - _startTime;
 }
 
@@ -98,7 +98,7 @@ uint32 FixedRateVideoDecoder::getTimeToNextFrame() const {
 	if (endOfVideo() || _curFrame < 0)
 		return 0;
 
-	uint32 elapsedTime = getElapsedTime();
+	uint32 elapsedTime = getTime();
 	uint32 nextFrameStartTime = getFrameBeginTime(_curFrame + 1);
 
 	// If the time that the next frame should be shown has past
diff --git a/video/video_decoder.h b/video/video_decoder.h
index 91c1ae2f11..73134307a4 100644
--- a/video/video_decoder.h
+++ b/video/video_decoder.h
@@ -125,15 +125,20 @@ public:
 	virtual uint32 getFrameCount() const = 0;
 
 	/**
-	 * Returns the time (in ms) that the video has been running.
-	 * This is based on the "wall clock" time as determined by
-	 * OSystem::getMillis, and takes pausing the video into account.
+	 * Returns the time position (in ms) of the current video.
+	 * This can be based on the "wall clock" time as determined by
+	 * OSystem::getMillis() or the current time of any audio track
+	 * running in the video, and takes pausing the video into account.
 	 *
-	 * As such, it can differ from what multiplying getCurFrame() by
+	 * As such, it will differ from what multiplying getCurFrame() by
 	 * some constant would yield, e.g. for a video with non-constant
 	 * frame rate.
+	 *
+	 * Due to the nature of the timing, this value may not always be
+	 * completely accurate (since our mixer does not have precise
+	 * timing).
 	 */
-	virtual uint32 getElapsedTime() const;
+	virtual uint32 getTime() const;
 
 	/**
 	 * Return the time (in ms) until the next frame should be displayed.
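
Note: the substitution is the same in every hunk above. Each decoder that carries its own audio track overrides the renamed accessor to report the mixer's elapsed audio time while the sound handle is active, and falls back to the base class's wall-clock time otherwise (compare DXADecoderWithSound::getTime() and SmackerDecoder::getTime()). The following is a minimal, self-contained sketch of that pattern, not ScummVM code; the Mixer, SoundHandle, and decoder classes here are simplified stand-ins invented for illustration.

// Sketch of the getTime() pattern used throughout this commit: report the
// audio clock while a sound handle is active, fall back to elapsed
// wall-clock time otherwise. The types below are stand-ins, not ScummVM's.
#include <chrono>
#include <cstdint>
#include <iostream>

struct SoundHandle {
	bool active = false;      // whether the track is still playing
	uint32_t elapsedMs = 0;   // how far the audio has advanced, in ms
};

struct Mixer {
	bool isSoundHandleActive(const SoundHandle &h) const { return h.active; }
	uint32_t getSoundElapsedTime(const SoundHandle &h) const { return h.elapsedMs; }
};

class VideoDecoder {
public:
	VideoDecoder() : _startTime(std::chrono::steady_clock::now()) {}
	virtual ~VideoDecoder() {}

	// Base behaviour: time position derived from the wall clock.
	virtual uint32_t getTime() const {
		return (uint32_t)std::chrono::duration_cast<std::chrono::milliseconds>(
		    std::chrono::steady_clock::now() - _startTime).count();
	}

private:
	std::chrono::steady_clock::time_point _startTime;
};

class DecoderWithSound : public VideoDecoder {
public:
	DecoderWithSound(Mixer *mixer, SoundHandle *handle) : _mixer(mixer), _handle(handle) {}

	// Override: prefer the audio clock, which is what the video is actually
	// synced against, and only fall back to the wall clock when no audio runs.
	uint32_t getTime() const override {
		if (_mixer->isSoundHandleActive(*_handle))
			return _mixer->getSoundElapsedTime(*_handle);
		return VideoDecoder::getTime();
	}

private:
	Mixer *_mixer;
	SoundHandle *_handle;
};

int main() {
	Mixer mixer;
	SoundHandle handle;
	handle.active = true;
	handle.elapsedMs = 1234; // pretend the audio track is 1.234 s in

	DecoderWithSound decoder(&mixer, &handle);
	std::cout << "time position: " << decoder.getTime() << " ms\n"; // 1234

	handle.active = false;   // audio finished: falls back to wall-clock time
	std::cout << "time position: " << decoder.getTime() << " ms\n";
	return 0;
}

Frame scheduling then compares this value against the next frame's start time (see the getTimeToNextFrame() hunks above), and callers such as MoviePlayer::getTime() in sword25 only convert the millisecond value, which is why the rename touches both the decoders and their callers.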