author     Matthew Hoops    2012-03-20 14:18:57 -0400
committer  Matthew Hoops    2012-03-20 14:49:16 -0400
commit     71756bdf4eae5ba9cc3f329b85e894f04640aaef (patch)
tree       40d464262da107ab5eed82f198685209161ebac1 /video
parent     03eba05b09e5c9e5a351f8111185934b92a3fed3 (diff)
parent     3c3576a224b92c703b4e8ea20008ac8a069980dd (diff)
Merge remote branch 'upstream/master' into pegasus
Diffstat (limited to 'video')
-rw-r--r--  video/bink_decoder.cpp    64
-rw-r--r--  video/bink_decoder.h      15
-rw-r--r--  video/codecs/mjpeg.cpp    25
-rw-r--r--  video/codecs/mjpeg.h       3
-rw-r--r--  video/dxa_decoder.cpp     12
-rw-r--r--  video/module.mk            1
-rw-r--r--  video/psx_decoder.cpp    697
-rw-r--r--  video/psx_decoder.h      128
-rw-r--r--  video/qt_decoder.cpp     126
-rw-r--r--  video/qt_decoder.h        11
10 files changed, 939 insertions(+), 143 deletions(-)
diff --git a/video/bink_decoder.cpp b/video/bink_decoder.cpp
index 46ac8ac386..884ca69f17 100644
--- a/video/bink_decoder.cpp
+++ b/video/bink_decoder.cpp
@@ -113,23 +113,33 @@ BinkDecoder::BinkDecoder() {
}
_audioStream = 0;
- _audioStarted = false;
}
-BinkDecoder::~BinkDecoder() {
- close();
-}
+void BinkDecoder::startAudio() {
+ if (_audioTrack < _audioTracks.size()) {
+ const AudioTrack &audio = _audioTracks[_audioTrack];
-void BinkDecoder::close() {
- reset();
+ _audioStream = Audio::makeQueuingAudioStream(audio.outSampleRate, audio.outChannels == 2);
+ g_system->getMixer()->playStream(Audio::Mixer::kPlainSoundType, &_audioHandle, _audioStream);
+ } // else no audio
+}
+void BinkDecoder::stopAudio() {
if (_audioStream) {
- // Stop audio
g_system->getMixer()->stopHandle(_audioHandle);
_audioStream = 0;
}
+}
+
+BinkDecoder::~BinkDecoder() {
+ close();
+}
- _audioStarted = false;
+void BinkDecoder::close() {
+ reset();
+
+ // Stop audio
+ stopAudio();
for (int i = 0; i < 4; i++) {
delete[] _curPlanes[i]; _curPlanes[i] = 0;
@@ -173,7 +183,7 @@ void BinkDecoder::close() {
uint32 BinkDecoder::getElapsedTime() const {
if (_audioStream && g_system->getMixer()->isSoundHandleActive(_audioHandle))
- return g_system->getMixer()->getSoundElapsedTime(_audioHandle);
+ return g_system->getMixer()->getSoundElapsedTime(_audioHandle) + _audioStartOffset;
return g_system->getMillis() - _startTime;
}
@@ -241,11 +251,6 @@ const Graphics::Surface *BinkDecoder::decodeNextFrame() {
if (_curFrame == 0)
_startTime = g_system->getMillis();
- if (!_audioStarted && _audioStream) {
- _audioStarted = true;
- g_system->getMixer()->playStream(Audio::Mixer::kPlainSoundType, &_audioHandle, _audioStream);
- }
-
return &_surface;
}
@@ -510,6 +515,11 @@ void BinkDecoder::mergeHuffmanSymbols(VideoFrame &video, byte *dst, const byte *
}
bool BinkDecoder::loadStream(Common::SeekableReadStream *stream) {
+ Graphics::PixelFormat format = g_system->getScreenFormat();
+ return loadStream(stream, format);
+}
+
+bool BinkDecoder::loadStream(Common::SeekableReadStream *stream, const Graphics::PixelFormat &format) {
close();
_id = stream->readUint32BE();
@@ -589,7 +599,6 @@ bool BinkDecoder::loadStream(Common::SeekableReadStream *stream) {
_hasAlpha = _videoFlags & kVideoFlagAlpha;
_swapPlanes = (_id == kBIKhID) || (_id == kBIKiID); // BIKh and BIKi swap the chroma planes
- Graphics::PixelFormat format = g_system->getScreenFormat();
_surface.create(width, height, format);
// Give the planes a bit extra space
@@ -618,11 +627,8 @@ bool BinkDecoder::loadStream(Common::SeekableReadStream *stream) {
initBundles();
initHuffman();
- if (_audioTrack < _audioTracks.size()) {
- const AudioTrack &audio = _audioTracks[_audioTrack];
-
- _audioStream = Audio::makeQueuingAudioStream(audio.outSampleRate, audio.outChannels == 2);
- }
+ startAudio();
+ _audioStartOffset = 0;
return true;
}
@@ -711,15 +717,15 @@ void BinkDecoder::initBundles() {
for (int i = 0; i < 2; i++) {
int width = MAX<uint32>(cw[i], 8);
- _bundles[kSourceBlockTypes ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
- _bundles[kSourceSubBlockTypes].countLengths[i] = Common::intLog2((width >> 4) + 511) + 1;
- _bundles[kSourceColors ].countLengths[i] = Common::intLog2((cbw[i] )*64 + 511) + 1;
- _bundles[kSourceIntraDC ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
- _bundles[kSourceInterDC ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
- _bundles[kSourceXOff ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
- _bundles[kSourceYOff ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
- _bundles[kSourcePattern ].countLengths[i] = Common::intLog2((cbw[i] << 3) + 511) + 1;
- _bundles[kSourceRun ].countLengths[i] = Common::intLog2((cbw[i] )*48 + 511) + 1;
+ _bundles[kSourceBlockTypes ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
+ _bundles[kSourceSubBlockTypes].countLengths[i] = Common::intLog2(((width + 7) >> 4) + 511) + 1;
+ _bundles[kSourceColors ].countLengths[i] = Common::intLog2((cbw[i]) * 64 + 511) + 1;
+ _bundles[kSourceIntraDC ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
+ _bundles[kSourceInterDC ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
+ _bundles[kSourceXOff ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
+ _bundles[kSourceYOff ].countLengths[i] = Common::intLog2((width >> 3) + 511) + 1;
+ _bundles[kSourcePattern ].countLengths[i] = Common::intLog2((cbw[i] << 3) + 511) + 1;
+ _bundles[kSourceRun ].countLengths[i] = Common::intLog2((cbw[i]) * 48 + 511) + 1;
}
}
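A minimal usage sketch for the two-argument loadStream() introduced above (not part of this commit; the file name and the RGB565 format choice are illustrative assumptions):

    // Hypothetical caller: decode a Bink video into an explicit pixel format
    // instead of the current screen format.
    Common::SeekableReadStream *stream = SearchMan.createReadStreamForMember("intro.bik");
    Video::BinkDecoder bink;
    Graphics::PixelFormat rgb565(2, 5, 6, 5, 0, 11, 5, 0, 0);

    if (stream && bink.loadStream(stream, rgb565)) {
        while (!bink.endOfVideo()) {
            const Graphics::Surface *frame = bink.decodeNextFrame();
            // ... blit *frame, then wait bink.getTimeToNextFrame() msecs ...
        }
    }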
diff --git a/video/bink_decoder.h b/video/bink_decoder.h
index dd1b7ca67d..3d5e882dd7 100644
--- a/video/bink_decoder.h
+++ b/video/bink_decoder.h
@@ -76,7 +76,9 @@ public:
// FixedRateVideoDecoder
Common::Rational getFrameRate() const { return _frameRate; }
-private:
+ // Bink specific
+ bool loadStream(Common::SeekableReadStream *stream, const Graphics::PixelFormat &format);
+protected:
static const int kAudioChannelsMax = 2;
static const int kAudioBlockSizeMax = (kAudioChannelsMax << 11);
@@ -221,15 +223,13 @@ private:
Audio::SoundHandle _audioHandle;
Audio::QueuingAudioStream *_audioStream;
- bool _audioStarted;
+ int32 _audioStartOffset;
uint32 _videoFlags; ///< Video frame features.
bool _hasAlpha; ///< Do video frames have alpha?
bool _swapPlanes; ///< Are the planes ordered (A)YVU instead of (A)YUV?
- uint32 _audioFrame;
-
Common::Array<AudioTrack> _audioTracks; ///< All audio tracks.
Common::Array<VideoFrame> _frames; ///< All video frames.
@@ -259,7 +259,7 @@ private:
/** Decode an audio packet. */
void audioPacket(AudioTrack &audio);
/** Decode a video packet. */
- void videoPacket(VideoFrame &video);
+ virtual void videoPacket(VideoFrame &video);
/** Decode a plane. */
void decodePlane(VideoFrame &video, int planeIdx, bool isChroma);
@@ -327,6 +327,11 @@ private:
void IDCT(int16 *block);
void IDCTPut(DecodeContext &ctx, int16 *block);
void IDCTAdd(DecodeContext &ctx, int16 *block);
+
+ /** Start playing the audio track */
+ void startAudio();
+ /** Stop playing the audio track */
+ void stopAudio();
};
} // End of namespace Video
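Because the header now exposes its internals as protected and makes videoPacket() virtual, an engine-specific decoder can hook packet handling. A minimal sketch of such a subclass (an assumption, not code from this commit):

    // Hypothetical subclass overriding the now-virtual videoPacket().
    class MyBinkDecoder : public Video::BinkDecoder {
    protected:
        virtual void videoPacket(VideoFrame &video) {
            // engine-specific bookkeeping could go here
            Video::BinkDecoder::videoPacket(video);
        }
    };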
diff --git a/video/codecs/mjpeg.cpp b/video/codecs/mjpeg.cpp
index 248a80d714..10fd9d7c50 100644
--- a/video/codecs/mjpeg.cpp
+++ b/video/codecs/mjpeg.cpp
@@ -22,8 +22,8 @@
#include "common/system.h"
#include "common/textconsole.h"
-#include "graphics/jpeg.h"
#include "graphics/surface.h"
+#include "graphics/decoders/jpeg.h"
#include "video/codecs/mjpeg.h"
@@ -34,38 +34,31 @@ class SeekableReadStream;
namespace Video {
JPEGDecoder::JPEGDecoder() : Codec() {
- _jpeg = new Graphics::JPEG();
_pixelFormat = g_system->getScreenFormat();
_surface = NULL;
}
JPEGDecoder::~JPEGDecoder() {
- delete _jpeg;
-
if (_surface) {
_surface->free();
delete _surface;
}
}
-const Graphics::Surface *JPEGDecoder::decodeImage(Common::SeekableReadStream* stream) {
- if (!_jpeg->read(stream)) {
+const Graphics::Surface *JPEGDecoder::decodeImage(Common::SeekableReadStream *stream) {
+ Graphics::JPEGDecoder jpeg;
+
+ if (!jpeg.loadStream(*stream)) {
warning("Failed to decode JPEG frame");
return 0;
}
- if (!_surface) {
- _surface = new Graphics::Surface();
- _surface->create(_jpeg->getWidth(), _jpeg->getHeight(), _pixelFormat);
+ if (_surface) {
+ _surface->free();
+ delete _surface;
}
- Graphics::Surface *frame = _jpeg->getSurface(_pixelFormat);
- assert(frame);
-
- _surface->copyFrom(*frame);
-
- frame->free();
- delete frame;
+ _surface = jpeg.getSurface()->convertTo(_pixelFormat);
return _surface;
}
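The rewritten decodeImage() above creates a Graphics::JPEGDecoder per frame and converts its surface to the target format. The same pattern in isolation (a sketch; the stream variable is an assumption):

    Graphics::JPEGDecoder jpeg;
    if (jpeg.loadStream(*stream)) {
        // getSurface() stays owned by the decoder; convertTo() returns a new
        // surface that the caller must free() and delete.
        Graphics::Surface *frame = jpeg.getSurface()->convertTo(g_system->getScreenFormat());
        // ... use frame ...
        frame->free();
        delete frame;
    }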
diff --git a/video/codecs/mjpeg.h b/video/codecs/mjpeg.h
index 45cb57dea2..0c3b668a74 100644
--- a/video/codecs/mjpeg.h
+++ b/video/codecs/mjpeg.h
@@ -31,7 +31,7 @@ class SeekableReadStream;
}
namespace Graphics {
-class JPEG;
+struct Surface;
}
namespace Video {
@@ -50,7 +50,6 @@ public:
private:
Graphics::PixelFormat _pixelFormat;
- Graphics::JPEG *_jpeg;
Graphics::Surface *_surface;
};
diff --git a/video/dxa_decoder.cpp b/video/dxa_decoder.cpp
index 561719a27d..7d1112a59c 100644
--- a/video/dxa_decoder.cpp
+++ b/video/dxa_decoder.cpp
@@ -115,9 +115,9 @@ bool DXADecoder::loadStream(Common::SeekableReadStream *stream) {
_scaledBuffer = 0;
if (_scaleMode != S_NONE) {
_scaledBuffer = (uint8 *)malloc(_frameSize);
- memset(_scaledBuffer, 0, _frameSize);
if (!_scaledBuffer)
error("Error allocating scale buffer (size %u)", _frameSize);
+ memset(_scaledBuffer, 0, _frameSize);
}
#ifdef DXA_EXPERIMENT_MAXD
@@ -318,7 +318,7 @@ void DXADecoder::decode13(int size) {
for (uint32 by = 0; by < _curHeight; by += BLOCKH) {
for (uint32 bx = 0; bx < _width; bx += BLOCKW) {
uint8 type = *codeBuf++;
- uint8 *b2 = (uint8*)_frameBuffer1 + bx + by * _width;
+ uint8 *b2 = (uint8 *)_frameBuffer1 + bx + by * _width;
switch (type) {
case 0:
@@ -369,7 +369,7 @@ void DXADecoder::decode13(int size) {
if (mbyte & 0x08)
my = -my;
- uint8 *b1 = (uint8*)_frameBuffer2 + (bx+mx) + (by+my) * _width;
+ uint8 *b1 = (uint8 *)_frameBuffer2 + (bx+mx) + (by+my) * _width;
for (int yc = 0; yc < BLOCKH; yc++) {
memcpy(b2, b1, BLOCKW);
b1 += _width;
@@ -385,7 +385,7 @@ void DXADecoder::decode13(int size) {
for (int subBlock = 0; subBlock < 4; subBlock++) {
int sx = bx + subX[subBlock], sy = by + subY[subBlock];
- b2 = (uint8*)_frameBuffer1 + sx + sy * _width;
+ b2 = (uint8 *)_frameBuffer1 + sx + sy * _width;
switch (subMask & 0xC0) {
// 00: skip
case 0x00:
@@ -413,7 +413,7 @@ void DXADecoder::decode13(int size) {
if (mbyte & 0x08)
my = -my;
- uint8 *b1 = (uint8*)_frameBuffer2 + (sx+mx) + (sy+my) * _width;
+ uint8 *b1 = (uint8 *)_frameBuffer2 + (sx+mx) + (sy+my) * _width;
for (int yc = 0; yc < BLOCKH / 2; yc++) {
memcpy(b2, b1, BLOCKW / 2);
b1 += _width;
@@ -489,9 +489,9 @@ const Graphics::Surface *DXADecoder::decodeNextFrame() {
if ((_inBuffer == NULL) || (_inBufferSize < size)) {
free(_inBuffer);
_inBuffer = (byte *)malloc(size);
- memset(_inBuffer, 0, size);
if (_inBuffer == NULL)
error("Error allocating input buffer (size %u)", size);
+ memset(_inBuffer, 0, size);
_inBufferSize = size;
}
diff --git a/video/module.mk b/video/module.mk
index ceeac94384..900a781d0a 100644
--- a/video/module.mk
+++ b/video/module.mk
@@ -5,6 +5,7 @@ MODULE_OBJS := \
coktel_decoder.o \
dxa_decoder.o \
flic_decoder.o \
+ psx_decoder.o \
qt_decoder.o \
smk_decoder.o \
video_decoder.o \
diff --git a/video/psx_decoder.cpp b/video/psx_decoder.cpp
new file mode 100644
index 0000000000..7c04b7f041
--- /dev/null
+++ b/video/psx_decoder.cpp
@@ -0,0 +1,697 @@
+/* ScummVM - Graphic Adventure Engine
+ *
+ * ScummVM is the legal property of its developers, whose names
+ * are too numerous to list here. Please refer to the COPYRIGHT
+ * file distributed with this source distribution.
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ */
+
+// PlayStation Stream demuxer and XA audio decoder based on FFmpeg/libav
+// MDEC video emulation based on http://kenai.com/downloads/jpsxdec/Old/PlayStation1_STR_format1-00.txt
+
+#include "audio/audiostream.h"
+#include "audio/mixer.h"
+#include "audio/decoders/raw.h"
+#include "common/bitstream.h"
+#include "common/huffman.h"
+#include "common/memstream.h"
+#include "common/stream.h"
+#include "common/system.h"
+#include "common/textconsole.h"
+#include "graphics/yuv_to_rgb.h"
+
+#include "video/psx_decoder.h"
+
+namespace Video {
+
+// Here are the codes/lengths/symbols that are used for decoding
+// DC coefficients (version 3 frames only)
+
+#define DC_CODE_COUNT 9
+#define DC_HUFF_VAL(b, n, p) (((b) << 16) | ((n) << 8) | (p))
+#define GET_DC_BITS(x) ((x) >> 16)
+#define GET_DC_NEG(x) ((int)(((x) >> 8) & 0xff))
+#define GET_DC_POS(x) ((int)((x) & 0xff))
+
+static const uint32 s_huffmanDCChromaCodes[DC_CODE_COUNT] = {
+ 254, 126, 62, 30, 14, 6, 2, 1, 0
+};
+
+static const byte s_huffmanDCChromaLengths[DC_CODE_COUNT] = {
+ 8, 7, 6, 5, 4, 3, 2, 2, 2
+};
+
+static const uint32 s_huffmanDCLumaCodes[DC_CODE_COUNT] = {
+ 126, 62, 30, 14, 6, 5, 1, 0, 4
+};
+
+static const byte s_huffmanDCLumaLengths[DC_CODE_COUNT] = {
+ 7, 6, 5, 4, 3, 3, 2, 2, 3
+};
+
+static const uint32 s_huffmanDCSymbols[DC_CODE_COUNT] = {
+ DC_HUFF_VAL(8, 255, 128), DC_HUFF_VAL(7, 127, 64), DC_HUFF_VAL(6, 63, 32),
+ DC_HUFF_VAL(5, 31, 16), DC_HUFF_VAL(4, 15, 8), DC_HUFF_VAL(3, 7, 4),
+ DC_HUFF_VAL(2, 3, 2), DC_HUFF_VAL(1, 1, 1), DC_HUFF_VAL(0, 0, 0)
+};
+
+// Here are the codes/lengths/symbols that are used for decoding
+// AC coefficients (version 2 and 3 frames)
+
+#define AC_CODE_COUNT 113
+#define AC_HUFF_VAL(z, a) ((z << 8) | a)
+#define ESCAPE_CODE ((uint32)-1) // arbitrary, just so we can tell what code it is
+#define END_OF_BLOCK ((uint32)-2) // arbitrary, just so we can tell what code it is
+#define GET_AC_ZERO_RUN(code) (code >> 8)
+#define GET_AC_COEFFICIENT(code) ((int)(code & 0xff))
+
+static const uint32 s_huffmanACCodes[AC_CODE_COUNT] = {
+ // Regular codes
+ 3, 3, 4, 5, 5, 6, 7, 4, 5, 6, 7, 4, 5, 6, 7,
+ 32, 33, 34, 35, 36, 37, 38, 39, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
+ 23, 24, 25, 26, 27, 28, 29, 30, 31, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31,
+
+ // Escape code
+ 1,
+ // End of block code
+ 2
+};
+
+static const byte s_huffmanACLengths[AC_CODE_COUNT] = {
+ // Regular codes
+ 2, 3, 4, 4, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7,
+ 8, 8, 8, 8, 8, 8, 8, 8, 10, 10, 10, 10, 10,
+ 10, 10, 10, 12, 12, 12, 12, 12, 12, 12, 12,
+ 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13,
+ 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
+ 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14,
+ 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15,
+ 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
+ 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16,
+
+ // Escape code
+ 6,
+ // End of block code
+ 2
+};
+
+static const uint32 s_huffmanACSymbols[AC_CODE_COUNT] = {
+ // Regular codes
+ AC_HUFF_VAL(0, 1), AC_HUFF_VAL(1, 1), AC_HUFF_VAL(0, 2), AC_HUFF_VAL(2, 1), AC_HUFF_VAL(0, 3),
+ AC_HUFF_VAL(4, 1), AC_HUFF_VAL(3, 1), AC_HUFF_VAL(7, 1), AC_HUFF_VAL(6, 1), AC_HUFF_VAL(1, 2),
+ AC_HUFF_VAL(5, 1), AC_HUFF_VAL(2, 2), AC_HUFF_VAL(9, 1), AC_HUFF_VAL(0, 4), AC_HUFF_VAL(8, 1),
+ AC_HUFF_VAL(13, 1), AC_HUFF_VAL(0, 6), AC_HUFF_VAL(12, 1), AC_HUFF_VAL(11, 1), AC_HUFF_VAL(3, 2),
+ AC_HUFF_VAL(1, 3), AC_HUFF_VAL(0, 5), AC_HUFF_VAL(10, 1), AC_HUFF_VAL(16, 1), AC_HUFF_VAL(5, 2),
+ AC_HUFF_VAL(0, 7), AC_HUFF_VAL(2, 3), AC_HUFF_VAL(1, 4), AC_HUFF_VAL(15, 1), AC_HUFF_VAL(14, 1),
+ AC_HUFF_VAL(4, 2), AC_HUFF_VAL(0, 11), AC_HUFF_VAL(8, 2), AC_HUFF_VAL(4, 3), AC_HUFF_VAL(0, 10),
+ AC_HUFF_VAL(2, 4), AC_HUFF_VAL(7, 2), AC_HUFF_VAL(21, 1), AC_HUFF_VAL(20, 1), AC_HUFF_VAL(0, 9),
+ AC_HUFF_VAL(19, 1), AC_HUFF_VAL(18, 1), AC_HUFF_VAL(1, 5), AC_HUFF_VAL(3, 3), AC_HUFF_VAL(0, 8),
+ AC_HUFF_VAL(6, 2), AC_HUFF_VAL(17, 1), AC_HUFF_VAL(10, 2), AC_HUFF_VAL(9, 2), AC_HUFF_VAL(5, 3),
+ AC_HUFF_VAL(3, 4), AC_HUFF_VAL(2, 5), AC_HUFF_VAL(1, 7), AC_HUFF_VAL(1, 6), AC_HUFF_VAL(0, 15),
+ AC_HUFF_VAL(0, 14), AC_HUFF_VAL(0, 13), AC_HUFF_VAL(0, 12), AC_HUFF_VAL(26, 1), AC_HUFF_VAL(25, 1),
+ AC_HUFF_VAL(24, 1), AC_HUFF_VAL(23, 1), AC_HUFF_VAL(22, 1), AC_HUFF_VAL(0, 31), AC_HUFF_VAL(0, 30),
+ AC_HUFF_VAL(0, 29), AC_HUFF_VAL(0, 28), AC_HUFF_VAL(0, 27), AC_HUFF_VAL(0, 26), AC_HUFF_VAL(0, 25),
+ AC_HUFF_VAL(0, 24), AC_HUFF_VAL(0, 23), AC_HUFF_VAL(0, 22), AC_HUFF_VAL(0, 21), AC_HUFF_VAL(0, 20),
+ AC_HUFF_VAL(0, 19), AC_HUFF_VAL(0, 18), AC_HUFF_VAL(0, 17), AC_HUFF_VAL(0, 16), AC_HUFF_VAL(0, 40),
+ AC_HUFF_VAL(0, 39), AC_HUFF_VAL(0, 38), AC_HUFF_VAL(0, 37), AC_HUFF_VAL(0, 36), AC_HUFF_VAL(0, 35),
+ AC_HUFF_VAL(0, 34), AC_HUFF_VAL(0, 33), AC_HUFF_VAL(0, 32), AC_HUFF_VAL(1, 14), AC_HUFF_VAL(1, 13),
+ AC_HUFF_VAL(1, 12), AC_HUFF_VAL(1, 11), AC_HUFF_VAL(1, 10), AC_HUFF_VAL(1, 9), AC_HUFF_VAL(1, 8),
+ AC_HUFF_VAL(1, 18), AC_HUFF_VAL(1, 17), AC_HUFF_VAL(1, 16), AC_HUFF_VAL(1, 15), AC_HUFF_VAL(6, 3),
+ AC_HUFF_VAL(16, 2), AC_HUFF_VAL(15, 2), AC_HUFF_VAL(14, 2), AC_HUFF_VAL(13, 2), AC_HUFF_VAL(12, 2),
+ AC_HUFF_VAL(11, 2), AC_HUFF_VAL(31, 1), AC_HUFF_VAL(30, 1), AC_HUFF_VAL(29, 1), AC_HUFF_VAL(28, 1),
+ AC_HUFF_VAL(27, 1),
+
+ // Escape code
+ ESCAPE_CODE,
+ // End of block code
+ END_OF_BLOCK
+};
+
+PSXStreamDecoder::PSXStreamDecoder(CDSpeed speed, uint32 frameCount) : _nextFrameStartTime(0, speed), _frameCount(frameCount) {
+ _stream = 0;
+ _audStream = 0;
+ _surface = new Graphics::Surface();
+ _yBuffer = _cbBuffer = _crBuffer = 0;
+ _acHuffman = new Common::Huffman(0, AC_CODE_COUNT, s_huffmanACCodes, s_huffmanACLengths, s_huffmanACSymbols);
+ _dcHuffmanChroma = new Common::Huffman(0, DC_CODE_COUNT, s_huffmanDCChromaCodes, s_huffmanDCChromaLengths, s_huffmanDCSymbols);
+ _dcHuffmanLuma = new Common::Huffman(0, DC_CODE_COUNT, s_huffmanDCLumaCodes, s_huffmanDCLumaLengths, s_huffmanDCSymbols);
+}
+
+PSXStreamDecoder::~PSXStreamDecoder() {
+ close();
+ delete _surface;
+ delete _acHuffman;
+ delete _dcHuffmanLuma;
+ delete _dcHuffmanChroma;
+}
+
+#define RAW_CD_SECTOR_SIZE 2352
+
+#define CDXA_TYPE_MASK 0x0E
+#define CDXA_TYPE_DATA 0x08
+#define CDXA_TYPE_AUDIO 0x04
+#define CDXA_TYPE_VIDEO 0x02
+
+bool PSXStreamDecoder::loadStream(Common::SeekableReadStream *stream) {
+ close();
+
+ _stream = stream;
+
+ Common::SeekableReadStream *sector = readSector();
+
+ if (!sector) {
+ close();
+ return false;
+ }
+
+ // Rip out video info from the first frame
+ sector->seek(18);
+ byte sectorType = sector->readByte() & CDXA_TYPE_MASK;
+
+ if (sectorType != CDXA_TYPE_VIDEO && sectorType != CDXA_TYPE_DATA) {
+ close();
+ return false;
+ }
+
+ sector->seek(40);
+
+ uint16 width = sector->readUint16LE();
+ uint16 height = sector->readUint16LE();
+ _surface->create(width, height, g_system->getScreenFormat());
+
+ _macroBlocksW = (width + 15) / 16;
+ _macroBlocksH = (height + 15) / 16;
+ _yBuffer = new byte[_macroBlocksW * _macroBlocksH * 16 * 16];
+ _cbBuffer = new byte[_macroBlocksW * _macroBlocksH * 8 * 8];
+ _crBuffer = new byte[_macroBlocksW * _macroBlocksH * 8 * 8];
+
+ delete sector;
+ _stream->seek(0);
+
+ return true;
+}
+
+void PSXStreamDecoder::close() {
+ if (!_stream)
+ return;
+
+ delete _stream;
+ _stream = 0;
+
+ // Deinitialize sound
+ g_system->getMixer()->stopHandle(_audHandle);
+ _audStream = 0;
+
+ _surface->free();
+
+ memset(&_adpcmStatus, 0, sizeof(_adpcmStatus));
+
+ _macroBlocksW = _macroBlocksH = 0;
+ delete[] _yBuffer; _yBuffer = 0;
+ delete[] _cbBuffer; _cbBuffer = 0;
+ delete[] _crBuffer; _crBuffer = 0;
+
+ reset();
+}
+
+uint32 PSXStreamDecoder::getElapsedTime() const {
+ // TODO: Currently, the audio is always after the video so using this
+ // can often lead to gaps in the audio...
+ //if (_audStream)
+ // return _mixer->getSoundElapsedTime(_audHandle);
+
+ return VideoDecoder::getElapsedTime();
+}
+
+uint32 PSXStreamDecoder::getTimeToNextFrame() const {
+ if (!isVideoLoaded() || endOfVideo())
+ return 0;
+
+ uint32 nextTimeMillis = _nextFrameStartTime.msecs();
+ uint32 elapsedTime = getElapsedTime();
+
+ if (elapsedTime > nextTimeMillis)
+ return 0;
+
+ return nextTimeMillis - elapsedTime;
+}
+
+#define VIDEO_DATA_CHUNK_SIZE 2016
+#define VIDEO_DATA_HEADER_SIZE 56
+
+const Graphics::Surface *PSXStreamDecoder::decodeNextFrame() {
+ Common::SeekableReadStream *sector = 0;
+ byte *partialFrame = 0;
+ int sectorsRead = 0;
+
+ while (!endOfVideo()) {
+ sector = readSector();
+ sectorsRead++;
+
+ if (!sector)
+ error("Corrupt PSX stream sector");
+
+ sector->seek(0x11);
+ byte track = sector->readByte();
+ if (track >= 32)
+ error("Bad PSX stream track");
+
+ byte sectorType = sector->readByte() & CDXA_TYPE_MASK;
+
+ switch (sectorType) {
+ case CDXA_TYPE_DATA:
+ case CDXA_TYPE_VIDEO:
+ if (track == 1) {
+ sector->seek(28);
+ uint16 curSector = sector->readUint16LE();
+ uint16 sectorCount = sector->readUint16LE();
+ sector->readUint32LE();
+ uint16 frameSize = sector->readUint32LE();
+
+ if (curSector >= sectorCount)
+ error("Bad sector");
+
+ if (!partialFrame)
+ partialFrame = (byte *)malloc(sectorCount * VIDEO_DATA_CHUNK_SIZE);
+
+ sector->seek(VIDEO_DATA_HEADER_SIZE);
+ sector->read(partialFrame + curSector * VIDEO_DATA_CHUNK_SIZE, VIDEO_DATA_CHUNK_SIZE);
+
+ if (curSector == sectorCount - 1) {
+ // Done assembling the frame
+ Common::SeekableReadStream *frame = new Common::MemoryReadStream(partialFrame, frameSize, DisposeAfterUse::YES);
+
+ decodeFrame(frame);
+
+ delete frame;
+ delete sector;
+
+ _curFrame++;
+ if (_curFrame == 0)
+ _startTime = g_system->getMillis();
+
+ // Increase the time by the amount of sectors we read
+ // One may notice that this is still not the most precise
+ // method since a frame takes up the time its sectors took
+ // up instead of the amount of time it takes the next frame
+ // to be read from the sectors. The actual frame rate should
+ // be constant instead of variable, so the slight difference
+ // in a frame's showing time is negligible (1/150 of a second).
+ _nextFrameStartTime = _nextFrameStartTime.addFrames(sectorsRead);
+
+ return _surface;
+ }
+ } else
+ error("Unhandled multi-track video");
+ break;
+ case CDXA_TYPE_AUDIO:
+ // We only handle one audio channel so far
+ if (track == 1)
+ queueAudioFromSector(sector);
+ else
+ warning("Unhandled multi-track audio");
+ break;
+ default:
+ // This shows up way too often, but the other sectors
+ // are safe to ignore
+ //warning("Unknown PSX sector type 0x%x", sectorType);
+ break;
+ }
+
+ delete sector;
+ }
+
+ return 0;
+}
+
+static const byte s_syncHeader[12] = { 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00 };
+
+Common::SeekableReadStream *PSXStreamDecoder::readSector() {
+ assert(_stream);
+
+ Common::SeekableReadStream *stream = _stream->readStream(RAW_CD_SECTOR_SIZE);
+
+ byte syncHeader[12];
+ stream->read(syncHeader, 12);
+ if (!memcmp(s_syncHeader, syncHeader, 12))
+ return stream;
+
+ return 0;
+}
+
+// Ha! It's palindromic!
+#define AUDIO_DATA_CHUNK_SIZE 2304
+#define AUDIO_DATA_SAMPLE_COUNT 4032
+
+static const int s_xaTable[5][2] = {
+ { 0, 0 },
+ { 60, 0 },
+ { 115, -52 },
+ { 98, -55 },
+ { 122, -60 }
+};
+
+void PSXStreamDecoder::queueAudioFromSector(Common::SeekableReadStream *sector) {
+ assert(sector);
+
+ if (!_audStream) {
+ // Initialize audio stream
+ sector->seek(19);
+ byte format = sector->readByte();
+
+ bool stereo = (format & (1 << 0)) != 0;
+ uint rate = (format & (1 << 2)) ? 18900 : 37800;
+
+ _audStream = Audio::makeQueuingAudioStream(rate, stereo);
+ g_system->getMixer()->playStream(Audio::Mixer::kPlainSoundType, &_audHandle, _audStream);
+ }
+
+ sector->seek(24);
+
+ // This XA audio is different (yet similar) from normal XA audio! Watch out!
+ // TODO: It's probably similar enough to normal XA that we can merge it somehow...
+ // TODO: RTZ PSX needs the same audio code in a regular AudioStream class. Probably
+ // will do something similar to QuickTime and creating a base class 'ISOMode2Parser'
+ // or something similar.
+ byte *buf = new byte[AUDIO_DATA_CHUNK_SIZE];
+ sector->read(buf, AUDIO_DATA_CHUNK_SIZE);
+
+ int channels = _audStream->isStereo() ? 2 : 1;
+ int16 *dst = new int16[AUDIO_DATA_SAMPLE_COUNT];
+ int16 *leftChannel = dst;
+ int16 *rightChannel = dst + 1;
+
+ for (byte *src = buf; src < buf + AUDIO_DATA_CHUNK_SIZE; src += 128) {
+ for (int i = 0; i < 4; i++) {
+ int shift = 12 - (src[4 + i * 2] & 0xf);
+ int filter = src[4 + i * 2] >> 4;
+ int f0 = s_xaTable[filter][0];
+ int f1 = s_xaTable[filter][1];
+ int16 s_1 = _adpcmStatus[0].sample[0];
+ int16 s_2 = _adpcmStatus[0].sample[1];
+
+ for (int j = 0; j < 28; j++) {
+ byte d = src[16 + i + j * 4];
+ int t = (int8)(d << 4) >> 4;
+ int s = (t << shift) + ((s_1 * f0 + s_2 * f1 + 32) >> 6);
+ s_2 = s_1;
+ s_1 = CLIP<int>(s, -32768, 32767);
+ *leftChannel = s_1;
+ leftChannel += channels;
+ }
+
+ if (channels == 2) {
+ _adpcmStatus[0].sample[0] = s_1;
+ _adpcmStatus[0].sample[1] = s_2;
+ s_1 = _adpcmStatus[1].sample[0];
+ s_2 = _adpcmStatus[1].sample[1];
+ }
+
+ shift = 12 - (src[5 + i * 2] & 0xf);
+ filter = src[5 + i * 2] >> 4;
+ f0 = s_xaTable[filter][0];
+ f1 = s_xaTable[filter][1];
+
+ for (int j = 0; j < 28; j++) {
+ byte d = src[16 + i + j * 4];
+ int t = (int8)d >> 4;
+ int s = (t << shift) + ((s_1 * f0 + s_2 * f1 + 32) >> 6);
+ s_2 = s_1;
+ s_1 = CLIP<int>(s, -32768, 32767);
+
+ if (channels == 2) {
+ *rightChannel = s_1;
+ rightChannel += 2;
+ } else {
+ *leftChannel++ = s_1;
+ }
+ }
+
+ if (channels == 2) {
+ _adpcmStatus[1].sample[0] = s_1;
+ _adpcmStatus[1].sample[1] = s_2;
+ } else {
+ _adpcmStatus[0].sample[0] = s_1;
+ _adpcmStatus[0].sample[1] = s_2;
+ }
+ }
+ }
+
+ int flags = Audio::FLAG_16BITS;
+
+ if (_audStream->isStereo())
+ flags |= Audio::FLAG_STEREO;
+
+#ifdef SCUMM_LITTLE_ENDIAN
+ flags |= Audio::FLAG_LITTLE_ENDIAN;
+#endif
+
+ _audStream->queueBuffer((byte *)dst, AUDIO_DATA_SAMPLE_COUNT * 2, DisposeAfterUse::YES, flags);
+ delete[] buf;
+}
+
+void PSXStreamDecoder::decodeFrame(Common::SeekableReadStream *frame) {
+ // A frame is essentially an MPEG-1 intra frame
+
+ Common::BitStream16LEMSB bits(frame);
+
+ bits.skip(16); // unknown
+ bits.skip(16); // 0x3800
+ uint16 scale = bits.getBits(16);
+ uint16 version = bits.getBits(16);
+
+ if (version != 2 && version != 3)
+ error("Unknown PSX stream frame version");
+
+ // Initialize default v3 DC here
+ _lastDC[0] = _lastDC[1] = _lastDC[2] = 0;
+
+ for (int mbX = 0; mbX < _macroBlocksW; mbX++)
+ for (int mbY = 0; mbY < _macroBlocksH; mbY++)
+ decodeMacroBlock(&bits, mbX, mbY, scale, version);
+
+ // Output data onto the frame
+ Graphics::convertYUV420ToRGB(_surface, _yBuffer, _cbBuffer, _crBuffer, _surface->w, _surface->h, _macroBlocksW * 16, _macroBlocksW * 8);
+}
+
+void PSXStreamDecoder::decodeMacroBlock(Common::BitStream *bits, int mbX, int mbY, uint16 scale, uint16 version) {
+ int pitchY = _macroBlocksW * 16;
+ int pitchC = _macroBlocksW * 8;
+
+ // Note the strange order of red before blue
+ decodeBlock(bits, _crBuffer + (mbY * pitchC + mbX) * 8, pitchC, scale, version, kPlaneV);
+ decodeBlock(bits, _cbBuffer + (mbY * pitchC + mbX) * 8, pitchC, scale, version, kPlaneU);
+ decodeBlock(bits, _yBuffer + (mbY * pitchY + mbX) * 16, pitchY, scale, version, kPlaneY);
+ decodeBlock(bits, _yBuffer + (mbY * pitchY + mbX) * 16 + 8, pitchY, scale, version, kPlaneY);
+ decodeBlock(bits, _yBuffer + (mbY * pitchY + mbX) * 16 + 8 * pitchY, pitchY, scale, version, kPlaneY);
+ decodeBlock(bits, _yBuffer + (mbY * pitchY + mbX) * 16 + 8 * pitchY + 8, pitchY, scale, version, kPlaneY);
+}
+
+// Standard JPEG/MPEG zig zag table
+static const byte s_zigZagTable[8 * 8] = {
+ 0, 1, 5, 6, 14, 15, 27, 28,
+ 2, 4, 7, 13, 16, 26, 29, 42,
+ 3, 8, 12, 17, 25, 30, 41, 43,
+ 9, 11, 18, 24, 31, 40, 44, 53,
+ 10, 19, 23, 32, 39, 45, 52, 54,
+ 20, 22, 33, 38, 46, 51, 55, 60,
+ 21, 34, 37, 47, 50, 56, 59, 61,
+ 35, 36, 48, 49, 57, 58, 62, 63
+};
+
+// One byte different from the standard MPEG-1 table
+static const byte s_quantizationTable[8 * 8] = {
+ 2, 16, 19, 22, 26, 27, 29, 34,
+ 16, 16, 22, 24, 27, 29, 34, 37,
+ 19, 22, 26, 27, 29, 34, 34, 38,
+ 22, 22, 26, 27, 29, 34, 37, 40,
+ 22, 26, 27, 29, 32, 35, 40, 48,
+ 26, 27, 29, 32, 35, 40, 48, 58,
+ 26, 27, 29, 34, 38, 46, 56, 69,
+ 27, 29, 35, 38, 46, 56, 69, 83
+};
+
+void PSXStreamDecoder::dequantizeBlock(int *coefficients, float *block, uint16 scale) {
+ // Dequantize the data, un-zig-zagging as we go along
+ for (int i = 0; i < 8 * 8; i++) {
+ if (i == 0) // Special case for the DC coefficient
+ block[i] = coefficients[i] * s_quantizationTable[i];
+ else
+ block[i] = (float)coefficients[s_zigZagTable[i]] * s_quantizationTable[i] * scale / 8;
+ }
+}
+
+int PSXStreamDecoder::readDC(Common::BitStream *bits, uint16 version, PlaneType plane) {
+ // Version 2 just has its coefficient as 10-bits
+ if (version == 2)
+ return readSignedCoefficient(bits);
+
+ // Version 3 has it stored as huffman codes as a difference from the previous DC value
+
+ Common::Huffman *huffman = (plane == kPlaneY) ? _dcHuffmanLuma : _dcHuffmanChroma;
+
+ uint32 symbol = huffman->getSymbol(*bits);
+ int dc = 0;
+
+ if (GET_DC_BITS(symbol) != 0) {
+ bool negative = (bits->getBit() == 0);
+ dc = bits->getBits(GET_DC_BITS(symbol) - 1);
+
+ if (negative)
+ dc -= GET_DC_NEG(symbol);
+ else
+ dc += GET_DC_POS(symbol);
+ }
+
+ _lastDC[plane] += dc * 4; // convert from 8-bit to 10-bit
+ return _lastDC[plane];
+}
+
+#define BLOCK_OVERFLOW_CHECK() \
+ if (count > 63) \
+ error("PSXStreamDecoder::readAC(): Too many coefficients")
+
+void PSXStreamDecoder::readAC(Common::BitStream *bits, int *block) {
+ // Clear the block first
+ for (int i = 0; i < 63; i++)
+ block[i] = 0;
+
+ int count = 0;
+
+ while (!bits->eos()) {
+ uint32 symbol = _acHuffman->getSymbol(*bits);
+
+ if (symbol == ESCAPE_CODE) {
+ // The escape code!
+ int zeroes = bits->getBits(6);
+ count += zeroes + 1;
+ BLOCK_OVERFLOW_CHECK();
+ block += zeroes;
+ *block++ = readSignedCoefficient(bits);
+ } else if (symbol == END_OF_BLOCK) {
+ // We're done
+ break;
+ } else {
+ // Normal huffman code
+ int zeroes = GET_AC_ZERO_RUN(symbol);
+ count += zeroes + 1;
+ BLOCK_OVERFLOW_CHECK();
+ block += zeroes;
+
+ if (bits->getBit())
+ *block++ = -GET_AC_COEFFICIENT(symbol);
+ else
+ *block++ = GET_AC_COEFFICIENT(symbol);
+ }
+ }
+}
+
+int PSXStreamDecoder::readSignedCoefficient(Common::BitStream *bits) {
+ uint val = bits->getBits(10);
+
+ // extend the sign
+ uint shift = 8 * sizeof(int) - 10;
+ return (int)(val << shift) >> shift;
+}
+
+// IDCT table built with :
+// _idct8x8[x][y] = cos(((2 * x + 1) * y) * (M_PI / 16.0)) * 0.5;
+// _idct8x8[x][y] /= sqrt(2.0) if y == 0
+static const double s_idct8x8[8][8] = {
+ { 0.353553390593274, 0.490392640201615, 0.461939766255643, 0.415734806151273, 0.353553390593274, 0.277785116509801, 0.191341716182545, 0.097545161008064 },
+ { 0.353553390593274, 0.415734806151273, 0.191341716182545, -0.097545161008064, -0.353553390593274, -0.490392640201615, -0.461939766255643, -0.277785116509801 },
+ { 0.353553390593274, 0.277785116509801, -0.191341716182545, -0.490392640201615, -0.353553390593274, 0.097545161008064, 0.461939766255643, 0.415734806151273 },
+ { 0.353553390593274, 0.097545161008064, -0.461939766255643, -0.277785116509801, 0.353553390593274, 0.415734806151273, -0.191341716182545, -0.490392640201615 },
+ { 0.353553390593274, -0.097545161008064, -0.461939766255643, 0.277785116509801, 0.353553390593274, -0.415734806151273, -0.191341716182545, 0.490392640201615 },
+ { 0.353553390593274, -0.277785116509801, -0.191341716182545, 0.490392640201615, -0.353553390593273, -0.097545161008064, 0.461939766255643, -0.415734806151273 },
+ { 0.353553390593274, -0.415734806151273, 0.191341716182545, 0.097545161008064, -0.353553390593274, 0.490392640201615, -0.461939766255643, 0.277785116509801 },
+ { 0.353553390593274, -0.490392640201615, 0.461939766255643, -0.415734806151273, 0.353553390593273, -0.277785116509801, 0.191341716182545, -0.097545161008064 }
+};
+
+void PSXStreamDecoder::idct(float *dequantData, float *result) {
+ // IDCT code based on JPEG's IDCT code
+ // TODO: Switch to the integer-based one mentioned in the docs
+ // This is by far the costliest operation here
+
+ float tmp[8 * 8];
+
+ // Apply 1D IDCT to rows
+ for (int y = 0; y < 8; y++) {
+ for (int x = 0; x < 8; x++) {
+ tmp[y + x * 8] = dequantData[0] * s_idct8x8[x][0]
+ + dequantData[1] * s_idct8x8[x][1]
+ + dequantData[2] * s_idct8x8[x][2]
+ + dequantData[3] * s_idct8x8[x][3]
+ + dequantData[4] * s_idct8x8[x][4]
+ + dequantData[5] * s_idct8x8[x][5]
+ + dequantData[6] * s_idct8x8[x][6]
+ + dequantData[7] * s_idct8x8[x][7];
+ }
+
+ dequantData += 8;
+ }
+
+ // Apply 1D IDCT to columns
+ for (int x = 0; x < 8; x++) {
+ const float *u = tmp + x * 8;
+ for (int y = 0; y < 8; y++) {
+ result[y * 8 + x] = u[0] * s_idct8x8[y][0]
+ + u[1] * s_idct8x8[y][1]
+ + u[2] * s_idct8x8[y][2]
+ + u[3] * s_idct8x8[y][3]
+ + u[4] * s_idct8x8[y][4]
+ + u[5] * s_idct8x8[y][5]
+ + u[6] * s_idct8x8[y][6]
+ + u[7] * s_idct8x8[y][7];
+ }
+ }
+}
+
+void PSXStreamDecoder::decodeBlock(Common::BitStream *bits, byte *block, int pitch, uint16 scale, uint16 version, PlaneType plane) {
+ // Version 2 just has signed 10 bits for DC
+ // Version 3 has them huffman coded
+ int coefficients[8 * 8];
+ coefficients[0] = readDC(bits, version, plane);
+ readAC(bits, &coefficients[1]); // Read in the AC
+
+ // Dequantize
+ float dequantData[8 * 8];
+ dequantizeBlock(coefficients, dequantData, scale);
+
+ // Perform IDCT
+ float idctData[8 * 8];
+ idct(dequantData, idctData);
+
+ // Now output the data
+ for (int y = 0; y < 8; y++) {
+ byte *start = block + pitch * y;
+
+ // Convert the result to be in the range [0, 255]
+ for (int x = 0; x < 8; x++)
+ *start++ = (int)CLIP<float>(idctData[y * 8 + x], -128.0f, 127.0f) + 128;
+ }
+}
+
+} // End of namespace Video
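The s_idct8x8 table in the new decoder is precomputed from the cosine formula quoted in the comment above it. A small standalone program that regenerates it (an illustration, not code from this commit):

    #include <cmath>
    #include <cstdio>

    int main() {
        const double pi = 3.14159265358979323846;
        for (int x = 0; x < 8; x++) {
            for (int y = 0; y < 8; y++) {
                double v = std::cos(((2 * x + 1) * y) * (pi / 16.0)) * 0.5;
                if (y == 0)
                    v /= std::sqrt(2.0); // the DC basis function gets the extra 1/sqrt(2)
                printf("%.15f%s", v, y == 7 ? "\n" : ", ");
            }
        }
        return 0;
    }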
diff --git a/video/psx_decoder.h b/video/psx_decoder.h
new file mode 100644
index 0000000000..c8ad92c45a
--- /dev/null
+++ b/video/psx_decoder.h
@@ -0,0 +1,128 @@
+/* ScummVM - Graphic Adventure Engine
+ *
+ * ScummVM is the legal property of its developers, whose names
+ * are too numerous to list here. Please refer to the COPYRIGHT
+ * file distributed with this source distribution.
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef VIDEO_PSX_DECODER_H
+#define VIDEO_PSX_DECODER_H
+
+#include "common/endian.h"
+#include "common/rational.h"
+#include "common/rect.h"
+#include "common/str.h"
+#include "graphics/surface.h"
+#include "video/video_decoder.h"
+
+namespace Audio {
+class QueuingAudioStream;
+}
+
+namespace Common {
+class BitStream;
+class Huffman;
+class SeekableReadStream;
+}
+
+namespace Graphics {
+struct PixelFormat;
+}
+
+namespace Video {
+
+/**
+ * Decoder for PSX stream videos.
+ * This currently implements the most basic PSX stream format that is
+ * used by most games on the system. Special variants are not supported
+ * at this time.
+ *
+ * Video decoder used in engines:
+ * - sword1 (psx)
+ * - sword2 (psx)
+ */
+class PSXStreamDecoder : public VideoDecoder {
+public:
+ // CD speed in sectors/second
+ // Calling code should use these enum values instead of the constants
+ enum CDSpeed {
+ kCD1x = 75,
+ kCD2x = 150
+ };
+
+ PSXStreamDecoder(CDSpeed speed, uint32 frameCount = 0);
+ virtual ~PSXStreamDecoder();
+
+ bool loadStream(Common::SeekableReadStream *stream);
+ void close();
+
+ bool isVideoLoaded() const { return _stream != 0; }
+ uint16 getWidth() const { return _surface->w; }
+ uint16 getHeight() const { return _surface->h; }
+ uint32 getFrameCount() const { return _frameCount; }
+ uint32 getElapsedTime() const;
+ uint32 getTimeToNextFrame() const;
+ const Graphics::Surface *decodeNextFrame();
+ Graphics::PixelFormat getPixelFormat() const { return _surface->format; }
+ bool endOfVideo() const { return _stream->pos() >= _stream->size(); }
+
+private:
+ void initCommon();
+ Common::SeekableReadStream *_stream;
+ Graphics::Surface *_surface;
+
+ uint32 _frameCount;
+ Audio::Timestamp _nextFrameStartTime;
+
+ Audio::SoundHandle _audHandle;
+ Audio::QueuingAudioStream *_audStream;
+ void queueAudioFromSector(Common::SeekableReadStream *sector);
+
+ enum PlaneType {
+ kPlaneY = 0,
+ kPlaneU = 1,
+ kPlaneV = 2
+ };
+
+ uint16 _macroBlocksW, _macroBlocksH;
+ byte *_yBuffer, *_cbBuffer, *_crBuffer;
+ void decodeFrame(Common::SeekableReadStream *frame);
+ void decodeMacroBlock(Common::BitStream *bits, int mbX, int mbY, uint16 scale, uint16 version);
+ void decodeBlock(Common::BitStream *bits, byte *block, int pitch, uint16 scale, uint16 version, PlaneType plane);
+
+ void readAC(Common::BitStream *bits, int *block);
+ Common::Huffman *_acHuffman;
+
+ int readDC(Common::BitStream *bits, uint16 version, PlaneType plane);
+ Common::Huffman *_dcHuffmanLuma, *_dcHuffmanChroma;
+ int _lastDC[3];
+
+ void dequantizeBlock(int *coefficients, float *block, uint16 scale);
+ void idct(float *dequantData, float *result);
+ int readSignedCoefficient(Common::BitStream *bits);
+
+ struct ADPCMStatus {
+ int16 sample[2];
+ } _adpcmStatus[2];
+
+ Common::SeekableReadStream *readSector();
+};
+
+} // End of namespace Video
+
+#endif
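A sketch of how an engine might drive the decoder through the interface declared above (not part of this commit; the file name, the blitting step and error handling are assumptions):

    Video::PSXStreamDecoder psx(Video::PSXStreamDecoder::kCD2x);
    Common::SeekableReadStream *stream = SearchMan.createReadStreamForMember("movie.str");

    if (stream && psx.loadStream(stream)) {
        while (!psx.endOfVideo()) {
            const Graphics::Surface *frame = psx.decodeNextFrame();
            if (frame) {
                // copy *frame to the backbuffer here
                g_system->updateScreen();
            }
            g_system->delayMillis(psx.getTimeToNextFrame());
        }
        psx.close();
    }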
diff --git a/video/qt_decoder.cpp b/video/qt_decoder.cpp
index 4a57ac405d..7557222b13 100644
--- a/video/qt_decoder.cpp
+++ b/video/qt_decoder.cpp
@@ -57,7 +57,6 @@ namespace Video {
QuickTimeDecoder::QuickTimeDecoder() {
_setStartTime = false;
- _audHandle = Audio::SoundHandle();
_scaledSurface = 0;
_dirtyPalette = false;
_palette = 0;
@@ -95,25 +94,29 @@ uint32 QuickTimeDecoder::getFrameCount() const {
}
void QuickTimeDecoder::startAudio() {
- if (_audStream) {
- updateAudioBuffer();
- g_system->getMixer()->playStream(Audio::Mixer::kPlainSoundType, &_audHandle, _audStream, -1, Audio::Mixer::kMaxChannelVolume, 0, DisposeAfterUse::NO);
- } // else no audio or the audio compression is not supported
+ updateAudioBuffer();
+
+ for (uint32 i = 0; i < _audioTracks.size(); i++) {
+ g_system->getMixer()->playStream(Audio::Mixer::kPlainSoundType, &_audioHandles[i], _audioTracks[i], -1, Audio::Mixer::kMaxChannelVolume, 0, DisposeAfterUse::NO);
+
+ // Pause the audio again if we're still paused
+ if (isPaused())
+ g_system->getMixer()->pauseHandle(_audioHandles[i], true);
+ }
}
void QuickTimeDecoder::stopAudio() {
- if (_audStream)
- g_system->getMixer()->stopHandle(_audHandle);
+ for (uint32 i = 0; i < _audioHandles.size(); i++)
+ g_system->getMixer()->stopHandle(_audioHandles[i]);
}
void QuickTimeDecoder::pauseVideoIntern(bool pause) {
- if (_audStream)
- g_system->getMixer()->pauseHandle(_audHandle, pause);
+ for (uint32 i = 0; i < _audioHandles.size(); i++)
+ g_system->getMixer()->pauseHandle(_audioHandles[i], pause);
}
QuickTimeDecoder::VideoTrackHandler *QuickTimeDecoder::findNextVideoTrack() const {
VideoTrackHandler *bestTrack = 0;
- int32 num;
uint32 bestTime = 0xffffffff;
for (uint32 i = 0; i < _handlers.size(); i++) {
@@ -124,7 +127,6 @@ QuickTimeDecoder::VideoTrackHandler *QuickTimeDecoder::findNextVideoTrack() cons
if (time < bestTime) {
bestTime = time;
bestTrack = track;
- num = i;
}
}
}
@@ -148,9 +150,7 @@ const Graphics::Surface *QuickTimeDecoder::decodeNextFrame() {
// Update audio buffers too
// (needs to be done after we find the next track)
- for (uint32 i = 0; i < _handlers.size(); i++)
- if (_handlers[i]->getTrackType() == TrackHandler::kTrackTypeAudio)
- ((AudioTrackHandler *)_handlers[i])->updateBuffer();
+ updateAudioBuffer();
if (_scaledSurface) {
scaleSurface(frame, _scaledSurface, _scaleFactorX, _scaleFactorY);
@@ -180,15 +180,13 @@ bool QuickTimeDecoder::endOfVideo() const {
}
uint32 QuickTimeDecoder::getElapsedTime() const {
- // TODO: Convert to multi-track
- if (_audStream) {
- // Use the audio time if present and the audio track's time is less than the
- // total length of the audio track. The audio track can end before the video
- // track, so we need to fall back on the getMillis() time tracking in that
- // case.
- uint32 time = g_system->getMixer()->getSoundElapsedTime(_audHandle) + _audioStartOffset.msecs();
- if (time < _tracks[_audioTrackIndex]->mediaDuration * 1000 / _tracks[_audioTrackIndex]->timeScale)
- return time;
+ // Try to base sync off an active audio track
+ for (uint32 i = 0; i < _audioHandles.size(); i++) {
+ if (g_system->getMixer()->isSoundHandleActive(_audioHandles[i])) {
+ uint32 time = g_system->getMixer()->getSoundElapsedTime(_audioHandles[i]) + _audioStartOffset.msecs();
+ if (Audio::Timestamp(time, 1000) < _audioTracks[i]->getLength())
+ return time;
+ }
}
// Just use time elapsed since the beginning
@@ -238,14 +236,12 @@ void QuickTimeDecoder::init() {
_startTime = 0;
_setStartTime = false;
- // Start the audio codec if we've got one that we can handle
- if (_audStream) {
- startAudio();
- _audioStartOffset = Audio::Timestamp(0);
+ // Initialize all the audio tracks
+ if (!_audioTracks.empty()) {
+ _audioHandles.resize(_audioTracks.size());
- // TODO: Support multiple audio tracks
- // For now, just push back a handler for the first audio track
- _handlers.push_back(new AudioTrackHandler(this, _tracks[_audioTrackIndex]));
+ for (uint32 i = 0; i < _audioTracks.size(); i++)
+ _handlers.push_back(new AudioTrackHandler(this, _audioTracks[i]));
}
// Initialize all the video tracks
@@ -279,6 +275,12 @@ void QuickTimeDecoder::init() {
} else {
_needUpdate = false;
}
+
+ // Now start any audio
+ if (!_audioTracks.empty()) {
+ startAudio();
+ _audioStartOffset = Audio::Timestamp(0);
+ }
}
Common::QuickTimeParser::SampleDesc *QuickTimeDecoder::readSampleDesc(Track *track, uint32 format) {
@@ -403,10 +405,15 @@ void QuickTimeDecoder::freeAllTrackHandlers() {
}
void QuickTimeDecoder::seekToTime(Audio::Timestamp time) {
+ stopAudio();
+ _audioStartOffset = time;
+
// Sets all tracks to this time
for (uint32 i = 0; i < _handlers.size(); i++)
_handlers[i]->seekToTime(time);
+ startAudio();
+
// Reset our start time
_startTime = g_system->getMillis() - time.msecs();
_setStartTime = true;
@@ -485,7 +492,7 @@ void QuickTimeDecoder::VideoSampleDesc::initCodec() {
bool QuickTimeDecoder::endOfVideoTracks() const {
for (uint32 i = 0; i < _handlers.size(); i++)
- if (_handlers[i]->getTrackType() == TrackHandler::kTrackTypeVideo && !_handlers[i]->endOfTrack())
+ if (_handlers[i]->getTrackType() == TrackHandler::kTrackTypeVideo && !_handlers[i]->endOfTrack())
return false;
return true;
@@ -500,64 +507,23 @@ bool QuickTimeDecoder::TrackHandler::endOfTrack() {
return _curEdit == _parent->editCount;
}
-QuickTimeDecoder::AudioTrackHandler::AudioTrackHandler(QuickTimeDecoder *decoder, Track *parent) : TrackHandler(decoder, parent) {
+QuickTimeDecoder::AudioTrackHandler::AudioTrackHandler(QuickTimeDecoder *decoder, QuickTimeAudioTrack *audioTrack)
+ : TrackHandler(decoder, audioTrack->getParent()), _audioTrack(audioTrack) {
}
void QuickTimeDecoder::AudioTrackHandler::updateBuffer() {
- if (!_decoder->_audStream)
- return;
-
- uint32 numberOfChunksNeeded = 0;
-
- if (_decoder->endOfVideoTracks()) {
- // If we have no video left (or no video), there's nothing to base our buffer against
- numberOfChunksNeeded = _parent->chunkCount;
- } else {
- Audio::QuickTimeAudioDecoder::AudioSampleDesc *entry = (Audio::QuickTimeAudioDecoder::AudioSampleDesc *)_parent->sampleDescs[0];
-
- // Calculate the amount of chunks we need in memory until the next frame
- uint32 timeToNextFrame = _decoder->getTimeToNextFrame();
- uint32 timeFilled = 0;
- uint32 curAudioChunk = _decoder->_curAudioChunk - _decoder->_audStream->numQueuedStreams();
-
- for (; timeFilled < timeToNextFrame && curAudioChunk < _parent->chunkCount; numberOfChunksNeeded++, curAudioChunk++) {
- uint32 sampleCount = entry->getAudioChunkSampleCount(curAudioChunk);
- assert(sampleCount);
-
- timeFilled += sampleCount * 1000 / entry->_sampleRate;
- }
-
- // Add a couple extra to ensure we don't underrun
- numberOfChunksNeeded += 3;
- }
-
- // Keep three streams in buffer so that if/when the first two end, it goes right into the next
- while (_decoder->_audStream->numQueuedStreams() < numberOfChunksNeeded && _decoder->_curAudioChunk < _parent->chunkCount)
- _decoder->queueNextAudioChunk();
+ if (_decoder->endOfVideoTracks()) // If we have no video left (or no video), there's nothing to base our buffer against
+ _audioTrack->queueRemainingAudio();
+ else // Otherwise, queue enough to get us to the next frame plus another half second spare
+ _audioTrack->queueAudio(Audio::Timestamp(_decoder->getTimeToNextFrame() + 500, 1000));
}
bool QuickTimeDecoder::AudioTrackHandler::endOfTrack() {
- // TODO: Handle edits
- return (_decoder->_curAudioChunk == _parent->chunkCount) && _decoder->_audStream->endOfData();
+ return _audioTrack->endOfData();
}
void QuickTimeDecoder::AudioTrackHandler::seekToTime(Audio::Timestamp time) {
- if (_decoder->_audStream) {
- // Stop all audio
- _decoder->stopAudio();
-
- _decoder->_audioStartOffset = time;
-
- // Seek to the new audio location
- _decoder->setAudioStreamPos(_decoder->_audioStartOffset);
-
- // Restart the audio
- _decoder->startAudio();
-
- // Pause the audio again if we're still paused
- if (_decoder->isPaused() && _decoder->_audStream)
- g_system->getMixer()->pauseHandle(_decoder->_audHandle, true);
- }
+ _audioTrack->seek(time);
}
QuickTimeDecoder::VideoTrackHandler::VideoTrackHandler(QuickTimeDecoder *decoder, Common::QuickTimeParser::Track *parent) : TrackHandler(decoder, parent) {
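In the new updateBuffer() above, the Audio::Timestamp argument expresses a duration in milliseconds (1000 ticks per second), so the handler queues enough audio to cover the gap to the next video frame plus half a second of slack. A tiny illustration of that constructor (an aside, not code from this commit; the 42 ms figure is made up):

    // 42 ms to the next frame + 500 ms of slack = 542 ms of audio requested.
    Audio::Timestamp span(42 + 500, 1000);
    assert(span.msecs() == 542);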
diff --git a/video/qt_decoder.h b/video/qt_decoder.h
index b2d7153f42..583b4b44b5 100644
--- a/video/qt_decoder.h
+++ b/video/qt_decoder.h
@@ -140,7 +140,7 @@ private:
void stopAudio();
void updateAudioBuffer();
void readNextAudioChunk();
- Audio::SoundHandle _audHandle;
+ Common::Array<Audio::SoundHandle> _audioHandles;
Audio::Timestamp _audioStartOffset;
Codec *createCodec(uint32 codecTag, byte bitsPerPixel);
@@ -186,17 +186,18 @@ private:
};
// The AudioTrackHandler is currently just a wrapper around some
- // QuickTimeDecoder functions. Eventually this can be made to
- // handle multiple audio tracks, but I haven't seen a video with
- // that yet.
+ // QuickTimeDecoder functions.
class AudioTrackHandler : public TrackHandler {
public:
- AudioTrackHandler(QuickTimeDecoder *decoder, Track *parent);
+ AudioTrackHandler(QuickTimeDecoder *decoder, QuickTimeAudioTrack *audioTrack);
TrackType getTrackType() const { return kTrackTypeAudio; }
void updateBuffer();
void seekToTime(Audio::Timestamp time);
bool endOfTrack();
+
+ private:
+ QuickTimeAudioTrack *_audioTrack;
};
// The VideoTrackHandler is the bridge between the time of playback