Skip to content

Commit

Permalink
Add BT.601 YCbCr output
Browse files Browse the repository at this point in the history
ld-chroma-decoder: add the -p/--output-format option to select between
'rgb' and 'yuv' output formats.

'rgb' is the default and always outputs 16-bit packed RGB48 data. 'yuv'
outputs 16-bit planar YUV444P16, unless the mono chroma decoder is used
and then 16-bit GRAY16 is output instead.

Note that YCbCr is often mistakenly referred to as YUV, a convention we
continue here.
  • Loading branch information
ifb committed Feb 15, 2021
1 parent 3032c9a commit 693ff25
Show file tree
Hide file tree
Showing 29 changed files with 528 additions and 152 deletions.
4 changes: 3 additions & 1 deletion tools/ld-analyse/ld-analyse.pro
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ SOURCES += \
../ld-chroma-decoder/palcolour.cpp \
../ld-chroma-decoder/comb.cpp \
../ld-chroma-decoder/rgb.cpp \
../ld-chroma-decoder/ycbcr.cpp \
../ld-chroma-decoder/transformpal.cpp \
../ld-chroma-decoder/transformpal2d.cpp \
../ld-chroma-decoder/transformpal3d.cpp \
Expand Down Expand Up @@ -68,7 +69,8 @@ HEADERS += \
../ld-chroma-decoder/palcolour.h \
../ld-chroma-decoder/comb.h \
../ld-chroma-decoder/rgb.h \
../ld-chroma-decoder/rgbframe.h \
../ld-chroma-decoder/outputframe.h \
../ld-chroma-decoder/ycbcr.h \
../ld-chroma-decoder/yiq.h \
../ld-chroma-decoder/transformpal.h \
../ld-chroma-decoder/transformpal2d.h \
Expand Down
4 changes: 2 additions & 2 deletions tools/ld-analyse/tbcsource.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -536,7 +536,7 @@ QImage TbcSource::generateQImage(qint32 frameNumber)
// Chroma decode the current frame and display

// Decode colour for the current frame, to RGB 16-16-16 interlaced output
QVector<RGBFrame> outputFrames(1);
QVector<OutputFrame> outputFrames(1);
if (videoParameters.isSourcePal) {
// PAL source
palColour.decodeFrames(inputFields, startIndex, endIndex, outputFrames);
Expand All @@ -546,7 +546,7 @@ QImage TbcSource::generateQImage(qint32 frameNumber)
}

// Get a pointer to the RGB data
const quint16 *rgbPointer = outputFrames[0].data();
const quint16 *rgbPointer = outputFrames[0].RGB.data();

// Fill the QImage with black
frameImage.fill(Qt::black);
Expand Down
73 changes: 55 additions & 18 deletions tools/ld-chroma-decoder/comb.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ void Comb::updateConfiguration(const LdDecodeMetaData::VideoParameters &_videoPa
}

void Comb::decodeFrames(const QVector<SourceField> &inputFields, qint32 startIndex, qint32 endIndex,
QVector<RGBFrame> &outputFrames)
QVector<OutputFrame> &outputFrames)
{
assert(configurationSet);
assert((outputFrames.size() * 2) == (endIndex - startIndex));
Expand Down Expand Up @@ -172,8 +172,9 @@ void Comb::decodeFrames(const QVector<SourceField> &inputFields, qint32 startInd
currentFrameBuffer->doYNR();
currentFrameBuffer->doCNR();

// Convert the YIQ result to RGB
outputFrames[frameIndex] = currentFrameBuffer->yiqToRgbFrame();
// Convert the YIQ result to RGB or YCbCr
outputFrames[frameIndex] = configuration.outputYCbCr ? currentFrameBuffer->yiqToYUVFrame() :
currentFrameBuffer->yiqToRGBFrame();

// Overlay the map if required
if (configuration.dimensions == 3 && configuration.showMap) {
Expand All @@ -195,9 +196,9 @@ Comb::FrameBuffer::FrameBuffer(const LdDecodeMetaData::VideoParameters &videoPar
irescale = (videoParameters.white16bIre - videoParameters.black16bIre) / 100;
}

/*
/*
* The color burst frequency is 227.5 cycles per line, so it flips 180 degrees for each line.
*
*
* The color burst *signal* is at 180 degrees, which is a greenish yellow.
*
* When SCH phase is 0 (properly aligned) the color burst is in phase with the leading edge of the HSYNC pulse.
Expand All @@ -211,19 +212,19 @@ Comb::FrameBuffer::FrameBuffer(const LdDecodeMetaData::VideoParameters &videoPar
// Return the phase ID of the field containing the given frame line.
// Even frame lines belong to the first field, odd lines to the second.
inline qint32 Comb::FrameBuffer::getFieldID(qint32 lineNumber) const
{
    if ((lineNumber % 2) == 0) {
        return firstFieldPhaseID;
    }
    return secondFieldPhaseID;
}

// NOTE: lineNumber is presumed to be starting at 1. (This lines up with how splitIQ calls it)
inline bool Comb::FrameBuffer::getLinePhase(qint32 lineNumber) const
{
    // Phase IDs 1 and 4 put the positive phase on even field lines
    const qint32 fieldID = getFieldID(lineNumber);
    const bool positiveOnEvenLines = (fieldID == 1) || (fieldID == 4);

    // Odd field lines carry the opposite phase of even ones
    const bool lineIsEven = ((lineNumber / 2) % 2) == 0;

    return lineIsEven == positiveOnEvenLines;
}

Expand Down Expand Up @@ -662,22 +663,22 @@ void Comb::FrameBuffer::doYNR()
}
}

// Convert buffer from YIQ to RGB 16-16-16
RGBFrame Comb::FrameBuffer::yiqToRgbFrame()
// Convert buffer from YIQ to RGB and store as packed RGB48
OutputFrame Comb::FrameBuffer::yiqToRGBFrame()
{
RGBFrame rgbOutputFrame;
rgbOutputFrame.resize(videoParameters.fieldWidth * frameHeight * 3); // for RGB 16-16-16
OutputFrame outputFrame;
outputFrame.RGB.resize(videoParameters.fieldWidth * frameHeight * 3); // for RGB 16-16-16

// Initialise the output frame
rgbOutputFrame.fill(0);
outputFrame.RGB.fill(0);

// Initialise YIQ to RGB converter
RGB rgb(videoParameters.white16bIre, videoParameters.black16bIre, configuration.whitePoint75, configuration.chromaGain);

// Perform YIQ to RGB conversion
for (qint32 lineNumber = videoParameters.firstActiveFrameLine; lineNumber < videoParameters.lastActiveFrameLine; lineNumber++) {
// Get a pointer to the line
quint16 *linePointer = rgbOutputFrame.data() + (videoParameters.fieldWidth * 3 * lineNumber);
quint16 *linePointer = outputFrame.RGB.data() + (videoParameters.fieldWidth * 3 * lineNumber);

// Offset the output by the activeVideoStart to keep the output frame
// in the same x position as the input video frame
Expand All @@ -689,19 +690,18 @@ RGBFrame Comb::FrameBuffer::yiqToRgbFrame()
&linePointer[o]);
}

// Return the RGB frame data
return rgbOutputFrame;
return outputFrame;
}

// Convert buffer from YIQ to RGB
void Comb::FrameBuffer::overlayMap(const FrameBuffer &previousFrame, const FrameBuffer &nextFrame, RGBFrame &rgbFrame)
void Comb::FrameBuffer::overlayMap(const FrameBuffer &previousFrame, const FrameBuffer &nextFrame, OutputFrame &rgbFrame)
{
qDebug() << "Comb::FrameBuffer::overlayMap(): Overlaying map onto RGB output";

// Overlay the map on the output RGB
for (qint32 lineNumber = videoParameters.firstActiveFrameLine; lineNumber < videoParameters.lastActiveFrameLine; lineNumber++) {
// Get a pointer to the line
quint16 *linePointer = rgbFrame.data() + (videoParameters.fieldWidth * 3 * lineNumber);
quint16 *linePointer = rgbFrame.RGB.data() + (videoParameters.fieldWidth * 3 * lineNumber);

const quint16 *lineData = rawbuffer.data() + (lineNumber * videoParameters.fieldWidth);

Expand Down Expand Up @@ -729,3 +729,40 @@ void Comb::FrameBuffer::overlayMap(const FrameBuffer &previousFrame, const Frame
}
}
}

// Convert buffer from YIQ to YCbCr and store as planar YUV444P16
OutputFrame Comb::FrameBuffer::yiqToYUVFrame()
{
    OutputFrame outputFrame;

    // Allocate one full-frame plane each for Y, Cb and Cr
    const qint32 planeSize = videoParameters.fieldWidth * frameHeight;
    outputFrame.Y.resize(planeSize);
    outputFrame.Cb.resize(planeSize);
    outputFrame.Cr.resize(planeSize);

    // Pre-fill with studio-swing black: Y=16, Cb=Cr=128, scaled to 16 bits
    outputFrame.Y.fill(16 * 256);
    outputFrame.Cb.fill(128 * 256);
    outputFrame.Cr.fill(128 * 256);

    // Set up the YIQ to YCbCr converter
    YCbCr ycbcr(videoParameters.white16bIre, videoParameters.black16bIre, configuration.whitePoint75, configuration.chromaGain);

    // Convert each active line of the frame
    for (qint32 line = videoParameters.firstActiveFrameLine; line < videoParameters.lastActiveFrameLine; line++) {
        // Offset within each plane where this line's active region starts;
        // shifting by activeVideoStart keeps the output in the same x
        // position as the input video frame
        const qint32 lineOffset = (videoParameters.fieldWidth * line) + videoParameters.activeVideoStart;

        ycbcr.convertLine(&yiqBuffer[line][videoParameters.activeVideoStart],
                          &yiqBuffer[line][videoParameters.activeVideoEnd],
                          outputFrame.Y.data() + lineOffset,
                          outputFrame.Cb.data() + lineOffset,
                          outputFrame.Cr.data() + lineOffset);
    }

    return outputFrame;
}
13 changes: 9 additions & 4 deletions tools/ld-chroma-decoder/comb.h
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,11 @@

#include "lddecodemetadata.h"

#include "decoder.h"
#include "outputframe.h"
#include "rgb.h"
#include "rgbframe.h"
#include "sourcefield.h"
#include "ycbcr.h"
#include "yiq.h"

class Comb
Expand All @@ -53,6 +55,8 @@ class Comb
qint32 dimensions = 2;
bool adaptive = true;
bool showMap = false;
Decoder::PixelFormat pixelFormat = Decoder::PixelFormat::RGB48;
bool outputYCbCr = false;

double cNRLevel = 0.0;
double yNRLevel = 1.0;
Expand All @@ -67,7 +71,7 @@ class Comb

// Decode a sequence of fields into a sequence of interlaced frames
void decodeFrames(const QVector<SourceField> &inputFields, qint32 startIndex, qint32 endIndex,
QVector<RGBFrame> &outputFrames);
QVector<OutputFrame> &outputFrames);

// Maximum frame size
static constexpr qint32 MAX_WIDTH = 910;
Expand Down Expand Up @@ -99,8 +103,9 @@ class Comb
void doCNR();
void doYNR();

RGBFrame yiqToRgbFrame();
void overlayMap(const FrameBuffer &previousFrame, const FrameBuffer &nextFrame, RGBFrame &rgbOutputFrame);
OutputFrame yiqToRGBFrame();
OutputFrame yiqToYUVFrame();
void overlayMap(const FrameBuffer &previousFrame, const FrameBuffer &nextFrame, OutputFrame &outputFrame);

private:
const LdDecodeMetaData::VideoParameters &videoParameters;
Expand Down
72 changes: 51 additions & 21 deletions tools/ld-chroma-decoder/decoder.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -78,30 +78,60 @@ void Decoder::setVideoParameters(Decoder::Configuration &config, const LdDecodeM
// Show output information to the user
const qint32 frameHeight = (videoParameters.fieldHeight * 2) - 1;
qInfo() << "Input video of" << config.videoParameters.fieldWidth << "x" << frameHeight <<
"will be colourised and trimmed to" << outputWidth << "x" << outputHeight << "RGB 16-16-16 frames";
"will be colourised and trimmed to" << outputWidth << "x" << outputHeight << (config.outputYCbCr ? "YCbCr" : "RGB") << "frames";
}

RGBFrame Decoder::cropOutputFrame(const Decoder::Configuration &config, const RGBFrame &outputData) {
OutputFrame Decoder::cropOutputFrame(const Decoder::Configuration &config, const OutputFrame &outputData) {
const qint32 activeVideoStart = config.videoParameters.activeVideoStart;
const qint32 activeVideoEnd = config.videoParameters.activeVideoEnd;
const qint32 outputLineLength = (activeVideoEnd - activeVideoStart) * 3;

RGBFrame croppedData;

// Insert padding at the top
if (config.topPadLines > 0) {
croppedData.insert(croppedData.begin(), config.topPadLines * outputLineLength, 0);
}

// Copy the active region from the decoded image
for (qint32 y = config.videoParameters.firstActiveFrameLine; y < config.videoParameters.lastActiveFrameLine; y++) {
croppedData.append(outputData.mid((y * config.videoParameters.fieldWidth * 3) + (activeVideoStart * 3),
outputLineLength));
}

// Insert padding at the bottom
if (config.bottomPadLines > 0) {
croppedData.insert(croppedData.end(), config.bottomPadLines * outputLineLength, 0);
qint32 outputLineLength = (activeVideoEnd - activeVideoStart);

OutputFrame croppedData;
switch (config.pixelFormat) {
case RGB48:
outputLineLength *= 3;
// Insert padding at the top
if (config.topPadLines > 0) {
croppedData.RGB.insert(croppedData.RGB.begin(), config.topPadLines * outputLineLength, 0);
}
// Copy the active region from the decoded image
for (qint32 y = config.videoParameters.firstActiveFrameLine; y < config.videoParameters.lastActiveFrameLine; y++) {
croppedData.RGB.append(outputData.RGB.mid((y * config.videoParameters.fieldWidth * 3) + (activeVideoStart * 3),
outputLineLength));
}
// Insert padding at the bottom
if (config.bottomPadLines > 0) {
croppedData.RGB.insert(croppedData.RGB.end(), config.bottomPadLines * outputLineLength, 0);
}
break;
case YUV444P16:
if (config.topPadLines > 0) {
croppedData.Y.insert(croppedData.Y.begin(), config.topPadLines * outputLineLength, 16 * 256);
croppedData.Cb.insert(croppedData.Cb.begin(), config.topPadLines * outputLineLength, 128 * 256);
croppedData.Cr.insert(croppedData.Cr.begin(), config.topPadLines * outputLineLength, 128 * 256);
}
for (qint32 y = config.videoParameters.firstActiveFrameLine; y < config.videoParameters.lastActiveFrameLine; y++) {
croppedData.Y.append(outputData.Y.mid((y * config.videoParameters.fieldWidth) + activeVideoStart, outputLineLength));
croppedData.Cb.append(outputData.Cb.mid((y * config.videoParameters.fieldWidth) + activeVideoStart, outputLineLength));
croppedData.Cr.append(outputData.Cr.mid((y * config.videoParameters.fieldWidth) + activeVideoStart, outputLineLength));
}
if (config.bottomPadLines > 0) {
croppedData.Y.insert(croppedData.Y.end(), config.bottomPadLines * outputLineLength, 16 * 256);
croppedData.Cb.insert(croppedData.Cb.end(), config.bottomPadLines * outputLineLength, 128 * 256);
croppedData.Cr.insert(croppedData.Cr.end(), config.bottomPadLines * outputLineLength, 128 * 256);
}
break;
case GRAY16:
if (config.topPadLines > 0) {
croppedData.Y.insert(croppedData.Y.begin(), config.topPadLines * outputLineLength, 16 * 256);
}
for (qint32 y = config.videoParameters.firstActiveFrameLine; y < config.videoParameters.lastActiveFrameLine; y++) {
croppedData.Y.append(outputData.Y.mid((y * config.videoParameters.fieldWidth) + activeVideoStart, outputLineLength));
}
if (config.bottomPadLines > 0) {
croppedData.Y.insert(croppedData.Y.end(), config.bottomPadLines * outputLineLength, 16 * 256);
}
break;
}

return croppedData;
Expand All @@ -116,7 +146,7 @@ void DecoderThread::run()
{
// Input and output data
QVector<SourceField> inputFields;
QVector<RGBFrame> outputFrames;
QVector<OutputFrame> outputFrames;

while (!abort) {
// Get the next batch of fields to process
Expand Down
18 changes: 15 additions & 3 deletions tools/ld-chroma-decoder/decoder.h
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@

#include "lddecodemetadata.h"

#include "rgbframe.h"
#include "outputframe.h"
#include "sourcefield.h"

class DecoderPool;
Expand Down Expand Up @@ -76,21 +76,33 @@ class Decoder {
// Construct a new worker thread
virtual QThread *makeThread(QAtomicInt& abort, DecoderPool& decoderPool) = 0;

// All of the supported output pixel formats
enum PixelFormat {
RGB48 = 0,
YUV444P16,
GRAY16
};

// After configuration, return a readable output pixel format
virtual const char *getPixelName() const = 0;

// Parameters used by the decoder and its threads.
// This may be subclassed by decoders to add extra parameters.
struct Configuration {
// Parameters computed from the video metadata
LdDecodeMetaData::VideoParameters videoParameters;
qint32 topPadLines;
qint32 bottomPadLines;
Decoder::PixelFormat pixelFormat = RGB48;
bool outputYCbCr = false;
};

// Compute the output frame size in Configuration, adjusting the active
// video region as required
static void setVideoParameters(Configuration &config, const LdDecodeMetaData::VideoParameters &videoParameters);

// Crop a full decoded frame to the output frame size
static RGBFrame cropOutputFrame(const Configuration &config, const RGBFrame &outputData);
static OutputFrame cropOutputFrame(const Configuration &config, const OutputFrame &outputData);
};

// Abstract base class for chroma decoder worker threads.
Expand All @@ -104,7 +116,7 @@ class DecoderThread : public QThread {

// Decode a sequence of fields into a sequence of frames
virtual void decodeFrames(const QVector<SourceField> &inputFields, qint32 startIndex, qint32 endIndex,
QVector<RGBFrame> &outputFrames) = 0;
QVector<OutputFrame> &outputFrames) = 0;

// Decoder pool
QAtomicInt& abort;
Expand Down
Loading

0 comments on commit 693ff25

Please sign in to comment.