diff --git a/.gitmodules b/.gitmodules index fbcb25c31..712f09c06 100644 --- a/.gitmodules +++ b/.gitmodules @@ -31,3 +31,6 @@ [submodule "external/stb"] path = external/stb url = https://github.com/nothings/stb.git +[submodule "external/linalg"] + path = external/linalg + url = https://github.com/sgorsten/linalg.git diff --git a/CMakeLists.txt b/CMakeLists.txt index 69079e5d4..90c27406f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -691,8 +691,8 @@ add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_D include (${CMAKE_CURRENT_SOURCE_DIR}/cmake/packages.cmake) # external targets -if (WIN32 AND TARGET stb AND TARGET flatbuffers AND TARGET protobuf-nanopb AND TARGET lunasvg AND TARGET flatc AND TARGET qmqtt AND TARGET liblzma AND TARGET sqlite3) - set_target_properties(stb qmqtt flatbuffers protobuf-nanopb lunasvg flatc resources uninstall liblzma sqlite3 PROPERTIES FOLDER ExternalLibsTargets) +if (WIN32 AND TARGET stb AND TARGET flatbuffers AND TARGET protobuf-nanopb AND TARGET lunasvg AND TARGET flatc AND TARGET qmqtt AND TARGET liblzma AND TARGET sqlite3 AND TARGET precompiled_hyperhdr_headers) + set_target_properties(stb qmqtt flatbuffers protobuf-nanopb lunasvg flatc resources uninstall liblzma sqlite3 precompiled_hyperhdr_headers PROPERTIES FOLDER ExternalLibsTargets) else() set_target_properties(resources uninstall PROPERTIES FOLDER ExternalLibsTargets) endif() diff --git a/external/CMakeLists.txt b/external/CMakeLists.txt index f4d1272c2..cc53e94fa 100644 --- a/external/CMakeLists.txt +++ b/external/CMakeLists.txt @@ -78,6 +78,13 @@ if(ENABLE_WS281XPWM) add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/rpi_ws281x) endif() +#============================================================================= +# LINALG +#============================================================================= + +add_library(linalg INTERFACE) +target_include_directories(linalg INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}/linalg") + #============================================================================= # LUNASVG #============================================================================= diff --git a/external/linalg b/external/linalg new file mode 160000 index 000000000..4460f1f5b --- /dev/null +++ b/external/linalg @@ -0,0 +1 @@ +Subproject commit 4460f1f5b85ccc81ffcf49aa450d454db58ca90e diff --git a/include/base/Grabber.h b/include/base/Grabber.h index c71b1a34c..6ebdbba17 100644 --- a/include/base/Grabber.h +++ b/include/base/Grabber.h @@ -164,6 +164,8 @@ public slots: QStringList getVideoDevices() const; + void signalSetLutHandler(MemoryBuffer* lut); + signals: void SignalNewCapturedFrame(const Image& image); diff --git a/include/lut-calibrator/ColorSpace.h b/include/lut-calibrator/ColorSpace.h new file mode 100644 index 000000000..70c50e463 --- /dev/null +++ b/include/lut-calibrator/ColorSpace.h @@ -0,0 +1,391 @@ +#pragma once + +/* ColorSpace.h +* +* MIT License +* +* Copyright (c) 2020-2024 awawa-dev +* +* Project homesite: https://github.com/awawa-dev/HyperHDR +* +* Permission is hereby granted, free of charge, to any person obtaining a copy +* of this software and associated documentation files (the "Software"), to deal +* in the Software without restriction, including without limitation the rights +* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the Software is +* furnished to do so, subject to the following conditions: +* +* The above copyright notice and this permission notice 
shall be included in all +* copies or substantial portions of the Software. + +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +* SOFTWARE. +*/ + +#ifndef PCH_ENABLED + #include + #include + #include +#endif + +#include + +using namespace linalg; +using namespace aliases; + +namespace ColorSpaceMath +{ + enum PRIMARIES { SRGB = 0 }; + + const std::map> knownPrimaries = { + { + PRIMARIES::SRGB, + { + { 0.6400, 0.3300, 0.2126 }, + { 0.3000, 0.6000, 0.7152 }, + { 0.1500, 0.0600, 0.0722 }, + { 0.3127, 0.3290, 1.0000 } + } + } + }; + + constexpr mat matrix(std::array m) + { + double3 c1(m[0], m[3], m[6]); + double3 c2(m[1], m[4], m[7]); + double3 c3(m[2], m[5], m[8]); + return double3x3(c1, c2, c3); + } + + constexpr mat matrix4(std::array m) + { + double4 c1(m[0], m[4], m[8], m[12]); + double4 c2(m[1], m[5], m[9], m[13]); + double4 c3(m[2], m[6], m[10], m[14]); + double4 c4(m[3], m[7], m[11], m[15]); + return double4x4(c1, c2, c3, c4); + } + + constexpr double3x3 matrix_bt2020_to_XYZ = matrix({ + 0.636958, 0.144617, 0.168881, + 0.262700, 0.677998, 0.059302, + 0.000000, 0.028073, 1.060985 + }); + + constexpr double3x3 matrix_sRgb_to_XYZ = matrix({ + 0.4124564, 0.3575761, 0.1804375, + 0.2126729, 0.7151522, 0.0721750, + 0.0193339, 0.1191920, 0.9503041 + }); + + double3 from_bt2020_to_XYZ(double3 x) + { + return mul(matrix_bt2020_to_XYZ, x); + } + + double3 from_XYZ_to_bt2020(double3 x) + { + constexpr double3x3 m = inverse(matrix_bt2020_to_XYZ); + return mul(m, x); + } + + double3 from_XYZ_to_sRGB(double3 x) + { + constexpr double3x3 m = inverse(matrix_sRgb_to_XYZ); + return mul(m, x); + } + + double3 from_sRGB_to_XYZ(double3 x) + { + return mul(matrix_sRgb_to_XYZ, x); + } + + double2 XYZ_to_xy(const double3& a) + { + double len = std::max(a.x + a.y + a.z, std::numeric_limits::epsilon()); + return { a.x / len, a.y / len }; + } + + double3x3 to_XYZ( + const double2& red_xy, + const double2& green_xy, + const double2& blue_xy, + const double2& white_xy + ) + { + double3 r(red_xy.x, red_xy.y, 1.0 - (red_xy.x + red_xy.y)); + double3 g(green_xy.x, green_xy.y, 1.0 - (green_xy.x + green_xy.y)); + double3 b(blue_xy.x, blue_xy.y, 1.0 - (blue_xy.x + blue_xy.y)); + double3 w(white_xy.x, white_xy.y, 1.0 - (white_xy.x + white_xy.y)); + + w /= white_xy.y; + + double3x3 retMat(r, g, b); + + double3x3 invMat; + invMat = linalg::inverse(retMat); + + double3 scale = linalg::mul(invMat, w); + + retMat[0] *= scale.x; + retMat[1] *= scale.y; + retMat[2] *= scale.z; + + return retMat; + } + + + double3 xyz_to_lab(double3 xyz) + { + double x = xyz.x / 95.047; + double y = xyz.y / 100.00; + double z = xyz.z / 108.883; + + x = (x > 0.008856) ? std::cbrt(x) : (7.787 * x + 16.0 / 116.0); + y = (y > 0.008856) ? std::cbrt(y) : (7.787 * y + 16.0 / 116.0); + z = (z > 0.008856) ? 
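A minimal usage sketch for the ColorSpaceMath helpers defined above (an assumed standalone test, not part of this diff; the include path and the Qt Core dependency pulled in by the header's QString usage are assumptions): it pushes linear sRGB white through matrix_sRgb_to_XYZ and checks that the resulting chromaticity lands on the D65 white point (x ≈ 0.3127, y ≈ 0.3290) listed in knownPrimaries.

#include <cstdio>
#include <lut-calibrator/ColorSpace.h>   // assumed include path for the header above

int main()
{
	using namespace ColorSpaceMath;

	const double3 white(1.0, 1.0, 1.0);            // linear sRGB white
	const double3 XYZ = from_sRGB_to_XYZ(white);   // multiply by matrix_sRgb_to_XYZ
	const double2 xy = XYZ_to_xy(XYZ);             // project to xy chromaticity

	// Expected output: roughly 0.3127 0.3290, matching the D65 entry in knownPrimaries.
	std::printf("white xy = %.4f %.4f\n", xy.x, xy.y);
	return 0;
}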
std::cbrt(z) : (7.787 * z + 16.0 / 116.0); + + return double3( + (116.0 * y) - 16, + 500 * (x - y), + 200 * (y - z) + ); + } + + double3 lab_to_xyz(double3 lab) + { + double y = (lab.x + 16.0) / 116.0; + double x = lab.y / 500.0 + y; + double z = y - lab.z / 200.0; + + double x3 = std::pow(x, 3); + double y3 = std::pow(y, 3); + double z3 = std::pow(z, 3); + + return double3( + x = ((x3 > 0.008856) ? x3 : ((x - 16.0 / 116.0) / 7.787)) * 95.047, + y = ((y3 > 0.008856) ? y3 : ((y - 16.0 / 116.0) / 7.787)) * 100.0, + z = ((z3 > 0.008856) ? z3 : ((z - 16.0 / 116.0) / 7.787)) * 108.883 + ); + } + + double3 lab_to_lch(double3 lab) + { + auto l = lab.x; + auto a = lab.y; + auto b = lab.z; + + const auto c = std::sqrt(std::pow(a, 2) + std::pow(b, 2)); + + auto h = std::atan2(b, a); + + if (h > 0) + { + h = (h / M_PI) * 180.0; + } + else + { + h = 360.0 - (std::abs(h) / M_PI) * 180.0; + } + + return double3(l, c, h); + } + + double3 lch_to_lab(double3 lch) + { + if (lch.z > 360.0) + lch.z -= 360.0; + else if (lch.z < 360.0) + lch.z += 360.0; + + double h = lch.z * M_PI / 180.0; + + return double3( + lch.x, + std::cos(h) * lch.y, + std::sin(h) * lch.y); + } + + double3 xyz_to_lch(double3 xyz) + { + xyz = xyz_to_lab(xyz); + return lab_to_lch(xyz); + } + + double3 lch_to_xyz(double3 lch) + { + lch = lch_to_lab(lch); + return lab_to_xyz(lch); + } + + byte3 to_byte3(const double3& v) + { + return byte3( + std::round(std::max(std::min(v.x, 255.0), 0.0)), + std::round(std::max(std::min(v.y, 255.0), 0.0)), + std::round(std::max(std::min(v.z, 255.0), 0.0)) + ); + } + + double3 to_double3(const byte3& v) + { + return double3(v.x, v.y, v.z); + } + + QString vecToString(const double2& v) + { + return QString("[%1 %2]").arg(v[0], 7, 'f', 3).arg(v[1], 7, 'f', 3); + } + + QString vecToString(const double3& v) + { + return QString("[%1 %2 %3]").arg(v[0], 7, 'f', 3).arg(v[1], 7, 'f', 3).arg(v[2], 7, 'f', 3); + } + + QString vecToString(const double4& v) + { + return QString("[%1 %2 %3 %4]").arg(v[0], 6, 'f', 3).arg(v[1], 6, 'f', 3).arg(v[2], 6, 'f', 3).arg(v[3], 6, 'f', 3); + } + + QString vecToString(const byte3& v) + { + return QString("[%1 %2 %3]").arg(v[0], 3).arg(v[1], 3).arg(v[2], 3); + } + + QString matToString(double4x4 m) + { + QStringList ret; + for (int d = 0; d < 4; d++) + { + ret.append(vecToString(m.row(d))); + } + return ret.join("\r\n"); + } + + QString matToString(double3x3 m) + { + QStringList ret; + for (int d = 0; d < 3; d++) + { + ret.append(vecToString(m.row(d))); + } + return ret.join("\r\n"); + } +}; + +class YuvConverter +{ + +public: + enum YUV_COEFS { FCC = 0, BT601 = 1, BT709 = 2, BT2020 = 3 }; + enum COLOR_RANGE { FULL = 0, LIMITED = 1 }; + enum YUV_DIRECTION { FROM_RGB_TO_YUV = 0, FROM_YUV_TO_RGB = 1 }; + + const std::map knownCoeffs = { + {YUV_COEFS::FCC, {0.3, 0.11 } }, + {YUV_COEFS::BT601, {0.2990, 0.1140} }, + {YUV_COEFS::BT709, {0.2126, 0.0722} }, + {YUV_COEFS::BT2020, {0.2627, 0.0593} } + }; + + double3 toRgb(COLOR_RANGE range, YUV_COEFS coef, const double3& input) const + { + double4 ret(input, 1); + ret = mul(yuv2rgb.at(range).at(coef), ret); + return double3(ret.x, ret.y, ret.z); + } + + double3 toYuvBT709(COLOR_RANGE range, const double3& input) const + { + double4 ret(input, 1); + ret = mul(rgb2yuvBT709.at(range), ret); + return double3(ret.x, ret.y, ret.z); + } + + QString coefToString(YUV_COEFS cf) + { + switch (cf) + { + case(FCC): return "FCC"; break; + case(BT601): return "BT601"; break; + case(BT709): return "BT709"; break; + case(BT2020): return "BT2020"; break; + 
default: return "?"; + } + } + + YuvConverter() + { + for (const auto& coeff : knownCoeffs) + for (const COLOR_RANGE& range : { COLOR_RANGE::FULL, COLOR_RANGE::LIMITED }) + { + const double Kr = coeff.second.x; + const double Kb = coeff.second.y; + const double Kg = 1.0 - Kr - Kb; + const double Cr = 0.5 / (1.0 - Kb); + const double Cb = 0.5 / (1.0 - Kr); + + double scaleY = 1.0, addY = 0.0, scaleUV = 1.0, addUV = 128 / 255.0; + + if (range == COLOR_RANGE::LIMITED) + { + scaleY = 219 / 255.0; + addY = 16 / 255.0; + scaleUV = 224 / 255.0; + } + + double4 c1(Kr * scaleY, -Kr * Cr * scaleUV, (1 - Kr) * Cb * scaleUV, 0); + double4 c2(Kg * scaleY, -Kg * Cr * scaleUV, -Kg * Cb * scaleUV, 0); + double4 c3(Kb * scaleY, (1 - Kb) * Cr * scaleUV, -Kb * Cb * scaleUV, 0); + double4 c4(addY, addUV, addUV, 1); + + double4x4 rgb2yuvMatrix(c1, c2, c3, c4); + + double4x4 yuv2rgbMatrix = inverse(rgb2yuvMatrix); + + yuv2rgb[range][coeff.first] = yuv2rgbMatrix; + + if (coeff.first == YUV_COEFS::BT709) + { + rgb2yuvBT709[range] = rgb2yuvMatrix; + } + } + + } + + QString toString() + { + QStringList ret, report; + + for (const auto& coeff : knownCoeffs) + for (const COLOR_RANGE& range : { COLOR_RANGE::FULL, COLOR_RANGE::LIMITED }) + { + double4x4 matrix = yuv2rgb[range][coeff.first]; + ret.append(QString("YUV to RGB %1 (%2):").arg(coefToString(coeff.first), 6).arg((range == COLOR_RANGE::LIMITED) ? "Limited" : "Full")); + ret.append(ColorSpaceMath::matToString(matrix).split("\r\n")); + } + + for (const COLOR_RANGE& range : { COLOR_RANGE::FULL, COLOR_RANGE::LIMITED }) + { + ret.append(QString("RGB to YUV %1 (%2):").arg(coefToString(YUV_COEFS::BT709), 6).arg((range == COLOR_RANGE::LIMITED) ? "Limited" : "Full")); + ret.append(ColorSpaceMath::matToString(rgb2yuvBT709[range]).split("\r\n")); + } + + for (int i = 0; i + 5 < ret.size(); i += 5) + for (int j = 0; j < 5; j++) + report.append(QString("%1 %2").arg(ret[i], -32).arg(ret[(i++) + 5], -32)); + + return "Supported YUV/RGB matrix transformation:\r\n\r\n" + report.join("\r\n"); + } + + + +private: + std::map> yuv2rgb; + std::map rgb2yuvBT709; +}; diff --git a/include/lut-calibrator/LutCalibrator.h b/include/lut-calibrator/LutCalibrator.h index fc58ee831..fa1b822d0 100644 --- a/include/lut-calibrator/LutCalibrator.h +++ b/include/lut-calibrator/LutCalibrator.h @@ -43,6 +43,15 @@ class Logger; class GrabberWrapper; +enum TEST_COLOR_ID; +struct Result; +struct MappingPrime; + + +namespace linalg { + template struct mat; + template struct vec; +} class LutCalibrator : public QObject { @@ -50,6 +59,7 @@ class LutCalibrator : public QObject private: static LutCalibrator* instance; +public: struct ColorStat { double red = 0, green = 0, blue = 0, count = 0, scaledRed = 1, scaledGreen = 1, scaledBlue = 1; @@ -76,7 +86,7 @@ class LutCalibrator : public QObject scaledGreen = scale / green; scaledBlue = scale / blue; } - } + } void reset() { @@ -116,20 +126,34 @@ class LutCalibrator : public QObject public: LutCalibrator(); - ~LutCalibrator(); signals: void SignalLutCalibrationUpdated(const QJsonObject& data); public slots: - void incomingCommand(QString rootpath, GrabberWrapper* grabberWrapper, hyperhdr::Components defaultComp, int checksum, ColorRgb startColor, ColorRgb endColor, bool limitedRange, double saturation, double luminance, double gammaR, double gammaG, double gammaB, int coef); + void incomingCommand(QString rootpath, GrabberWrapper* grabberWrapper, hyperhdr::Components defaultComp, int checksum, double saturation, double luminance, double gammaR, double gammaG, 
double gammaB); void stopHandler(); void setVideoImage(const QString& name, const Image& image); void setSystemImage(const QString& name, const Image& image); void signalSetGlobalImageHandler(int priority, const Image& image, int timeout_ms, hyperhdr::Components origin); private: + QString generateShortReport(std::function selector); + void sendReport(QString report); + bool set1to1LUT(); + void requestNextTestBoard(int nextStep); + void error(QString message); + void applyShadow(linalg::vec& color, int shadow); + bool getSourceColor(int index, linalg::vec& color, TEST_COLOR_ID& prime, int& shadow); + linalg::vec getColor(const Image& image, double blackLevelError, int logicX, int y, double scaleX, double scaleY); + uint16_t getCrc(const Image& image, double blackLevelError, double whiteLevelError, int y, double scaleX, double scaleY); void handleImage(const Image& image); + std::list toneMapping(); + void tryHDR10(); + void setupWhitePointCorrection(); + void calibrate(); + void printFullReport(); + bool increaseColor(ColorRgb& color); void storeColor(const ColorRgb& inputColor, const ColorRgb& color); bool finalize(bool fastTrack = false); @@ -157,6 +181,7 @@ public slots: double fineTune(double& optimalRange, double& optimalScale, int& optimalWhite, int& optimalStrategy); double getError(ColorRgb first, ColorStat second); void applyFilter(); + void whitePointCorrection(double& nits, linalg::mat& convert_bt2020_to_XYZ, linalg::mat& convert_XYZ_to_corrected); Logger* _log; bool _mjpegCalibration; @@ -164,7 +189,7 @@ public slots: bool _limitedRange; int _checksum; int _currentCoef; - double _coefsResult[3]; + double _coefsResult[4]; int _warningCRC; int _warningMismatch; double _saturation; @@ -185,5 +210,5 @@ public slots: // Color coefs YUV to RGB: http://avisynth.nl/index.php/Color_conversions // FCC, Rec.709, Rec.601 coefficients - ColorStat _coefs[3] = { ColorStat(0.3, 0.59, 0.11), ColorStat(0.2126, 0.7152, 0.0722), ColorStat(0.299, 0.587, 0.114)}; + ColorStat _coefs[4] = { ColorStat(0.3, 0.59, 0.11), ColorStat(0.2126, 0.7152, 0.0722), ColorStat(0.299, 0.587, 0.114), ColorStat(0.2627, 0.678, 0.0593)}; }; diff --git a/include/utils/GlobalSignals.h b/include/utils/GlobalSignals.h index d5259cd70..4be144493 100644 --- a/include/utils/GlobalSignals.h +++ b/include/utils/GlobalSignals.h @@ -94,5 +94,7 @@ class GlobalSignals : public QObject void SignalDiscoveryRequestToScan(DiscoveryRecord::Service type); - void SignalDiscoveryEvent(DiscoveryRecord message); + void SignalDiscoveryEvent(DiscoveryRecord message); + + void SignalSetLut(MemoryBuffer* lut); }; diff --git a/include/utils/Logger.h b/include/utils/Logger.h index 847a227f8..930281855 100644 --- a/include/utils/Logger.h +++ b/include/utils/Logger.h @@ -20,6 +20,7 @@ #define LOG_MESSAGE(severity, logger, ...) (logger)->Message(severity, __FILE__, __FUNCTION__, __LINE__, __VA_ARGS__) +#define REPORT_TOKEN "" #define Debug(logger, ...) LOG_MESSAGE(Logger::DEBUG , logger, __VA_ARGS__) #define Info(logger, ...) LOG_MESSAGE(Logger::INFO , logger, __VA_ARGS__) #define Warning(logger, ...) 
LOG_MESSAGE(Logger::WARNING, logger, __VA_ARGS__) diff --git a/sources/api/HyperAPI.cpp b/sources/api/HyperAPI.cpp index 5475f8c2f..96c8465d3 100644 --- a/sources/api/HyperAPI.cpp +++ b/sources/api/HyperAPI.cpp @@ -1007,28 +1007,16 @@ void HyperAPI::handleLutCalibrationCommand(const QJsonObject& message, const QSt } int checksum = message["checksum"].toInt(-1); - QJsonObject startColor = message["startColor"].toObject(); - QJsonObject endColor = message["endColor"].toObject(); - bool limitedRange = message["limitedRange"].toBool(false); double saturation = message["saturation"].toDouble(1.0); double luminance = message["luminance"].toDouble(1.0); double gammaR = message["gammaR"].toDouble(1.0); double gammaG = message["gammaG"].toDouble(1.0); double gammaB = message["gammaB"].toDouble(1.0); - int coef = message["coef"].toInt(0); - ColorRgb _startColor, _endColor; - - _startColor.red = startColor["r"].toInt(128); - _startColor.green = startColor["g"].toInt(128); - _startColor.blue = startColor["b"].toInt(128); - _endColor.red = endColor["r"].toInt(255); - _endColor.green = endColor["g"].toInt(255); - _endColor.blue = endColor["b"].toInt(255); sendSuccessReply(command, tan); if (subcommand == "capture") - _lutCalibrator->incomingCommand(_instanceManager->getRootPath(), (_videoGrabber != nullptr) ? _videoGrabber->grabberWrapper() : nullptr, getActiveComponent(), checksum, _startColor, _endColor, limitedRange, saturation, luminance, gammaR, gammaG, gammaB, coef); + _lutCalibrator->incomingCommand(_instanceManager->getRootPath(), (_videoGrabber != nullptr) ? _videoGrabber->grabberWrapper() : nullptr, getActiveComponent(), checksum, saturation, luminance, gammaR, gammaG, gammaB); else _lutCalibrator->stopHandler(); } diff --git a/sources/api/JSONRPC_schema/schema-lut-calibration.json b/sources/api/JSONRPC_schema/schema-lut-calibration.json index 5a0da5908..bd4bdd76b 100644 --- a/sources/api/JSONRPC_schema/schema-lut-calibration.json +++ b/sources/api/JSONRPC_schema/schema-lut-calibration.json @@ -19,13 +19,6 @@ "type" : "integer", "required" : true }, - "coef": { - "type" : "integer", - "required" : true - }, - "limitedRange": { - "type" : "boolean" - }, "saturation": { "type" : "number" }, @@ -40,26 +33,6 @@ }, "gammaB": { "type" : "number" - }, - "startColor": { - "type": "object", - "required": true, - "properties":{ - "r" : {"type" : "integer"}, - "g" : {"type" : "integer"}, - "b" : {"type" : "integer"} - }, - "additionalProperties": false - }, - "endColor": { - "type": "object", - "required": true, - "properties":{ - "r" : {"type" : "integer"}, - "g" : {"type" : "integer"}, - "b" : {"type" : "integer"} - }, - "additionalProperties": false } }, "additionalProperties": false diff --git a/sources/base/Grabber.cpp b/sources/base/Grabber.cpp index b179bc2c2..520040d7c 100644 --- a/sources/base/Grabber.cpp +++ b/sources/base/Grabber.cpp @@ -949,3 +949,13 @@ bool Grabber::isInitialized() { return _initialized; } + +void Grabber::signalSetLutHandler(MemoryBuffer* lut) +{ + if (lut != nullptr && _lut.size() >= lut->size()) + { + memcpy(_lut.data(), lut->data(), lut->size()); + } + else + Error(_log, "Could not set LUT: current size = %i, incoming size = %i", _lut.size(), (lut != nullptr) ? 
lut->size() : 0); +} diff --git a/sources/grabber/linux/v4l2/V4L2Wrapper.cpp b/sources/grabber/linux/v4l2/V4L2Wrapper.cpp index 79897fbf5..2cedfab77 100644 --- a/sources/grabber/linux/v4l2/V4L2Wrapper.cpp +++ b/sources/grabber/linux/v4l2/V4L2Wrapper.cpp @@ -28,6 +28,7 @@ #include #include #include +#include V4L2Wrapper::V4L2Wrapper(const QString& device, const QString& configurationPath) @@ -38,4 +39,5 @@ V4L2Wrapper::V4L2Wrapper(const QString& device, connect(_grabber.get(), &Grabber::SignalCapturingException, this, &GrabberWrapper::capturingExceptionHandler); connect(_grabber.get(), &Grabber::SignalSetNewComponentStateToAllInstances, this, &GrabberWrapper::SignalSetNewComponentStateToAllInstances); connect(_grabber.get(), &Grabber::SignalSaveCalibration, this, &GrabberWrapper::SignalSaveCalibration); + connect(GlobalSignals::getInstance(), &GlobalSignals::SignalSetLut, _grabber.get(), &Grabber::signalSetLutHandler, Qt::BlockingQueuedConnection); } diff --git a/sources/grabber/osx/AVF/AVFWrapper.cpp b/sources/grabber/osx/AVF/AVFWrapper.cpp index 84296cf6c..7d3cc4558 100644 --- a/sources/grabber/osx/AVF/AVFWrapper.cpp +++ b/sources/grabber/osx/AVF/AVFWrapper.cpp @@ -28,6 +28,7 @@ #include #include #include +#include AVFWrapper::AVFWrapper(const QString& device, @@ -39,5 +40,6 @@ AVFWrapper::AVFWrapper(const QString& device, connect(_grabber.get(), &Grabber::SignalCapturingException, this, &GrabberWrapper::capturingExceptionHandler); connect(_grabber.get(), &Grabber::SignalSetNewComponentStateToAllInstances, this, &GrabberWrapper::SignalSetNewComponentStateToAllInstances); connect(_grabber.get(), &Grabber::SignalSaveCalibration, this, &GrabberWrapper::SignalSaveCalibration); + connect(GlobalSignals::getInstance(), &GlobalSignals::SignalSetLut, _grabber.get(), &Grabber::signalSetLutHandler, Qt::BlockingQueuedConnection); } diff --git a/sources/grabber/windows/MF/MFWrapper.cpp b/sources/grabber/windows/MF/MFWrapper.cpp index fbbfa51b2..29f4739f6 100644 --- a/sources/grabber/windows/MF/MFWrapper.cpp +++ b/sources/grabber/windows/MF/MFWrapper.cpp @@ -28,6 +28,7 @@ #include #include #include +#include MFWrapper::MFWrapper(const QString& device, @@ -39,5 +40,6 @@ MFWrapper::MFWrapper(const QString& device, connect(_grabber.get(), &Grabber::SignalCapturingException, this, &GrabberWrapper::capturingExceptionHandler); connect(_grabber.get(), &Grabber::SignalSetNewComponentStateToAllInstances, this, &GrabberWrapper::SignalSetNewComponentStateToAllInstances); connect(_grabber.get(), &Grabber::SignalSaveCalibration, this, &GrabberWrapper::SignalSaveCalibration); + connect(GlobalSignals::getInstance(), &GlobalSignals::SignalSetLut, _grabber.get(), &Grabber::signalSetLutHandler, Qt::BlockingQueuedConnection); } diff --git a/sources/lut-calibrator/CMakeLists.txt b/sources/lut-calibrator/CMakeLists.txt index 1e56cc8db..890474492 100644 --- a/sources/lut-calibrator/CMakeLists.txt +++ b/sources/lut-calibrator/CMakeLists.txt @@ -11,6 +11,7 @@ add_library(lut-calibrator OBJECT ${lut-calibrator_SOURCES}) target_link_libraries(lut-calibrator Qt${Qt_VERSION}::Core Qt${Qt_VERSION}::Network + linalg ) if(USE_PRECOMPILED_HEADERS AND COMMAND target_precompile_headers) diff --git a/sources/lut-calibrator/LutCalibrator.cpp b/sources/lut-calibrator/LutCalibrator.cpp index cc42fad27..a106e66e1 100644 --- a/sources/lut-calibrator/LutCalibrator.cpp +++ b/sources/lut-calibrator/LutCalibrator.cpp @@ -34,21 +34,32 @@ #include #include - #include - - #include + #include #endif #define STRING_CSTR(x) ((std::string)x).c_str() 
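The three wrappers above (V4L2, AVF, MF) all connect GlobalSignals::SignalSetLut to Grabber::signalSetLutHandler with Qt::BlockingQueuedConnection, so a sender running in another thread is held until the grabber has finished copying the buffer. A hedged sketch of the sender side, mirroring how set1to1LUT uses the signal later in this diff (the MemoryBuffer element type is an assumption; the extracted diff dropped the template arguments):

// Sender-side sketch, assumed to run outside the grabber thread (e.g. in the
// LUT calibrator). Because the connection type is Qt::BlockingQueuedConnection,
// emit returns only after Grabber::signalSetLutHandler has memcpy-ed the data
// in the grabber thread, so passing a pointer to a local buffer is safe.
MemoryBuffer<uint8_t> lut;               // element type assumed, not visible in this diff
lut.resize(LUT_FILE_SIZE);
// ... fill the table ...
emit GlobalSignals::getInstance()->SignalSetLut(&lut);   // blocks until the copy is done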
#include +#include #include #include #include #include #include #include +#include +#include + + +using namespace linalg; +using namespace aliases; +using namespace ColorSpaceMath; + +double3 tovec(LutCalibrator::ColorStat a) +{ + return double3(a.red, a.green, a.blue); +} ColorRgb LutCalibrator::primeColors[] = { ColorRgb(255, 0, 0), ColorRgb(0, 255, 0), ColorRgb(0, 0, 255), ColorRgb(255, 255, 0), @@ -91,143 +102,387 @@ LutCalibrator::LutCalibrator() _timeStamp = 0; } -LutCalibrator::~LutCalibrator() -{ +void LutCalibrator::error(QString message) +{ + QJsonObject report; + stopHandler(); + Error(_log, QSTRING_CSTR(message)); + report["status"] = 1; + report["error"] = message; + SignalLutCalibrationUpdated(report); } -void LutCalibrator::incomingCommand(QString rootpath, GrabberWrapper* grabberWrapper, hyperhdr::Components defaultComp, int checksum, ColorRgb startColor, ColorRgb endColor, bool limitedRange, double saturation, double luminance, double gammaR, double gammaG, double gammaB, int coef) +struct UserConfig { + + double saturation; + double luminance; + double gammaR; + double gammaG; + double gammaB; + + UserConfig() : UserConfig(0, 0, 0, 0, 0) {}; + UserConfig(double _saturation, double _luminance, double _gammaR, double _gammaG, double _gammaB): + saturation(_saturation), + luminance(_luminance), + gammaR(_gammaR), + gammaG(_gammaG), + gammaB(_gammaB) + {} + +} userConfig; + +enum TEST_COLOR_ID { + WHITE = 0, + RED = 1, GREEN = 2, BLUE = 3, + RED_GREEN = 4, GREEN_BLUE = 5, RED_BLUE = 6, + RED_GREEN2 = 7, GREEN_BLUE2 = 8, RED_BLUE2 = 9, + RED2_GREEN = 10, GREEN2_BLUE = 11, RED2_BLUE = 12 +}; + +namespace { + const int SCREEN_BLOCKS_X = 64; + const int SCREEN_BLOCKS_Y = 36; + const int LOGIC_BLOCKS_X_SIZE = 4; + const int LOGIC_BLOCKS_X = std::floor((SCREEN_BLOCKS_X - 1) / LOGIC_BLOCKS_X_SIZE); + const int COLOR_DIVIDES = 32; + + const std::map> TEST_COLORS = { + {TEST_COLOR_ID::WHITE, {{255, 255, 255}, "White"}}, + {TEST_COLOR_ID::RED, {{255, 0, 0 }, "Red"}}, + {TEST_COLOR_ID::GREEN, {{0, 255, 0 }, "Green"}}, + {TEST_COLOR_ID::BLUE, {{0, 0, 255}, "Blue"}}, + {TEST_COLOR_ID::RED_GREEN, {{255, 255, 0 }, "Yellow"}}, + {TEST_COLOR_ID::GREEN_BLUE, {{0, 255, 255}, "Cyan"}}, + {TEST_COLOR_ID::RED_BLUE, {{255, 0, 255}, "Magenta"}}, + {TEST_COLOR_ID::RED_GREEN2, {{255, 128, 0 }, "Orange"}}, + {TEST_COLOR_ID::GREEN_BLUE2,{{0, 255, 128}, "LimeBlue"}}, + {TEST_COLOR_ID::RED_BLUE2, {{255, 0, 128}, "Pink"}}, + {TEST_COLOR_ID::RED2_GREEN, {{128, 255, 0 }, "LimeRed"}}, + {TEST_COLOR_ID::GREEN2_BLUE,{{0, 128, 255}, "Azure"}}, + {TEST_COLOR_ID::RED2_BLUE, {{128, 0, 255}, "Violet"}} + }; + + + enum CALIBRATION_STEP { + NOT_ACTIVE = 0, STEP_1_RAW_YUV, STEP_2_CALIBRATE + }; + + YuvConverter yuvConverter; +} + +void LutCalibrator::applyShadow(linalg::vec& color, int shadow) { - _rootPath = rootpath; + color.x -= std::round(shadow * (color.x + ((int)color.x % 2)) / COLOR_DIVIDES) - ((shadow == 0) ? 0 : ((int)color.x % 2)); + color.y -= std::round(shadow * (color.y + ((int)color.y % 2)) / COLOR_DIVIDES) - ((shadow == 0) ? 0 : ((int)color.y % 2)); + color.z -= std::round(shadow * (color.z + ((int)color.z % 2)) / COLOR_DIVIDES) - ((shadow == 0) ? 
0 : ((int)color.z % 2)); +} - if (checksum == 0) +bool LutCalibrator::getSourceColor(int index, linalg::vec& color, TEST_COLOR_ID& prime, int& shadow) +{ + int searching = 0; + + for (auto const& testColor : TEST_COLORS) { - if (grabberWrapper != nullptr && !_mjpegCalibration) + const byte3& refColor = testColor.second.first; + color = double3(refColor.x, refColor.y, refColor.z); + for (prime = testColor.first, shadow = 0; shadow < COLOR_DIVIDES; shadow++) { - if (grabberWrapper->getHdrToneMappingEnabled() != 0) + if (searching == index) { - QJsonObject report; - stopHandler(); - Error(_log, "Please disable LUT tone mapping and run the test again"); - report["status"] = 1; - report["error"] = "Please disable LUT tone mapping and run the test again"; - SignalLutCalibrationUpdated(report); - return; + applyShadow(color, shadow); + return true; } + else + searching++; + } + } - QString vidMode; + return false; +} - SAFE_CALL_0_RET(grabberWrapper, getVideoCurrentModeResolution, QString, vidMode); +struct Result +{ + /// color that was rendered by the browser + double3 source = { 0, 0, 0 }; + /// color that was rendered by the browser RGB [0 - 1] + double3 sourceScaled = { 0, 0, 0 }; + /// color that was captured RGB or YUV [0-255] + double3 input = { 0, 0, 0 }; + /// color that was captured in limited YUV [0-235-240] + double3 inputYUV = { 0, 0, 0 }; + /// converted to linear RGB + double3 inputRGB = { 0, 0, 0 }; + /// input to XYZ + double3 processingXYZ = { 0, 0, 0 }; + /// output RGB not rounded [0-1] + double3 output = { 0, 0, 0 }; + /// output scaled RGB not rounded [0-1] + double3 outputNormRGB = { 0, 0, 0 }; + /// output RGB not rounded [0 - 255 scale] + double3 outputRGB = { 0, 0, 0 }; + +}; - _mjpegCalibration = (vidMode.indexOf("mjpeg", 0, Qt::CaseInsensitive) >= 0); +struct Calibration { + CALIBRATION_STEP step; + bool isYUV; + bool isVideoCapturingEnabled; + double nits; + YuvConverter::COLOR_RANGE colorRange; + std::map> results; + std::map whitePointCorrection; + + Calibration() : Calibration(CALIBRATION_STEP::NOT_ACTIVE, false) {}; + Calibration(CALIBRATION_STEP _step, bool _isYUV) : + step(_step), + isYUV(_isYUV), + isVideoCapturingEnabled(false), + nits(0), + colorRange(YuvConverter::COLOR_RANGE::FULL) + { + for(const auto& i : TEST_COLORS) + results[i.first] = std::vector(COLOR_DIVIDES); + } +} calibration; - if (_mjpegCalibration) - { - Debug(_log, "Enabling pseudo-HDR mode for calibration to bypass TurboJPEG MJPEG to RGB processing"); +double getVecMax(const double3& rec) +{ + return ((calibration.isYUV) ? rec.x : maxelem(rec)); +} + +double getVecMin(const double3& rec) +{ + return ((calibration.isYUV) ? 
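getSourceColor above walks TEST_COLORS in enum order and, for each colour, derives COLOR_DIVIDES progressively darker shades through applyShadow, so the test board carries 13 × 32 = 416 patches. The index mapping this implies can be written directly (an equivalent fragment, not code from the diff; index is the linear patch number passed to getSourceColor):

// Equivalent decomposition of the linear patch index:
const int colorPos = index / COLOR_DIVIDES;   // 0..12, position in TEST_COLOR_ID order
const int shadow   = index % COLOR_DIVIDES;   // 0..31, 0 = full intensity
const bool valid   = colorPos < static_cast<int>(TEST_COLORS.size());   // 416 patches in total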
rec.x : minelem(rec)); +} + +/* +WHITE = 0, +RED = 1, GREEN = 2, BLUE = 3, +RED_GREEN = 4, GREEN_BLUE = 5, RED_BLUE = 6, +RED_GREEN2 = 7, GREEN_BLUE2 = 8, RED_BLUE2 = 9, +GREEN_RED2 = 10, BLUE_GREEN2 = 11, BLUE_RED2 = 12 + + +ColorRgb LutCalibrator::primeColors[] = { + ColorRgb(255, 0, 0), + ColorRgb(0, 255, 0), + ColorRgb(0, 0, 255), + ColorRgb(255, 255, 0), + ColorRgb(255, 0, 255), + ColorRgb(0, 255, 255), + ColorRgb(255, 128, 0), + ColorRgb(255, 0, 128), + ColorRgb(0, 128, 255), + ColorRgb(128, 64, 0), + ColorRgb(128, 0, 64), + ColorRgb(128, 0, 0), + ColorRgb(0, 128, 0), + ColorRgb(0, 0, 128), + ColorRgb(16, 16, 16), + ColorRgb(32, 32, 32), + ColorRgb(48, 48, 48), + ColorRgb(64, 64, 64), + ColorRgb(96, 96, 96), + ColorRgb(120, 120, 120), + ColorRgb(144, 144, 144), + ColorRgb(172, 172, 172), + ColorRgb(196, 196, 196), + ColorRgb(220, 220, 220), + ColorRgb(255, 255, 255), + ColorRgb(0, 0, 0) +}; +*/ +QString LutCalibrator::generateShortReport(std::function selector) +{ + const std::list> shortReportColors = { + { TEST_COLOR_ID::WHITE, 0 }, + { TEST_COLOR_ID::RED, 0 }, + { TEST_COLOR_ID::GREEN, 0 }, + { TEST_COLOR_ID::BLUE, 0 }, + { TEST_COLOR_ID::RED, COLOR_DIVIDES / 2 }, + { TEST_COLOR_ID::GREEN, COLOR_DIVIDES / 2 }, + { TEST_COLOR_ID::BLUE, COLOR_DIVIDES / 2 }, + { TEST_COLOR_ID::RED_GREEN, 0 }, + { TEST_COLOR_ID::RED_BLUE, 0 }, + { TEST_COLOR_ID::GREEN_BLUE, 0 }, + { TEST_COLOR_ID::RED_GREEN2, 0 }, + { TEST_COLOR_ID::GREEN_BLUE2,0 }, + { TEST_COLOR_ID::RED_BLUE2, 0 }, + { TEST_COLOR_ID::RED2_GREEN, 0 }, + { TEST_COLOR_ID::GREEN2_BLUE,0 }, + { TEST_COLOR_ID::RED2_BLUE, 0 }, + { TEST_COLOR_ID::WHITE, 0 }, + { TEST_COLOR_ID::WHITE, 1 }, + { TEST_COLOR_ID::WHITE, 2 }, + { TEST_COLOR_ID::WHITE, 5 }, + { TEST_COLOR_ID::WHITE, 9 }, + { TEST_COLOR_ID::WHITE, 12 }, + { TEST_COLOR_ID::WHITE, 15 }, + { TEST_COLOR_ID::WHITE, 18 }, + { TEST_COLOR_ID::WHITE, 21 }, + { TEST_COLOR_ID::WHITE, 24 }, + { TEST_COLOR_ID::WHITE, 26 }, + { TEST_COLOR_ID::WHITE, 29 }, + { TEST_COLOR_ID::WHITE, 30 }, + { TEST_COLOR_ID::WHITE, COLOR_DIVIDES - 1 } + }; + + QStringList rep; + for (const auto& color : shortReportColors) + if (color.second >= 0 && color.second < COLOR_DIVIDES) + { + const auto& testColor = TEST_COLORS.at(color.first); + vec sourceColor(testColor.first.x, testColor.first.y, testColor.first.z); + applyShadow(sourceColor, color.second); + rep.append(QString("%1: %2 => %3") + .arg(QString("%1").arg(testColor.second + ((color.second != 0)? 
QString::number(color.second) : "")), 12) + .arg(vecToString(to_byte3(sourceColor)), 12) + .arg(selector(calibration.results[color.first][color.second]))); + + }; + return rep.join("\r\n"); +} + +void LutCalibrator::requestNextTestBoard(int nextStep) +{ + QJsonObject report; + report["status"] = 0; + report["validate"] = nextStep; + SignalLutCalibrationUpdated(report); +} + +bool LutCalibrator::set1to1LUT() +{ + _lut.resize(LUT_FILE_SIZE); + + if (_lut.data() != nullptr) + { + for (int y = 0; y < 256; y++) + for (int u = 0; u < 256; u++) + for (int v = 0; v < 256; v++) + { + uint32_t ind_lutd = LUT_INDEX(y, u, v); + _lut.data()[ind_lutd] = y; + _lut.data()[ind_lutd + 1] = u; + _lut.data()[ind_lutd + 2] = v; + } + emit GlobalSignals::getInstance()->SignalSetLut(&_lut); + QThread::msleep(100); + + return true; + } + + return false; +} + + +void LutCalibrator::sendReport(QString report) +{ + int total = 0; + QStringList list; + + auto lines = report.split("\r\n"); + for (const auto& line : lines) + { + if (total + line.size() + 4 >= 1024) + { + Debug(_log, REPORT_TOKEN "\r\n%s", QSTRING_CSTR(list.join("\r\n"))); + total = 4; + list.clear(); + } + + total += line.size() + 4; + list.append(line); + } + + Debug(_log, REPORT_TOKEN "%s\r\n", QSTRING_CSTR(list.join("\r\n"))); +} + +void LutCalibrator::incomingCommand(QString rootpath, GrabberWrapper* grabberWrapper, hyperhdr::Components defaultComp, int checksum, double saturation, double luminance, double gammaR, double gammaG, double gammaB) +{ + _rootPath = rootpath; + + if (checksum == CALIBRATION_STEP::NOT_ACTIVE) + { + stopHandler(); + + bool isYuv = false; + + // check if the source is using YUV + if (grabberWrapper != nullptr) + { + QString vidMode; + SAFE_CALL_0_RET(grabberWrapper, getVideoCurrentModeResolution, QString, vidMode); + isYuv = (vidMode.indexOf(pixelFormatToString(PixelFormat::MJPEG), 0, Qt::CaseInsensitive) >= 0) || + (vidMode.indexOf(pixelFormatToString(PixelFormat::YUYV), 0, Qt::CaseInsensitive) >= 0) || + (vidMode.indexOf(pixelFormatToString(PixelFormat::NV12), 0, Qt::CaseInsensitive) >= 0) || + (vidMode.indexOf(pixelFormatToString(PixelFormat::I420), 0, Qt::CaseInsensitive) >= 0); + if (isYuv) + { emit GlobalSignals::getInstance()->SignalRequestComponent(hyperhdr::Components::COMP_HDR, -1, true); int hdrEnabled = 0; SAFE_CALL_0_RET(grabberWrapper, getHdrToneMappingEnabled, int, hdrEnabled); - Debug(_log, "HDR is %s", (hdrEnabled) ? "enabled" : "disabled"); - if (!hdrEnabled) { - QJsonObject report; - stopHandler(); - Error(_log, "Unexpected HDR state. Aborting"); - report["status"] = 1; - report["error"] = "Unexpected HDR state. Aborting"; - SignalLutCalibrationUpdated(report); + error("Unexpected HDR state. Aborting"); return; } } } - _finish = false; - _limitedRange = limitedRange; - _saturation = saturation; - _luminance = luminance; - _gammaR = gammaR; - _gammaG = gammaG; - _gammaB = gammaB; - _checksum = -1; - _currentCoef = coef % (sizeof(_coefsResult) / sizeof(double)); - _warningCRC = _warningMismatch = -1; - _minColor = ColorRgb(255, 255, 255); - _maxColor = ColorRgb(0, 0, 0); - for (capColors selector = capColors::Red; selector != capColors::None; selector = capColors(((int)selector) + 1)) - _colorBalance[(int)selector].reset(); + if (!isYuv) + emit GlobalSignals::getInstance()->SignalRequestComponent(hyperhdr::Components::COMP_HDR, -1, false); - - _lut.resize(LUT_FILE_SIZE * 2); + userConfig= UserConfig(saturation, luminance, gammaR, gammaG, gammaB); + calibration = Calibration((isYuv) ? 
CALIBRATION_STEP::STEP_1_RAW_YUV : CALIBRATION_STEP::STEP_2_CALIBRATE, isYuv); - if (_lut.data() != nullptr) - { - memset(_lut.data(), 0, LUT_FILE_SIZE * 2); + Info(_log, "The source is %s", (isYuv) ? "YUV" : "RGB"); - finalize(true); - memset(_lut.data(), 0, LUT_FILE_SIZE * 2); - - if (grabberWrapper != nullptr) - { - _log->disable(); - - BLOCK_CALL_0(grabberWrapper, stop); - BLOCK_CALL_0(grabberWrapper, start); - - QThread::msleep(2000); + if (isYuv && !set1to1LUT()) + { + error("Could not allocated memory (~50MB) for internal temporary buffer. Stopped."); + return; + } - _log->enable(); - } + Debug(_log, "Requested LUT calibration. User settings: saturation = %0.2f, luminance = %0.2f, gammas = (%0.2f, %0.2f, %0.2f)", + _saturation, _luminance, _gammaR, _gammaG, _gammaB); - if (defaultComp == hyperhdr::COMP_VIDEOGRABBER) - { - Debug(_log, "Using video grabber as a source"); - connect(GlobalSignals::getInstance(), &GlobalSignals::SignalNewVideoImage, this, &LutCalibrator::setVideoImage, Qt::ConnectionType::UniqueConnection); - } - else if (defaultComp == hyperhdr::COMP_SYSTEMGRABBER) - { - Debug(_log, "Using system grabber as a source"); - connect(GlobalSignals::getInstance(), &GlobalSignals::SignalNewSystemImage, this, &LutCalibrator::setSystemImage, Qt::ConnectionType::UniqueConnection); - } - else - { - Debug(_log, "Using flatbuffers/protobuffers as a source"); - connect(GlobalSignals::getInstance(), &GlobalSignals::SignalSetGlobalImage, this, &LutCalibrator::signalSetGlobalImageHandler, Qt::ConnectionType::UniqueConnection); - } + requestNextTestBoard(calibration.step); + } + else if ((checksum == CALIBRATION_STEP::STEP_1_RAW_YUV || checksum == CALIBRATION_STEP::STEP_2_CALIBRATE) && + !calibration.isVideoCapturingEnabled) + { + calibration.isVideoCapturingEnabled = true; + if (defaultComp == hyperhdr::COMP_VIDEOGRABBER) + { + Debug(_log, "Using video grabber as a source"); + connect(GlobalSignals::getInstance(), &GlobalSignals::SignalNewVideoImage, this, &LutCalibrator::setVideoImage, Qt::ConnectionType::UniqueConnection); + } + else if (defaultComp == hyperhdr::COMP_SYSTEMGRABBER) + { + Debug(_log, "Using system grabber as a source"); + connect(GlobalSignals::getInstance(), &GlobalSignals::SignalNewSystemImage, this, &LutCalibrator::setSystemImage, Qt::ConnectionType::UniqueConnection); } else { - QJsonObject report; - stopHandler(); - Error(_log, "Could not allocated memory (~100MB) for internal temporary buffer. Stopped."); - report["status"] = 1; - report["error"] = "Could not allocated memory (~100MB) for internal temporary buffer. Stopped."; - SignalLutCalibrationUpdated(report); - return; + Debug(_log, "Using flatbuffers/protobuffers as a source"); + connect(GlobalSignals::getInstance(), &GlobalSignals::SignalSetGlobalImage, this, &LutCalibrator::signalSetGlobalImageHandler, Qt::ConnectionType::UniqueConnection); } } - _checksum = checksum; - _startColor = startColor; - _endColor = endColor; - _timeStamp = InternalClock::now(); - - if (_checksum % 19 == 1) - Debug(_log, "Requested section: %i, %s, %s, YUV: %s, Coef: %s, Saturation: %f, Luminance: %f, Gammas: (%f, %f, %f)", - _checksum, STRING_CSTR(_startColor), STRING_CSTR(_endColor), (_limitedRange) ? "LIMITED" : "FULL", - REC(_currentCoef), _saturation, _luminance, _gammaR, _gammaG, _gammaB); + else + { + error("Unknown request. 
Stopped."); + return; + } } void LutCalibrator::stopHandler() { + disconnect(GlobalSignals::getInstance(), &GlobalSignals::SignalNewSystemImage, this, &LutCalibrator::setSystemImage); disconnect(GlobalSignals::getInstance(), &GlobalSignals::SignalNewVideoImage, this, &LutCalibrator::setVideoImage); disconnect(GlobalSignals::getInstance(), &GlobalSignals::SignalSetGlobalImage, this, &LutCalibrator::signalSetGlobalImageHandler); - _mjpegCalibration = false; - _finish = false; - _checksum = -1; - _warningCRC = _warningMismatch = -1; - std::fill_n(_coefsResult, sizeof(_coefsResult) / sizeof(double), 0.0); - _lut.releaseMemory(); } @@ -246,167 +501,510 @@ void LutCalibrator::signalSetGlobalImageHandler(int priority, const Image& image) +vec LutCalibrator::getColor(const Image& image, double blackLevelError, int logicX, int y, double scaleX, double scaleY) { - int validate = 0; - int diffColor = 0; - QJsonObject report; - QJsonArray colors; - ColorRgb white{ 128,128,128 }, black{ 16,16,16 }; - double scaleX = image.width() / 128.0; - double scaleY = image.height() / 72.0; + double3 color{ 0,0,0 }; - if (_checksum < 0) - return; + int sX = std::round((logicX * LOGIC_BLOCKS_X_SIZE + (y % 2) * 2 + 1 + 0.5) * scaleX); + int sY = std::round((y + 0.5) * scaleY); + + double cR = 0, cG = 0, cB = 0; - if (image.width() < 3 * 128 || image.height() < 3 * 72) + for (int i = -1; i <= 1; i++) + for (int j = -1; j <= 1; j++) + { + ColorRgb cur = image(sX + i, sY + j); + color.x += cur.red; + color.y += cur.green; + color.z += cur.blue; + } + + color /= 9; + + for (int i = -1; i <= 1; i += 2) { - stopHandler(); - Error(_log, "Too low resolution: 384x216 is the minimum. Received video frame: %ix%i. Stopped.", image.width(), image.height()); - report["status"] = 1; - report["error"] = "Too low resolution: 384x216 is the minimum. Received video frame: %ix%i. Stopped."; - SignalLutCalibrationUpdated(report); + ColorRgb cur = image(sX + i * scaleX, sY); + double3 control(cur.red, cur.green, cur.blue); + + if (getVecMax(control) > blackLevelError) + throw std::invalid_argument("Invalid element/color detected in test image. Reliable analysis cannot be performed. Make sure the test board takes up the entire screen in the video live preview."); + } + + return color; +} + +uint16_t LutCalibrator::getCrc(const Image& image, double blackLevelError, double whiteLevelError, int y, double scaleX, double scaleY) +{ + uint16_t retVal = 0; + for (int i = 0; i < 8; i++) + { + double3 color = getColor(image, blackLevelError, 2 + i, y, scaleX, scaleY); + + if (getVecMin(color) > whiteLevelError) + retVal |= 1 << (7 - i); + else if (getVecMax(color) > blackLevelError) + throw std::invalid_argument("Unexpected brightness value in encoded CRC. Make sure the test board takes up the entire screen in the video live preview."); + } + return retVal; +} + +class Colorspace +{ + public: + +} colorspace; + +void LutCalibrator::handleImage(const Image& image) +{ + ////////////////////////////////////////////////////////////////////////// + ///////////////////////// Verify source //////////////////////////////// + ////////////////////////////////////////////////////////////////////////// + + if (image.width() < 3 * SCREEN_BLOCKS_X || image.height() < 3 * SCREEN_BLOCKS_Y) + { + error(QString("Too low resolution: 384x216 is the minimum. Received video frame: %1x%2. 
Stopped.").arg(image.width()).arg(image.height())); return; } if (image.width() * 1080 != image.height() * 1920) { - stopHandler(); - Error(_log, "Invalid resolution width/height ratio. Expected aspect: 1920/1080 (or the same 1280/720 etc). Stopped."); - report["status"] = 1; - report["error"] = "Invalid resolution width/height ratio. Expected aspect: 1920/1080 (or the same 1280/720 etc). Stopped."; - SignalLutCalibrationUpdated(report); + error("Invalid resolution width/height ratio. Expected aspect: 1920/1080 (or the same 1280/720 etc). Stopped."); return; } - for (int py = 0; py < 72;) + try { - for (int px = (py < 71 && py > 0) ? _checksum % 2 : 0; px < 128; px++) - { - ColorRgb color; - int sX = (qRound(px * scaleX) + qRound((px + 1) * scaleX)) / 2; - int sY = (qRound(py * scaleY) + qRound((py + 1) * scaleY)) / 2; - int cR = 0, cG = 0, cB = 0; + ////////////////////////////////////////////////////////////////////////// + ////////////////////////// Verify frame //////////////////////////////// + ////////////////////////////////////////////////////////////////////////// - for (int i = -1; i <= 1; i++) - for (int j = -1; j <= 1; j++) - { - ColorRgb cur = image(sX + i, sY + j); - cR += cur.red; - cG += cur.green; - cB += cur.blue; - } + double blackLevelError = 48, whiteLevelError = 96; + double scaleX = image.width() / static_cast(SCREEN_BLOCKS_X); + double scaleY = image.height() / static_cast(SCREEN_BLOCKS_Y); + + double3 black = getColor(image, blackLevelError, 0, 0, scaleX, scaleY); + blackLevelError = getVecMax(black) + 8; + double3 white = getColor(image, blackLevelError, 1, 0, scaleX, scaleY); + whiteLevelError = getVecMin(white) - 8; + + if (whiteLevelError <= blackLevelError) + throw std::invalid_argument("The white color is lower than the black color in the captured image. Make sure the test board takes up the entire screen in the video live preview."); + + int topCrc = getCrc(image, blackLevelError, whiteLevelError, 0, scaleX, scaleY); + int bottomCrc = getCrc(image, blackLevelError, whiteLevelError, SCREEN_BLOCKS_Y - 1, scaleX, scaleY); + + if (topCrc != bottomCrc) + throw std::invalid_argument("The encoded CRC of the top line is different than the bottom line. Make sure the test board takes up the entire screen in the video live preview."); + + Debug(_log, "Current frame: crc = %i, black level = (%0.2f, %0.2f, %0.2f), white level = (%0.2f, %0.2f, %0.2f), ", + topCrc, black.x, black.y, black.z, white.x, white.y, white.z); - color = ColorRgb((uint8_t)qMin(qRound(cR / 9.0), 255), (uint8_t)qMin(qRound(cG / 9.0), 255), (uint8_t)qMin(qRound(cB / 9.0), 255)); + ////////////////////////////////////////////////////////////////////////// + //////////////////////// Colors Capturing ////////////////////////////// + ////////////////////////////////////////////////////////////////////////// + calibration.colorRange = (getVecMax(black) > 2 || calibration.isYUV) ? YuvConverter::COLOR_RANGE::LIMITED : YuvConverter::COLOR_RANGE::FULL; + Debug(_log, "Color range: %s", (calibration.colorRange == YuvConverter::COLOR_RANGE::LIMITED) ? 
"LIMITED" : "FULL"); - if (py < 71 && py > 0) + int actual = 0; + for (int py = 1; py < SCREEN_BLOCKS_Y - 1; py++) + for (int px = 0; px < LOGIC_BLOCKS_X; px++) { - if (!_finish) + TEST_COLOR_ID prime = TEST_COLOR_ID::WHITE; + int shadow = 0; + Result result; + + if (!getSourceColor(actual++, result.source, prime, shadow)) { - storeColor(_startColor, color); - _finish |= increaseColor(_startColor); + py = SCREEN_BLOCKS_Y; + break; } - else - increaseColor(_startColor); - } - else - { - if (px == 0) + + result.input = getColor(image, blackLevelError, px, py, scaleX, scaleY); + + if (calibration.isYUV) { - white = color; - validate = 0; - diffColor = qMax(white.red, qMax(white.green, white.blue)); - diffColor = qMax((diffColor * 5) / 100, 10); + result.inputYUV = result.input; } - else if (px == 1) + else { - black = color; + + result.inputYUV = yuvConverter.toYuvBT709(calibration.colorRange, result.input/255.0) * 255.0; + result.inputRGB = result.input; } - else if (px >= 8 && px < 24) - { - bool isWhite = ((diffColor + color.red) >= white.red && - (diffColor + color.green) >= white.green && - (diffColor + color.blue) >= white.blue); - bool isBlack = (color.red <= (black.red + diffColor) && - color.green <= (black.green + diffColor) && - color.blue <= (black.blue + diffColor)); + calibration.results[prime][shadow] = result; + } - if ((isWhite && isBlack) || (!isWhite && !isBlack)) - { - if (_warningCRC != _checksum && (InternalClock::now() - _timeStamp > 1000)) - { - _warningCRC = _checksum; - Warning(_log, "Invalid CRC at: %i. CurrentColor: %s, Black: %s, White: %s, StartColor: %s, EndColor: %s.", int(px - 8), - STRING_CSTR(color), STRING_CSTR(black), - STRING_CSTR(white), STRING_CSTR(_startColor), - STRING_CSTR(_endColor)); - } - return; - } + stopHandler(); + calibrate(); - auto sh = (isWhite) ? 1 << (15 - (px - 8)) : 0; + } + catch (const std::exception& ex) + { + Error(_log, ex.what()); + } + catch (...) 
+ { + Error(_log, "General exception"); + } +} + + +struct MappingPrime { + TEST_COLOR_ID prime; + double3 org; + double3 real; + double3 delta{}; +}; + +double3 acesToneMapping(double3 input) +{ + const double3x3 aces_input_matrix = + { + {0.59719f, 0.35458f, 0.04823f}, + {0.07600f, 0.90834f, 0.01566f}, + {0.02840f, 0.13383f, 0.83777f} + }; + + const double3x3 aces_output_matrix = + { + {1.60475f, -0.53108f, -0.07367f}, + {-0.10208f, 1.10813f, -0.00605f}, + {-0.00327f, -0.07276f, 1.07602f} + }; + + auto rtt_and_odt_fit = [](double3 v) + { + double3 a = v * (v + 0.0245786) - 0.000090537; + double3 b = v * (0.983729 * v + 0.4329510) + 0.238081; + return a / b; + }; + + input = mul(aces_input_matrix, input); + input = rtt_and_odt_fit(input); + return mul(aces_output_matrix, input); +} - validate |= sh; - } - else if (px == 24) - { - if (validate != _checksum) - { - if (_warningMismatch != _checksum && (InternalClock::now() - _timeStamp > 1000)) - { - _warningMismatch = _checksum; - Warning(_log, "CRC does not match: %i but expected %i, StartColor: %s , EndColor: %s", validate, _checksum, - STRING_CSTR(_startColor), STRING_CSTR(_endColor)); - } - return; - } - } - } - } - switch (py) + +double3 uncharted2_filmic(double3 v) +{ + float exposure_bias = 2.0f; + + auto uncharted2_tonemap_partial = [](double3 x) { - case(0): py = 71; break; - case(70): py = 72; break; - case(71): py = 1; break; - default: py++; break; + float A = 0.15f; + float B = 0.50f; + float C = 0.10f; + float D = 0.20f; + float E = 0.02f; + float F = 0.30f; + return ((x * (A * x + C * B) + D * E) / (x * (A * x + B) + D * F)) - E / F; + }; + + double3 curr = uncharted2_tonemap_partial(v * exposure_bias); + + double3 W = double3(11.2f); + double3 white_scale = double3(1.0f) / uncharted2_tonemap_partial(W); + return curr * white_scale; +} + +void doToneMapping(std::list& m, double3& p) +{ + auto a = xyz_to_lch(from_sRGB_to_XYZ(p) * 100.0); + auto iter = m.begin(); + auto last = *(iter++); + for (; iter != m.end(); last = *(iter++)) + if ((last.real.z >= a.z && a.z >= (*iter).real.z)) + { + auto& current = (*iter); + double lastAsp = last.real.z - a.z; + double curAsp = a.z - current.real.z; + double prop = 1 - (lastAsp / (lastAsp + curAsp)); + double chromaLastAsp = clamp(a.y / last.real.y, 0.0, 1.0); + double chromaCurrentAsp = clamp(a.y / current.real.y, 0.0, 1.0); + a.y += prop * last.delta.y * chromaLastAsp + (1 - prop) * current.delta.y * chromaCurrentAsp; + a.z += prop * last.delta.z + (1 - prop) * current.delta.z; + p = from_XYZ_to_sRGB(lch_to_xyz(a) / 100.0); + return; + } +} + +std::list LutCalibrator::toneMapping() +{ + std::list m = { + { TEST_COLOR_ID::GREEN, {}, {} }, + { TEST_COLOR_ID::GREEN_BLUE, {}, {} }, + { TEST_COLOR_ID::BLUE, {}, {} }, + { TEST_COLOR_ID::RED_BLUE, {}, {} }, + { TEST_COLOR_ID::RED, {}, {} }, + { TEST_COLOR_ID::RED_GREEN, {}, {} }, + { TEST_COLOR_ID::RED_GREEN2, {}, {} }, + { TEST_COLOR_ID::GREEN_BLUE2, {}, {} }, + { TEST_COLOR_ID::RED_BLUE2, {}, {} }, + { TEST_COLOR_ID::RED2_GREEN, {}, {} }, + { TEST_COLOR_ID::GREEN2_BLUE, {}, {} }, + { TEST_COLOR_ID::RED2_BLUE, {}, {} } + }; + + for (auto& c : m) + { + auto a = to_double3(TEST_COLORS.at(c.prime).first) / 255.0; + c.org = xyz_to_lch(from_sRGB_to_XYZ(a) * 100.0); + + int average = 0; + for (int index = COLOR_DIVIDES - 1; index >= 0; index--) + { + auto b = calibration.results[c.prime][0].outputNormRGB; + c.real = xyz_to_lch(from_sRGB_to_XYZ(b) * 100.0); + c.delta += c.org - c.real; + average++; } + + c.delta /= average; } + m.sort([](const 
MappingPrime& a, const MappingPrime& b) { return a.real.z > b.real.z; }); + + auto loopEnd = m.front(); + auto loopFront = m.back(); - if (_startColor != _endColor) + loopEnd.org.z -= 360; + loopEnd.real.z -= 360; + m.push_back(loopEnd); + + loopFront.org.z += 360; + loopFront.real.z += 360; + m.push_front(loopFront); + + QStringList info, intro; + info.append("Primaries in LCH colorspace"); + info.append("name, RGB primary in LCH, captured primary in LCH | primary RGB, captured RGB | average LCH delta | LCH to RGB way back "); + info.append("--------------------------------------------------------------------------------------------------------------------------------------------------------"); + for (auto& c : m) { - stopHandler(); - Error(_log, "Unexpected color %s != %s", STRING_CSTR(_startColor), STRING_CSTR(_endColor)); - report["status"] = 1; - SignalLutCalibrationUpdated(report); - return; + auto aa = from_XYZ_to_sRGB(lch_to_xyz(c.org) / 100.0) * 255; + auto bb = from_XYZ_to_sRGB(lch_to_xyz(c.real) / 100.0) * 255; + info.append(QString("%1 %2 %3 | %4 %5 | %6 | %7 %8").arg(TEST_COLORS.at(c.prime).second + ":", 12). + arg(vecToString(c.org)). + arg(vecToString(c.real)). + arg(vecToString(TEST_COLORS.at(c.prime).first)). + arg(vecToString(to_byte3(calibration.results[c.prime][0].outputRGB))). + arg(vecToString(c.delta)). + arg(vecToString(to_byte3(aa))). + arg(vecToString(to_byte3(bb)))); + } + info.append("--------------------------------------------------------------------------------------------------------------------------------------------------------"); + info.append(""); + info.append(""); + info.append(" LCH mapping correction"); + info.append(" Source sRGB color => captured => Rec.2020 processing => LCH final correction"); + info.append("-------------------------------------------------------------------------------------------------"); + + for (auto& c : calibration.results) + { + bool firstLine = true; + for (auto& p : calibration.results[c.first]) + { + auto r = p.outputNormRGB; + doToneMapping(m, r); + + p.outputRGB = r * 255.0; + + if (std::exchange(firstLine, false)) + info.append(QString("%1: %2 => %3 => %4 => %5"). + arg(TEST_COLORS.at(c.first).second + ":", 12). + arg(vecToString(TEST_COLORS.at(c.first).first)). + arg(vecToString(to_byte3(p.inputRGB * 255))). + arg(vecToString(to_byte3(p.outputNormRGB * 255))). 
+ arg(vecToString(to_byte3(p.outputRGB))) + ); + } + } + info.append("-------------------------------------------------------------------------------------------------"); + sendReport(info.join("\r\n")); + + return m; +} + +void LutCalibrator::tryHDR10() +{ + // data always incomes as yuv + for (auto& ref : calibration.results) + for (auto& result : ref.second) + { + result.inputRGB = yuvConverter.toRgb(YuvConverter::COLOR_RANGE::LIMITED, YuvConverter::YUV_COEFS::BT709, result.inputYUV / 255.0); + } + + // detect nits + double3 nits = calibration.results[TEST_COLOR_ID::WHITE][0].inputRGB; + + calibration.nits = 10000.0 * eotf(1.0, maxelem(nits)); + Debug(_log, "Assuming the signal is HDR, it is calibrated for %0.2f nits", calibration.nits); - report["status"] = 0; - report["validate"] = validate; - if (_checksum % 19 == 1) + /*double3 red = calibration.results[TEST_COLOR_ID::RED][0].inputRGB; + Debug(_log, "%s", QSTRING_CSTR(vecToString(XYZ_to_xy(from_bt2020_to_XYZ(red))))); + auto re = double3(eotf(10000.0 / calibration.nits, red.x), + eotf(10000.0 / calibration.nits, red.y), + eotf(10000.0 / calibration.nits, red.z)); + Debug(_log, "%s", QSTRING_CSTR(vecToString(XYZ_to_xy(from_bt2020_to_XYZ(re)))));*/ + // apply PQ and gamma + for (auto& ref : calibration.results) + for (auto& result : ref.second) + { + const auto& a = result.inputRGB; + auto e = double3(eotf(10000.0 / calibration.nits, a.x), + eotf(10000.0 / calibration.nits, a.y), + eotf(10000.0 / calibration.nits, a.z)); + + result.processingXYZ = from_bt2020_to_XYZ(e); + + //sRgbCutOff(result.processingXYZ); + + auto srgb = from_XYZ_to_sRGB(result.processingXYZ); + result.outputRGB = double3(clamp(ootf(srgb.x), 0.0, 1.0), + clamp(ootf(srgb.y), 0.0, 1.0), + clamp(ootf(srgb.z), 0.0, 1.0)); + result.outputNormRGB = result.outputRGB; + result.outputRGB *= 255.0; + } + + // correct gamut shift + auto m = toneMapping(); + + if (!true) { - Info(_log, "The video frame has been analyzed. 
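tryHDR10 above leans on eotf() and ootf(), which are not shown in this diff. From the way they are used (calibration.nits = 10000.0 * eotf(1.0, maxelem(nits)), then eotf(10000.0 / calibration.nits, ...) before the BT.2020 → XYZ → sRGB conversion, and ootf() as the final encode), eotf appears to be the SMPTE ST 2084 (PQ) EOTF with an extra output scale, and ootf the output transfer (gamma) encode. A reference sketch of the PQ EOTF under that assumption — this is the standard formula, not necessarily the project's own implementation:

#include <algorithm>
#include <cmath>

// SMPTE ST 2084 (PQ) EOTF: maps a non-linear signal in [0, 1] to linear light as a
// fraction of the 10000-nit peak; "scale" mirrors the calibrator's extra first
// argument (its meaning here is an assumption).
double pqEotf(double scale, double signal)
{
	constexpr double m1 = 2610.0 / 16384.0;          // 0.1593017578125
	constexpr double m2 = 2523.0 / 4096.0 * 128.0;   // 78.84375
	constexpr double c1 = 3424.0 / 4096.0;           // 0.8359375
	constexpr double c2 = 2413.0 / 4096.0 * 32.0;    // 18.8515625
	constexpr double c3 = 2392.0 / 4096.0 * 32.0;    // 18.6875

	const double p = std::pow(std::clamp(signal, 0.0, 1.0), 1.0 / m2);
	const double y = std::pow(std::max(p - c1, 0.0) / (c2 - c3 * p), 1.0 / m1);
	return scale * y;                                // e.g. scale = 10000.0 / nits
}

// Example: a 1000-nit peak is PQ-encoded near 0.7518, so
// 10000.0 * pqEotf(1.0, 0.7518) comes out at roughly 1000 nits.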
Progress: %i/21 section", _checksum); + // build LUT table + _lut.resize(LUT_FILE_SIZE); + for (int g = 0; g <= 255; g++) + for (int b = 0; b <= 255; b++) + for (int r = 0; r <= 255; r++) + { + auto a = double3(r / 255.0, g / 255.0, b / 255.0); + + auto e = double3(eotf(10000.0 / calibration.nits, a.x), + eotf(10000.0 / calibration.nits, a.y), + eotf(10000.0 / calibration.nits, a.z)); + + auto srgb = from_XYZ_to_sRGB(from_bt2020_to_XYZ(e)); + auto ready = double3(clamp(ootf(srgb.x), 0.0, 1.0), + clamp(ootf(srgb.y), 0.0, 1.0), + clamp(ootf(srgb.z), 0.0, 1.0)); + + doToneMapping(m, ready); + + //ready = acesToneMapping(ready); + //ready = uncharted2_filmic(ready); + + byte3 result = to_byte3(ready * 255.0); + + // save it + uint32_t ind_lutd = LUT_INDEX(r, g, b); + _lut.data()[ind_lutd] = result.x; + _lut.data()[ind_lutd + 1] = result.y; + _lut.data()[ind_lutd + 2] = result.z; + } + /* + QImage f; + f.load("D:/test_image.png"); + for(int i = 0; i < f.width(); i++) + for (int j = 0; j < f.height(); j++) + { + QColor c = f.pixelColor(QPoint(i, j)); + uint32_t ind_lutd = LUT_INDEX(c.red(), c.green(), c.blue()); + QColor d(_lut.data()[ind_lutd], _lut.data()[ind_lutd + 1], _lut.data()[ind_lutd + 2]); + f.setPixelColor(QPoint(i, j), d); + } + f.save("D:/test_image_output.png"); + */ + } - _checksum = -1; + printFullReport(); + +} + +void LutCalibrator::printFullReport() +{ + + QStringList ret, report; + - if (_finish) + for (auto& c : calibration.results) { - if (!correctionEnd()) - return; + ret.append(TEST_COLORS.at(c.first).second); + for (auto& p : calibration.results[c.first]) + { + ret.append(ColorSpaceMath::vecToString(to_byte3(p.outputRGB))); + } + } + + int step = COLOR_DIVIDES + 1; + for (int i = 0; i + 4 * step < ret.size(); i+= 3 * step) + for (int j = 0; j < step; j++) + report.append(QString("%1 %2 %3 %4").arg(ret[i], -16).arg(ret[i + step], -16).arg(ret[i + step * 2], -16).arg(ret[(i++) + step * 3], -32)); + + sendReport(report.join("\r\n")); +} + +void LutCalibrator::setupWhitePointCorrection() +{ + - if (!finalize()) - report["status"] = 1; + for (const auto& coeff : yuvConverter.knownCoeffs) + { + /* + QString selected; + double min = std::numeric_limits::max(); + for (int w = WHITE_POINT_D65; w < WHITE_POINT_XY.size(); w++) + { + const vec& TEST_WHITE = WHITE_POINT_XY[w]; + + auto convert_bt2020_to_XYZ = to_XYZ(PRIMARIES[w][0], PRIMARIES[w][1], PRIMARIES[w][2], TEST_WHITE); + auto white_XYZ = mul(convert_bt2020_to_XYZ, whiteLinRGB); + auto white_xy = from_XYZ_to_xy(white_XYZ); + auto difference = TEST_WHITE - white_xy; + auto distance = length2((TEST_WHITE - white_xy) * 1000000); + if (distance < min) + { + min = distance; + selected = yuvConverter.coefToString(YuvConverter::YUV_COEFS(coef)) + " => "; + selected += (w == WHITE_POINT_D65) ? "D65" : ((w == WHITE_POINT_DCI_P3) ? 
"DCI_P3" : "unknowm"); + selected += QString(" (x: %1, y: %2)").arg(TEST_WHITE.x, 0, 'f', 3).arg(TEST_WHITE.y, 0, 'f', 3); + calibration.inputBT2020toXYZ[coef] = convert_bt2020_to_XYZ; + } + } + Debug(_log, QSTRING_CSTR(selected)); + */ } +} - SignalLutCalibrationUpdated(report); +void LutCalibrator::calibrate() +{ + #ifndef NDEBUG + sendReport(yuvConverter.toString()); + #endif + + sendReport("Captured colors:\r\n" + + generateShortReport([](const Result& res) { + return vecToString( + to_byte3( + + res.inputRGB)); + })); + + tryHDR10(); + + sendReport("HDR10:\r\n" + + generateShortReport([](const Result& res) { + return vecToString(to_byte3(res.outputRGB)); + })); } + void LutCalibrator::storeColor(const ColorRgb& inputColor, const ColorRgb& color) { @@ -966,14 +1564,39 @@ bool LutCalibrator::correctionEnd() Debug(_log, "YUV range: %s", (floor >= 2 || _limitedRange) ? "LIMITED" : "FULL"); Debug(_log, "YUV coefs: %s", REC(_currentCoef)); + // white point correction + double nits = 200; + linalg::mat convert_bt2020_to_XYZ; + linalg::mat convert_XYZ_to_sRgb; + whitePointCorrection(nits, convert_bt2020_to_XYZ, convert_XYZ_to_sRgb); + // build LUT table for (int g = 0; g <= 255; g++) for (int b = 0; b <= 255; b++) for (int r = 0; r <= 255; r++) { - double Ri = clampDouble((r * whiteBalance.scaledRed - floor) / scale, 0, 1.0); - double Gi = clampDouble((g * whiteBalance.scaledGreen - floor) / scale, 0, 1.0); - double Bi = clampDouble((b * whiteBalance.scaledBlue - floor) /scale, 0, 1.0); + double Ri, Gi, Bi; + + if (strategy == 3) + { + linalg::vec inputPoint(r, g, b); + inputPoint.x = eotf(10000.0 / nits, inputPoint.x / 255.0); + inputPoint.y = eotf(10000.0 / nits, inputPoint.y / 255.0); + inputPoint.z = eotf(10000.0 / nits, inputPoint.z / 255.0); + + inputPoint = linalg::mul(convert_bt2020_to_XYZ, inputPoint); + inputPoint = linalg::mul(convert_XYZ_to_sRgb, inputPoint); + + Ri = inputPoint.x; + Gi = inputPoint.y; + Bi = inputPoint.z; + } + else + { + Ri = clampDouble((r * whiteBalance.scaledRed - floor) / scale, 0, 1.0); + Gi = clampDouble((g * whiteBalance.scaledGreen - floor) / scale, 0, 1.0); + Bi = clampDouble((b * whiteBalance.scaledBlue - floor) / scale, 0, 1.0); + } // ootf if (strategy == 1) @@ -994,11 +1617,19 @@ bool LutCalibrator::correctionEnd() // bt2020 if (strategy == 0 || strategy == 1) { - fromBT2020toBT709(Ri, Gi, Bi, Ri, Gi, Bi); + //fromBT2020toBT709(Ri, Gi, Bi, Ri, Gi, Bi); + linalg::vec inputPoint(Ri, Gi, Bi); + + inputPoint = mul(convert_bt2020_to_XYZ, inputPoint); + inputPoint = mul(convert_XYZ_to_sRgb, inputPoint); + + Ri = inputPoint.x; + Gi = inputPoint.y; + Bi = inputPoint.z; } // ootf - if (strategy == 0 || strategy == 1) + if (strategy == 0 || strategy == 1 || strategy == 3) { Ri = ootf(Ri); Gi = ootf(Gi); @@ -1009,7 +1640,7 @@ bool LutCalibrator::correctionEnd() double finalG = clampDouble(Gi, 0, 1.0); double finalB = clampDouble(Bi, 0, 1.0); - balanceGray(r, g, b, finalR, finalG, finalB); + //balanceGray(r, g, b, finalR, finalG, finalB); if (_saturation != 1.0 || _luminance != 1.0) colorCorrection(finalR, finalG, finalB); @@ -1087,6 +1718,64 @@ void LutCalibrator::applyFilter() memcpy(_lut.data(), _secondBuffer, LUT_FILE_SIZE); } +void LutCalibrator::whitePointCorrection(double& nits, linalg::mat& convert_bt2020_to_XYZ, linalg::mat& convert_XYZ_to_corrected) +{ + double max = std::min(_maxColor.red, std::min( _maxColor.green , _maxColor.blue)); + nits = 10000.0 * eotf(1.0, max / 255.0); + + std::vector> actualPrimaries{ + tovec(_colorBalance[capColors::Red]), 
tovec(_colorBalance[capColors::Green]), tovec(_colorBalance[capColors::Blue]), tovec(_colorBalance[capColors::White]) + }; + + for (auto& c : actualPrimaries) + { + c.x = eotf(10000.0 / nits, c.x / 255.0); + c.y = eotf(10000.0 / nits, c.y / 255.0); + c.z = eotf(10000.0 / nits, c.z / 255.0); + } + + linalg::vec bt2020_red_xy(0.708, 0.292); + linalg::vec bt2020_green_xy(0.17, 0.797); + linalg::vec bt2020_blue_xy(0.131, 0.046); + linalg::vec bt2020_white_xy(0.3127, 0.3290); + + + convert_bt2020_to_XYZ = to_XYZ(bt2020_red_xy, bt2020_green_xy, bt2020_blue_xy, bt2020_white_xy); + + vec sRgb_red_xy = { 0.64f, 0.33f }; + vec sRgb_green_xy = { 0.30f, 0.60f }; + vec sRgb_blue_xy = { 0.15f, 0.06f }; + vec sRgb_white_xy = { 0.3127f, 0.3290f }; + + vec actual_red_xy(actualPrimaries[0]); + actual_red_xy = linalg::mul(convert_bt2020_to_XYZ, actual_red_xy); + sRgb_red_xy = XYZ_to_xy(actual_red_xy); + + vec actual_green_xy(actualPrimaries[1]); + actual_green_xy = mul(convert_bt2020_to_XYZ, actual_green_xy); + sRgb_green_xy = XYZ_to_xy(actual_green_xy); + + vec actual_blue_xy(actualPrimaries[2]); + actual_blue_xy = mul(convert_bt2020_to_XYZ, actual_blue_xy); + sRgb_blue_xy = XYZ_to_xy(actual_blue_xy); + + vec actual_white_xy(actualPrimaries[3]); + actual_white_xy = mul(convert_bt2020_to_XYZ, actual_white_xy); + sRgb_white_xy = XYZ_to_xy(actual_white_xy); + + mat convert_sRgb_to_XYZ; + convert_sRgb_to_XYZ = to_XYZ(sRgb_red_xy, sRgb_green_xy, sRgb_blue_xy, sRgb_white_xy); + + convert_XYZ_to_corrected = inverse(convert_sRgb_to_XYZ); + + Debug(_log, "YUV coefs: %s", REC(_currentCoef)); + Debug(_log, "Nits: %f", nits); + Debug(_log, "r: (%.3f, %.3f) vs (%.3f, %.3f)", sRgb_red_xy.x, sRgb_red_xy.y, 0.64f, 0.33f); + Debug(_log, "g: (%.3f, %.3f) vs (%.3f, %.3f)", sRgb_green_xy.x, sRgb_green_xy.y, 0.30f, 0.60f); + Debug(_log, "b: (%.3f, %.3f) vs (%.3f, %.3f)", sRgb_blue_xy.x, sRgb_blue_xy.y, 0.15f, 0.06f); + Debug(_log, "w: (%.3f, %.3f) vs (%.3f, %.3f)", sRgb_white_xy.x, sRgb_white_xy.y, 0.3127f, 0.3290f); +} + double LutCalibrator::fineTune(double& optimalRange, double& optimalScale, int& optimalWhite, int& optimalStrategy) { QString optimalColor; @@ -1104,14 +1793,20 @@ double LutCalibrator::fineTune(double& optimalRange, double& optimalScale, int& double rangeStart = 20, rangeLimit = 150; bool restart = false; + // white point correction + double nits = 200; + mat convert_bt2020_to_XYZ; + mat convert_XYZ_to_sRgb; + whitePointCorrection(nits, convert_bt2020_to_XYZ, convert_XYZ_to_sRgb); + for (int whiteIndex = capColors::Gray1; whiteIndex <= capColors::White; whiteIndex++) { ColorStat whiteBalance = _colorBalance[whiteIndex]; for (int scale = (qRound(ceiling) / 8) * 8, limitScale = 512; scale <= limitScale; scale = (scale == limitScale) ? limitScale + 1 : qMin(scale + 4, limitScale)) - for (int strategy = 0; strategy < 3; strategy++) + for (int strategy = 0; strategy < 4; strategy++) for (double range = rangeStart; range <= rangeLimit; range += (range < 5) ? 
0.1 : 0.5) - if (strategy != 2 || range == rangeLimit) + if ((strategy != 2 && strategy != 3) || range == rangeStart) { double currentError = 0; QList colors; @@ -1120,11 +1815,27 @@ double LutCalibrator::fineTune(double& optimalRange, double& optimalScale, int& for (int ind : primaries) { ColorStat calculated, normalized = _colorBalance[ind]; - normalized /= (double)scale; - normalized.red *= whiteBalance.scaledRed; - normalized.green *= whiteBalance.scaledGreen; - normalized.blue *= whiteBalance.scaledBlue; + if (strategy == 3) + { + vec inputPoint(_colorBalance[ind].red, _colorBalance[ind].green, _colorBalance[ind].blue); + inputPoint.x = eotf(10000.0 / nits, inputPoint.x / 255.0); + inputPoint.y = eotf(10000.0 / nits, inputPoint.y / 255.0); + inputPoint.z = eotf(10000.0 / nits, inputPoint.z / 255.0); + + inputPoint = mul(convert_bt2020_to_XYZ, inputPoint); + inputPoint = mul(convert_XYZ_to_sRgb, inputPoint); + + calculated = ColorStat(inputPoint.x, inputPoint.y, inputPoint.z); + } + else + { + normalized /= (double)scale; + + normalized.red *= whiteBalance.scaledRed; + normalized.green *= whiteBalance.scaledGreen; + normalized.blue *= whiteBalance.scaledBlue; + } // ootf if (strategy == 1) @@ -1145,11 +1856,18 @@ double LutCalibrator::fineTune(double& optimalRange, double& optimalScale, int& // bt2020 if (strategy == 0 || strategy == 1) { - fromBT2020toBT709(normalized.red, normalized.green, normalized.blue, calculated.red, calculated.green, calculated.blue); + //fromBT2020toBT709(normalized.red, normalized.green, normalized.blue, calculated.red, calculated.green, calculated.blue); + + vec inputPoint(normalized.red, normalized.green, normalized.blue); + + inputPoint = mul(convert_bt2020_to_XYZ, inputPoint); + inputPoint = mul(convert_XYZ_to_sRgb, inputPoint); + + calculated = ColorStat(inputPoint.x, inputPoint.y, inputPoint.z); } // ootf - if (strategy == 0 || strategy == 1) + if (strategy == 0 || strategy == 1 || strategy == 3) { calculated.red = ootf(calculated.red); calculated.green = ootf(calculated.green); diff --git a/sources/utils/FrameDecoder.cpp b/sources/utils/FrameDecoder.cpp index 47c7d5224..4b8d60f11 100644 --- a/sources/utils/FrameDecoder.cpp +++ b/sources/utils/FrameDecoder.cpp @@ -101,7 +101,7 @@ void FrameDecoder::processImage( #ifdef TAKE_SCREEN_SHOT if (screenShotTaken > 0 && screenShotTaken-- == 1) { - QImage jpgImage((const uint8_t*)outputImage.memptr(), outputImage.width(), outputImage.height(), 3 * outputImage.width(), QImage::Format_RGB888); + QImage jpgImage((const uint8_t*)outputImage.rawMem(), outputImage.width(), outputImage.height(), 3 * outputImage.width(), QImage::Format_RGB888); jpgImage.save("D:/grabber_yuv.png", "png"); } #endif @@ -267,7 +267,7 @@ void FrameDecoder::processImage( #ifdef TAKE_SCREEN_SHOT if (screenShotTaken > 0 && screenShotTaken-- == 1) { - QImage jpgImage((const uint8_t*)outputImage.memptr(), outputImage.width(), outputImage.height(), 3 * outputImage.width(), QImage::Format_RGB888); + QImage jpgImage((const uint8_t*)outputImage.rawMem(), outputImage.width(), outputImage.height(), 3 * outputImage.width(), QImage::Format_RGB888); jpgImage.save("D:/grabber_nv12.png", "png"); } #endif diff --git a/www/js/grabber_calibration.js b/www/js/grabber_calibration.js index c7c0a0823..f23bac0a8 100644 --- a/www/js/grabber_calibration.js +++ b/www/js/grabber_calibration.js @@ -1,4 +1,16 @@ $(document).ready( function(){ + if (window.matchMedia("(color-gamut: srgb)").matches) { + console.log(`Screen supports approximately the sRGB gamut or more.`); 
+ } + + if (window.matchMedia("(color-gamut: p3)").matches) { + console.log(`Screen supports approximately the gamut specified by the DCI P3 Color Space or more.`); + } + + if (window.matchMedia("(color-gamut: rec2020)").matches) { + console.log(`Screen supports approximately the gamut specified by the ITU-R Recommendation BT.2020 Color Space or more.`); + } + class ColorRgb { constructor(_R,_G,_B) { @@ -6,44 +18,43 @@ $(document).ready( function(){ this.g = _G; this.b = _B; } - set(_R,_G,_B) + clone(_p) { - this.r = _R; - this.g = _G; - this.b = _B; + this.r = _p.r; + this.g = _p.g; + this.b = _p.b; } - equal(x) + divide(_m, _n) { - if (this.r == x.r && this.g == x.g && this.b == x.b) - return true; - else - return false; + this.r -= Math.round(_m * (this.r + (Math.trunc(this.r) % 2)) / _n) - ((_m == 0) ? 0 : (Math.trunc(this.r % 2))); + this.g -= Math.round(_m * (this.g + (Math.trunc(this.g) % 2)) / _n) - ((_m == 0) ? 0 : (Math.trunc(this.g % 2))); + this.b -= Math.round(_m * (this.b + (Math.trunc(this.b) % 2)) / _n) - ((_m == 0) ? 0 : (Math.trunc(this.b % 2))); } } - + let primeColors = [ - new ColorRgb(255, 0, 0), new ColorRgb(0, 255, 0), new ColorRgb(0, 0, 255), new ColorRgb(255, 255, 0), - new ColorRgb(255, 0, 255), new ColorRgb(0, 255, 255), new ColorRgb(255, 128, 0), new ColorRgb(255, 0, 128), new ColorRgb(0, 128, 255), - new ColorRgb(128, 64, 0), new ColorRgb(128, 0, 64), - new ColorRgb(128, 0, 0), new ColorRgb(0, 128, 0), new ColorRgb(0, 0, 128), - new ColorRgb(16, 16, 16), new ColorRgb(32, 32, 32), new ColorRgb(48, 48, 48), new ColorRgb(64, 64, 64), new ColorRgb(96, 96, 96), new ColorRgb(120, 120, 120), new ColorRgb(144, 144, 144), new ColorRgb(172, 172, 172), new ColorRgb(196, 196, 196), new ColorRgb(220, 220, 220), - new ColorRgb(255, 255, 255), - new ColorRgb(0, 0, 0) - ]; + new ColorRgb(255, 255, 255), + new ColorRgb(255, 0, 0 ), + new ColorRgb(0, 255, 0 ), + new ColorRgb(0, 0, 255), + new ColorRgb(255, 255, 0 ), + new ColorRgb(0, 255, 255), + new ColorRgb(255, 0, 255), + new ColorRgb(255, 128, 0 ), + new ColorRgb(0, 255, 128), + new ColorRgb(255, 0, 128), + new ColorRgb(128, 255, 0 ), + new ColorRgb(0, 128, 255), + new ColorRgb(128, 0, 255) ]; - let currentColor = new ColorRgb(0,0,0); - let startColor = new ColorRgb(0,0,0); let checksum = 0; - let maxLimit = 255; let finish = false; let running = false; - let limited = false; let saturation = 1; let luminance = 1; let gammaR = 1; let gammaG = 1; let gammaB = 1; - let coef = 0; const canvas = document.getElementById("canvas"); const ctx = canvas.getContext("2d"); @@ -58,11 +69,8 @@ $(document).ready( function(){ }, false); performTranslation(); - $("#grabber_calibration_intro").html($.i18n("grabber_calibration_expl")); - - - $("#startCalibration").off('click').on('click', function() { limited = false; coef = 0; startCalibration(); }); + $("#grabber_calibration_intro").html($.i18n("grabber_calibration_expl")); sendToHyperhdr("serverinfo", "", '"subscribe":["lut-calibration-update"]'); @@ -70,6 +78,8 @@ $(document).ready( function(){ { handleMessage(event); }); + + $("#startCalibration").off('click').on('click', function() { startCalibration(); }); resetImage(); @@ -108,22 +118,7 @@ $(document).ready( function(){ if (!running) return; - - if (json.limited == 1 && !limited) - { - limited = true; - startCalibration(); - return; - } - - if (typeof json.coef != 'undefined' && json.coef != null && !isNaN(json.coef)) - { - console.log(json.coef); - coef = json.coef; - startCalibration(); - return; - } - + if (json.status != 0) { 
document.body.style.overflow = 'visible'; @@ -133,30 +128,21 @@ $(document).ready( function(){ return; } - if (json.validate != checksum) + if (json.validate < 0) { - document.body.style.overflow = 'visible'; - canvas.classList.remove("fullscreen-canvas"); - running = false; - alert("Unexpected CRC: "+json.validate+", waiting for: "+checksum); - return; - } - - if (finish) - { + finish = true; canvas.classList.remove("fullscreen-canvas"); running = false; - //alert(`Finished!\n\nFinal section: ${checksum}.\nIf the new LUT file was successfully created then you can find the path in the HyperHDR logs.\n\nUsually it's 'lut_lin_tables.3d' in your home HyperHDR folder.`); document.body.style.overflow = 'visible'; resetImage(); } else { - checksum++; + checksum = json.validate; drawImage(); setTimeout(() => { - requestLutCalibration("capture", checksum, startColor, currentColor, limited, saturation, luminance, gammaR, gammaG, gammaB, coef); - }, 15); + requestLutCalibration("capture", checksum, saturation, luminance, gammaR, gammaG, gammaB); + }, 500); } } @@ -166,8 +152,6 @@ $(document).ready( function(){ { document.body.style.overflow = 'hidden'; canvas.classList.add("fullscreen-canvas"); - currentColor = new ColorRgb(0,0,0); - startColor = new ColorRgb(0,0,0); checksum = 0; finish = false; running = true; @@ -182,90 +166,82 @@ $(document).ready( function(){ drawImage(); setTimeout(() => { - requestLutCalibration("capture", checksum, startColor, currentColor, limited, saturation, luminance, gammaR, gammaG, gammaB, coef); - }, 1000); + requestLutCalibration("capture", checksum, saturation, luminance, gammaR, gammaG, gammaB); + }, 100); } else alert('Please run fullscreen mode (F11)'); }; - function drawImage() - { - startColor = Object.assign({}, currentColor); - - let scaleX = canvas.width / 128; - let scaleY = canvas.height / 72; - - for(let py = 0; py < 72; py++) - for(let px = (py < 71 && py > 0) ? 
checksum % 2: 0; px < 128; px++) + const SCREEN_BLOCKS_X = 64; + const SCREEN_BLOCKS_Y = 36; + const LOGIC_BLOCKS_X_SIZE = 4; + const LOGIC_BLOCKS_X = Math.floor((SCREEN_BLOCKS_X - 1) / LOGIC_BLOCKS_X_SIZE); + const COLOR_DIVIDES = 32; + + function draw(x, y, scaleX, scaleY) + { + let sX = Math.round((x * LOGIC_BLOCKS_X_SIZE + (y % 2) * 2 + 1)* scaleX); + let sY = Math.round(y * scaleY); + + ctx.fillRect(sX, sY, scaleX, scaleY); + } + + function getColor(index) + { + let color = new ColorRgb(0,0,0); + let searching = 0; + + for(let i = 0; i < primeColors.length; i++) + for(let j = 0; j < COLOR_DIVIDES; j++) { - let sx = px * scaleX; - let ex = (px + 1) * scaleX; - let sy = py * scaleY; - let ey = (py + 1) * scaleY; - - if (py < 71 && py > 0) + if (searching == index) { - ctx.fillStyle = `rgb(${currentColor.r}, ${currentColor.g}, ${currentColor.b})`; - ctx.fillRect(sx, sy, ex - sx, ey - sy); + color.clone(primeColors[i]); + color.divide(j, COLOR_DIVIDES); + console.log(`[${color.r}, ${color.g}, ${color.b}]`) - increaseColor(currentColor); + return color; } else - { - if (px == 0) - ctx.fillStyle = `rgb(255,255,255)`; - else if (px == 1) - ctx.fillStyle = `rgb(0,0,0)`; - else if (px == 2) - ctx.fillStyle = `rgb(255,0,0)`; - else if (px == 3) - ctx.fillStyle = `rgb(255,255,0)`; - else if (px == 4) - ctx.fillStyle = `rgb(255,0,255)`; - else if (px == 5) - ctx.fillStyle = `rgb(0,255,0)`; - else if (px == 6) - ctx.fillStyle = `rgb(0,255,255)`; - else if (px == 7) - ctx.fillStyle = `rgb(0,0,255)`; - else if (px >= 8 && px < 24) - { - let sh = 1 << (15 - (px - 8)); - - if (checksum & sh) - ctx.fillStyle = `rgb(255,255,255)`; - else - ctx.fillStyle = `rgb(0,0,0)`; - } - else if (px >= 24 && px < 60) - ctx.fillStyle = `rgb(255,255,255)`; - else if (px >= 60 && px < 96) - ctx.fillStyle = `rgb(0,0,0)`; - else if (px >= 96) - ctx.fillStyle = `rgb(128,128,128)`; - - ctx.fillRect(sx, sy, ex - sx, ey - sy); - } + searching++; } + + finish = true; + return color; } + + function drawImage() + { + let scaleX = canvas.width / SCREEN_BLOCKS_X; + let scaleY = canvas.height / SCREEN_BLOCKS_Y; + let actual = 0; + for(let py = 1; py < SCREEN_BLOCKS_Y - 1; py++) + for(let px = 0; px < LOGIC_BLOCKS_X; px++) + { + let currentColor = getColor(actual++); + ctx.fillStyle = `rgb(${currentColor.r}, ${currentColor.g}, ${currentColor.b})`; + draw(px, py, scaleX, scaleY); + } + + ctx.fillStyle = `rgb(0, 0, 0)`; + draw(0, 0, scaleX, scaleY); draw(0, SCREEN_BLOCKS_Y - 1, scaleX, scaleY); + ctx.fillStyle = `rgb(255, 255, 255)`; + draw(1, 0, scaleX, scaleY); draw(1, SCREEN_BLOCKS_Y - 1, scaleX, scaleY); + + for(let py = 0; py < SCREEN_BLOCKS_Y; py += SCREEN_BLOCKS_Y - 1) + for(let px = 2; px < 8 + 2; px++) + { + let sh = 1 << (7 - (px - 2)); + + if (checksum & sh) + ctx.fillStyle = `rgb(255,255,255)`; + else + ctx.fillStyle = `rgb(0,0,0)`; + + draw(px, py, scaleX, scaleY); + } + } - function increaseColor(color) - { - debugger; - if (color.equal(primeColors[primeColors.length -1])) - color.set(primeColors[0].r, primeColors[0].g, primeColors[0].b); - else - { - for (let i = 0; i < primeColors.length; i++ ) - if (color.equal(primeColors[i])) - { - i++; - color.set(primeColors[i].r, primeColors[i].g, primeColors[i].b); - break; - } - } - finish = (checksum > 20) ? 
true : false; - } startCalibration(); }); diff --git a/www/js/hyperhdr.js b/www/js/hyperhdr.js index 6b0231024..d0c263607 100644 --- a/www/js/hyperhdr.js +++ b/www/js/hyperhdr.js @@ -653,11 +653,9 @@ function requestLutInstall(address, hardware_brightness, hardware_contrast, hard "now":${now}`); } -async function requestLutCalibration(mode, params, startColor, endColor, limitedRange, saturation, luminance, gammaR, gammaG, gammaB, coef) +async function requestLutCalibration(mode, params, saturation, luminance, gammaR, gammaG, gammaB) { - var sColor = JSON.stringify(startColor); - var eColor = JSON.stringify(endColor); - sendToHyperhdr("lut-calibration", mode, `"checksum":${params}, "limitedRange":${limitedRange}, "saturation":${saturation}, "luminance":${luminance}, "gammaR":${gammaR}, "gammaG":${gammaG}, "gammaB":${gammaB}, "startColor":${sColor}, "endColor":${eColor}, "coef":${coef}`); + sendToHyperhdr("lut-calibration", mode, `"checksum":${params}, "saturation":${saturation}, "luminance":${luminance}, "gammaR":${gammaR}, "gammaG":${gammaG}, "gammaB":${gammaB}`); } async function requestHasLedClock() diff --git a/www/js/logs.js b/www/js/logs.js index cc77e22e4..d0133a587 100644 --- a/www/js/logs.js +++ b/www/js/logs.js @@ -98,18 +98,25 @@ $(document).ready(function() { function LogLine(logger,date,logger_name,level_string,debug,msg) { + const REPORT_TOKEN = ""; + var style=""; if (level_string=="INFO") - style = " class='db_info'"; + style = "db_info"; else if (level_string=="DEBUG") - style = " class='db_debug'"; + style = "db_debug"; else if (level_string=="WARNING") - style = " class='db_warning'"; + style = "db_warning"; else if (level_string=="ERROR") - style = " class='db_error'"; - - if (logger.text().length > 0) - logger.append("\n"+date.toISOString()+" ["+(logger_name).trim()+"] "+debug+msg+""); + style = "db_error"; + + if (msg.indexOf(REPORT_TOKEN) == 0) + { + msg = msg.substring(REPORT_TOKEN.length, msg.length); + logger.append(`${msg}\n`); + } else - logger.append(""+date.toISOString()+" ["+(logger_name).trim()+"] "+debug+msg+""); + { + logger.append(`${date.toISOString()} [${logger_name.trim()}] ${debug}${msg}\n`); + } }
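
Editor's note on the HDR10 path added in this patch: when the LUT is built, each 8-bit entry goes through a fixed transform, namely PQ-decode each channel (scaled by 10000/nits so the measured peak maps to 1.0), convert from BT.2020 primaries to XYZ and on to linear sRGB, then re-encode with the sRGB transfer curve before storing the byte triplet. The sketch below is a minimal, self-contained illustration of that pipeline under stated assumptions, not the patch itself: pq_eotf, srgb_oetf, buildHdr10Lut and the flat r + (g<<8) + (b<<16) indexing are hypothetical stand-ins for the patch's eotf()/ootf() helpers and its LUT_INDEX macro, the matrices are the standard Rec.2020-to-XYZ and XYZ-to-sRGB constants rather than values computed at runtime, and the tone-mapping and white-point steps are omitted.

#include <algorithm>
#include <array>
#include <cmath>
#include <cstdint>
#include <vector>

using vec3 = std::array<double, 3>;
using mat3 = std::array<vec3, 3>; // row-major 3x3

// SMPTE ST 2084 (PQ) EOTF; returns light relative to 10000 nits, multiplied by 'scale'.
static double pq_eotf(double scale, double v)
{
	constexpr double m1 = 2610.0 / 16384.0, m2 = 2523.0 / 4096.0 * 128.0;
	constexpr double c1 = 3424.0 / 4096.0, c2 = 2413.0 / 4096.0 * 32.0, c3 = 2392.0 / 4096.0 * 32.0;
	const double p = std::pow(std::clamp(v, 0.0, 1.0), 1.0 / m2);
	return scale * std::pow(std::max(p - c1, 0.0) / (c2 - c3 * p), 1.0 / m1);
}

// sRGB OETF (gamma encoding), clamped to [0, 1].
static double srgb_oetf(double v)
{
	v = std::clamp(v, 0.0, 1.0);
	return (v <= 0.0031308) ? 12.92 * v : 1.055 * std::pow(v, 1.0 / 2.4) - 0.055;
}

static vec3 mul(const mat3& m, const vec3& v)
{
	return { m[0][0] * v[0] + m[0][1] * v[1] + m[0][2] * v[2],
	         m[1][0] * v[0] + m[1][1] * v[1] + m[1][2] * v[2],
	         m[2][0] * v[0] + m[2][1] * v[1] + m[2][2] * v[2] };
}

// Standard colorimetric matrices: linear BT.2020 RGB -> XYZ and XYZ -> linear sRGB.
constexpr mat3 bt2020_to_xyz = { { { 0.636958, 0.144617, 0.168881 },
                                   { 0.262700, 0.677998, 0.059302 },
                                   { 0.000000, 0.028073, 1.060985 } } };
constexpr mat3 xyz_to_srgb   = { { {  3.2404542, -1.5371385, -0.4985314 },
                                   { -0.9692660,  1.8760108,  0.0415560 },
                                   {  0.0556434, -0.2040259,  1.0572252 } } };

// Fill a 256^3 RGB table (3 bytes per entry): PQ decode, BT.2020 -> XYZ -> sRGB, sRGB encode.
std::vector<uint8_t> buildHdr10Lut(double nits)
{
	const double scale = 10000.0 / nits; // maps the measured peak level back to 1.0
	std::vector<uint8_t> lut(size_t(256) * 256 * 256 * 3);
	for (int b = 0; b < 256; b++)
		for (int g = 0; g < 256; g++)
			for (int r = 0; r < 256; r++)
			{
				const vec3 linear = { pq_eotf(scale, r / 255.0),
				                      pq_eotf(scale, g / 255.0),
				                      pq_eotf(scale, b / 255.0) };
				const vec3 srgb = mul(xyz_to_srgb, mul(bt2020_to_xyz, linear));
				const size_t i = 3ull * (size_t(r) + (size_t(g) << 8) + (size_t(b) << 16));
				lut[i + 0] = uint8_t(std::lround(255.0 * srgb_oetf(srgb[0])));
				lut[i + 1] = uint8_t(std::lround(255.0 * srgb_oetf(srgb[1])));
				lut[i + 2] = uint8_t(std::lround(255.0 * srgb_oetf(srgb[2])));
			}
	return lut;
}

In this sketch the clamp inside srgb_oetf simply crushes out-of-gamut negatives produced by the matrix step; the patch goes further and applies its toneMapping()/doToneMapping() correction before each entry is written.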