diff --git a/src/aliceVision/image/dcp.cpp b/src/aliceVision/image/dcp.cpp
index 45079d7813..beb308b865 100644
--- a/src/aliceVision/image/dcp.cpp
+++ b/src/aliceVision/image/dcp.cpp
@@ -2,6 +2,7 @@
 #include
 #include
+#include
 #include
 #include
@@ -1294,6 +1295,63 @@ void DCPProfile::Load(const std::string& filename)
     igammatab_srgb.Set(igammatab_srgb_data);
 }
 
+void DCPProfile::Load(const std::map<std::string, std::string>& metadata)
+{
+    bool dcpMetadataOK = aliceVision::map_has_non_empty_value(metadata, "AliceVision:DCP:Temp1") &&
+                         aliceVision::map_has_non_empty_value(metadata, "AliceVision:DCP:Temp2") &&
+                         aliceVision::map_has_non_empty_value(metadata, "AliceVision:DCP:ForwardMatrixNumber") &&
+                         aliceVision::map_has_non_empty_value(metadata, "AliceVision:DCP:ColorMatrixNumber");
+
+    int colorMatrixNb;
+    int fwdMatrixNb;
+
+    if (dcpMetadataOK)
+    {
+        colorMatrixNb = std::stoi(metadata.at("AliceVision:DCP:ColorMatrixNumber"));
+        fwdMatrixNb = std::stoi(metadata.at("AliceVision:DCP:ForwardMatrixNumber"));
+
+        ALICEVISION_LOG_INFO("Matrix Number : " << colorMatrixNb << " ; " << fwdMatrixNb);
+
+        dcpMetadataOK = !((colorMatrixNb == 0) ||
+                          ((colorMatrixNb > 0) && !aliceVision::map_has_non_empty_value(metadata, "AliceVision:DCP:ColorMat1")) ||
+                          ((colorMatrixNb > 1) && !aliceVision::map_has_non_empty_value(metadata, "AliceVision:DCP:ColorMat2")) ||
+                          ((fwdMatrixNb > 0) && !aliceVision::map_has_non_empty_value(metadata, "AliceVision:DCP:ForwardMat1")) ||
+                          ((fwdMatrixNb > 1) && !aliceVision::map_has_non_empty_value(metadata, "AliceVision:DCP:ForwardMat2")));
+    }
+
+    if (!dcpMetadataOK)
+    {
+        ALICEVISION_THROW_ERROR("Image Processing: All required DCP metadata cannot be found.\n" << metadata);
+    }
+
+    info.temperature_1 = std::stof(metadata.at("AliceVision:DCP:Temp1"));
+    info.temperature_2 = std::stof(metadata.at("AliceVision:DCP:Temp2"));
+    info.has_color_matrix_1 = colorMatrixNb > 0;
+    info.has_color_matrix_2 = colorMatrixNb > 1;
+    info.has_forward_matrix_1 = fwdMatrixNb > 0;
+    info.has_forward_matrix_2 = fwdMatrixNb > 1;
+
+    std::vector<std::string> v_str;
+
+    v_str.push_back(metadata.at("AliceVision:DCP:ColorMat1"));
+    if (colorMatrixNb > 1)
+    {
+        v_str.push_back(metadata.at("AliceVision:DCP:ColorMat2"));
+    }
+    setMatricesFromStrings("color", v_str);
+
+    v_str.clear();
+    if (fwdMatrixNb > 0)
+    {
+        v_str.push_back(metadata.at("AliceVision:DCP:ForwardMat1"));
+        if (fwdMatrixNb > 1)
+        {
+            v_str.push_back(metadata.at("AliceVision:DCP:ForwardMat2"));
+        }
+        setMatricesFromStrings("forward", v_str);
+    }
+}
+
 void DCPProfile::apply(OIIO::ImageBuf& image, const DCPProfileApplyParams& params)
 {
     // Compute matrices to and from selected working space
@@ -2106,27 +2164,28 @@ DCPProfile::Matrix DCPProfile::getCameraToSrgbLinearMatrix(const double x, const
     return cameraToSrgbLinear;
 }
 
-DCPProfile::Matrix DCPProfile::getCameraToACES2065Matrix(const Triple& asShotNeutral, const bool sourceIsRaw) const
+DCPProfile::Matrix DCPProfile::getCameraToACES2065Matrix(const Triple& asShotNeutral, const bool sourceIsRaw, const bool useColorMatrixOnly) const
 {
+    const Triple asShotNeutralInv = { 1.0 / asShotNeutral[0], 1.0 / asShotNeutral[1], 1.0 / asShotNeutral[2] };
+
     double x, y;
-    getChromaticityCoordinatesFromCameraNeutral(IdentityMatrix, asShotNeutral, x, y);
+    getChromaticityCoordinatesFromCameraNeutral(IdentityMatrix, asShotNeutralInv, x, y);
     double cct, tint;
     setChromaticityCoordinates(x, y, cct, tint);
     ALICEVISION_LOG_INFO("Estimated illuminant (cct; tint) : (" << cct << "; " << tint << ")");
 
     Matrix neutral = IdentityMatrix;
-
     if (sourceIsRaw)
     {
-        neutral[0][0] = 1.0 / asShotNeutral[0];
-        neutral[1][1] = 1.0 / asShotNeutral[1];
-        neutral[2][2] = 1.0 / asShotNeutral[2];
+        neutral[0][0] = asShotNeutral[0];
+        neutral[1][1] = asShotNeutral[1];
+        neutral[2][2] = asShotNeutral[2];
     }
 
     Matrix cameraToXyzD50 = IdentityMatrix;
 
-    if ((!info.has_forward_matrix_1) && (!info.has_forward_matrix_2))
+    if (useColorMatrixOnly || ((!info.has_forward_matrix_1) && (!info.has_forward_matrix_2)))
     {
         Matrix xyzToCamera = IdentityMatrix;
         if (info.has_color_matrix_1 && info.has_color_matrix_2)
@@ -2137,11 +2196,22 @@ DCPProfile::Matrix DCPProfile::getCameraToACES2065Matrix(const Triple& asShotNeu
         {
             xyzToCamera = color_matrix_1;
         }
-        const Matrix cameraToXyz = matInv(xyzToCamera);
+
+        Matrix wbInv = IdentityMatrix;
+        if (!sourceIsRaw)
+        {
+            // White balancing has been applied before demosaicing, but the color matrix is supposed to work on non white balanced data.
+            // The white balance operation must be reversed.
+            wbInv[0][0] = asShotNeutralInv[0];
+            wbInv[1][1] = asShotNeutralInv[1];
+            wbInv[2][2] = asShotNeutralInv[2];
+        }
+        const Matrix cameraToXyz = matMult(matInv(xyzToCamera), wbInv);
 
         const double D50_cct = 5000.706605070579; //
         const double D50_tint = 9.562965495510433; // Using x, y = 0.3457, 0.3585
         const Matrix cat = getChromaticAdaptationMatrix(getXyzFromChromaticityCoordinates(x, y), getXyzFromTemperature(D50_cct, D50_tint));
+
         cameraToXyzD50 = matMult(cat, cameraToXyz);
     }
     else if ((info.has_forward_matrix_1) && (info.has_forward_matrix_2))
@@ -2152,6 +2222,9 @@ DCPProfile::Matrix DCPProfile::getCameraToACES2065Matrix(const Triple& asShotNeu
     {
         cameraToXyzD50 = matMult(forward_matrix_1, neutral);
     }
+
+    ALICEVISION_LOG_INFO("cameraToXyzD50Matrix : " << cameraToXyzD50);
+
     Matrix cameraToACES2065 = matMult(xyzD50ToACES2065Matrix, cameraToXyzD50);
 
     return cameraToACES2065;
@@ -2268,9 +2341,11 @@ void DCPProfile::setMatricesFromStrings(const std::string& type, std::vector
 
-void DCPProfile::applyLinear(Image& image, const Triple& neutral, const bool sourceIsRaw) const
+void DCPProfile::applyLinear(Image& image, const Triple& neutral, const bool sourceIsRaw, const bool useColorMatrixOnly) const
 {
-    const Matrix cameraToACES2065Matrix = getCameraToACES2065Matrix(neutral, sourceIsRaw);
+    const Matrix cameraToACES2065Matrix = getCameraToACES2065Matrix(neutral, sourceIsRaw, useColorMatrixOnly);
+
+    ALICEVISION_LOG_INFO("cameraToACES2065Matrix : " << cameraToACES2065Matrix);
 
 #pragma omp parallel for
     for (int i = 0; i < image.Height(); ++i)
diff --git a/src/aliceVision/image/dcp.hpp b/src/aliceVision/image/dcp.hpp
index f96a06bfde..620a65e525 100644
--- a/src/aliceVision/image/dcp.hpp
+++ b/src/aliceVision/image/dcp.hpp
@@ -168,6 +168,12 @@ class DCPProfile final
      */
     void Load(const std::string& filename);
 
+    /**
+     * @brief DCPProfile loader
+     * @param[in] metadata Map of metadata
+     */
+    void Load(const std::map<std::string, std::string>& metadata);
+
     /**
      * @brief getMatrices gets some matrices contained in the profile
      * param[in] type The matrices to get, "color" or "forward"
@@ -201,16 +207,18 @@ class DCPProfile final
      * param[in] image The OIIO image on which the profile must be applied
      * param[in] neutral The neutral value calculated from the camera multiplicators contained in the cam_mul OIIO metadata
      * param[in] sourceIsRaw indicates that the image buffer contains data in raw space (no neutralization <=> cam_mul not applied)
+     * param[in] useColorMatrixOnly indicates to apply a DCP profile computed only from the color matrices
      */
-    void applyLinear(OIIO::ImageBuf& image, const Triple& neutral, const bool sourceIsRaw = false) const;
+    void applyLinear(OIIO::ImageBuf& image, const Triple& neutral, const bool sourceIsRaw = false, const bool useColorMatrixOnly = true) const;
 
     /**
      * @brief applyLinear applies the linear part of a DCP profile on an aliceVision image
      * param[in] image The aliceVision image on which the profile must be applied
      * param[in] neutral The neutral value calculated from the camera multiplicators contained in the cam_mul OIIO metadata
      * param[in] sourceIsRaw indicates that the image buffer contains data in raw space (no neutralization <=> cam_mul not applied)
+     * param[in] useColorMatrixOnly indicates to apply a DCP profile computed only from the color matrices
      */
-    void applyLinear(Image& image, const Triple& neutral, const bool sourceIsRaw = false) const;
+    void applyLinear(Image& image, const Triple& neutral, const bool sourceIsRaw = false, const bool useColorMatrixOnly = false) const;
 
     /**
      * @brief apply applies the non linear part of a DCP profile on an OIIO image buffer
@@ -270,7 +278,7 @@ class DCPProfile final
     Matrix getChromaticAdaptationMatrix(const Triple& xyzSource, const Triple& xyzTarget) const;
     Matrix getCameraToXyzD50Matrix(const double x, const double y) const;
     Matrix getCameraToSrgbLinearMatrix(const double x, const double y) const;
-    Matrix getCameraToACES2065Matrix(const Triple& asShotNeutral, const bool sourceIsRaw = false) const;
+    Matrix getCameraToACES2065Matrix(const Triple& asShotNeutral, const bool sourceIsRaw = false, const bool useColorMatrixOnly = false) const;
 
     Matrix ws_sRGB;  // working color space to sRGB
     Matrix sRGB_ws;  // sRGB to working color space
diff --git a/src/aliceVision/image/io.cpp b/src/aliceVision/image/io.cpp
index 7cbded20db..6da0ca4e8e 100644
--- a/src/aliceVision/image/io.cpp
+++ b/src/aliceVision/image/io.cpp
@@ -15,6 +15,7 @@
 #include
 #include
+#include
 #include
 #include
@@ -436,9 +437,39 @@ void readImage(const std::string& path,
     oiio::ImageSpec configSpec;
 
     const bool isRawImage = isRawFormat(path);
+    image::DCPProfile::Triple neutral = {1.0,1.0,1.0};
 
     if (isRawImage)
     {
+        if ((imageReadOptions.rawColorInterpretation == ERawColorInterpretation::DcpLinearProcessing) ||
+            (imageReadOptions.rawColorInterpretation == ERawColorInterpretation::DcpMetadata))
+        {
+            oiio::ParamValueList imgMetadata = readImageMetadata(path);
+            std::string cam_mul = "";
+            if (!imgMetadata.getattribute("raw:cam_mul", cam_mul))
+            {
+                cam_mul = "{1024, 1024, 1024, 1024}";
+                ALICEVISION_LOG_WARNING("[readImage]: cam_mul metadata not available, the OpenImageIO version might be too old (>= 2.4.5.0 required for DCP management).");
+            }
+
+            std::vector<float> v_mult;
+            size_t last = 1;
+            size_t next = 1;
+            while ((next = cam_mul.find(",", last)) != std::string::npos)
+            {
+                v_mult.push_back(std::stof(cam_mul.substr(last, next - last)));
+                last = next + 1;
+            }
+            v_mult.push_back(std::stof(cam_mul.substr(last, cam_mul.find("}", last) - last)));
+
+            for (int i = 0; i < 3; i++)
+            {
+                neutral[i] = v_mult[i] / v_mult[1];
+            }
+        }
+
+        ALICEVISION_LOG_INFO("Neutral from camera = {" << neutral[0] << ", " << neutral[1] << ", " << neutral[2] << "}");
+
         // libRAW configuration
         // See https://openimageio.readthedocs.io/en/master/builtinplugins.html#raw-digital-camera-files
 
@@ -456,20 +487,21 @@ void readImage(const std::string& path,
             configSpec.attribute("raw:user_mul", oiio::TypeDesc(oiio::TypeDesc::FLOAT, 4), user_mul); // no neutralization
             configSpec.attribute("raw:use_camera_matrix", 0); // do not use embeded color profile if any
             configSpec.attribute("raw:ColorSpace", "raw"); // use raw data
configSpec.attribute("raw:HighlightMode", 1); // unclip + configSpec.attribute("raw:HighlightMode", imageReadOptions.highlightMode); + configSpec.attribute("raw:Demosaic", imageReadOptions.demosaicingAlgo); } else if (imageReadOptions.rawColorInterpretation == ERawColorInterpretation::LibRawNoWhiteBalancing) { configSpec.attribute("raw:auto_bright", 0); // disable exposure correction - configSpec.attribute("raw:use_camera_wb", 0); // white balance correction - configSpec.attribute("raw:use_camera_matrix", 0); // do not use embeded color profile if any + configSpec.attribute("raw:use_camera_wb", 0); // no white balance correction + configSpec.attribute("raw:use_camera_matrix", 1); // do not use embeded color profile if any, except for dng files configSpec.attribute("raw:ColorSpace", "Linear"); // use linear colorspace with sRGB primaries } else if (imageReadOptions.rawColorInterpretation == ERawColorInterpretation::LibRawWhiteBalancing) { configSpec.attribute("raw:auto_bright", 0); // disable exposure correction configSpec.attribute("raw:use_camera_wb", 1); // white balance correction - configSpec.attribute("raw:use_camera_matrix", 0); // do not use embeded color profile if any + configSpec.attribute("raw:use_camera_matrix", 1); // do not use embeded color profile if any, except for dng files configSpec.attribute("raw:ColorSpace", "Linear"); // use linear colorspace with sRGB primaries } else if (imageReadOptions.rawColorInterpretation == ERawColorInterpretation::DcpLinearProcessing) @@ -478,13 +510,21 @@ void readImage(const std::string& path, { ALICEVISION_THROW_ERROR("A DCP color profile is required but cannot be found"); } - float user_mul[4] = { 1,1,1,1 }; - + float user_mul[4] = { neutral[0],neutral[1],neutral[2],neutral[1] }; + if (imageReadOptions.doWBAfterDemosaicing) + { + for (int i = 0; i < 4; ++i) + { + user_mul[i] = 1.0; + } + } configSpec.attribute("raw:auto_bright", 0); // disable exposure correction - configSpec.attribute("raw:use_camera_wb", 0); // no white balance correction - configSpec.attribute("raw:user_mul", oiio::TypeDesc(oiio::TypeDesc::FLOAT, 4), user_mul); // no neutralization + configSpec.attribute("raw:use_camera_wb", 0); // No White balance correction => user_mul is used + configSpec.attribute("raw:user_mul", oiio::TypeDesc(oiio::TypeDesc::FLOAT, 4), user_mul); configSpec.attribute("raw:use_camera_matrix", 0); // do not use embeded color profile if any configSpec.attribute("raw:ColorSpace", "raw"); + configSpec.attribute("raw:HighlightMode", imageReadOptions.highlightMode); + configSpec.attribute("raw:Demosaic", imageReadOptions.demosaicingAlgo); } else if (imageReadOptions.rawColorInterpretation == ERawColorInterpretation::DcpMetadata) { @@ -492,14 +532,21 @@ void readImage(const std::string& path, { ALICEVISION_THROW_ERROR("A DCP color profile is required but cannot be found"); } - float user_mul[4] = { 1,1,1,1 }; - + float user_mul[4] = { neutral[0],neutral[1],neutral[2],neutral[1] }; + if (imageReadOptions.doWBAfterDemosaicing) + { + for (int i = 0; i < 4; ++i) + { + user_mul[i] = 1.0; + } + } configSpec.attribute("raw:auto_bright", 0); // disable exposure correction configSpec.attribute("raw:use_camera_wb", 0); // no white balance correction configSpec.attribute("raw:user_mul", oiio::TypeDesc(oiio::TypeDesc::FLOAT, 4), user_mul); // no neutralization configSpec.attribute("raw:use_camera_matrix", 0); // do not use embeded color profile if any configSpec.attribute("raw:ColorSpace", "raw"); // use raw data - configSpec.attribute("raw:HighlightMode", 1); // 
+            configSpec.attribute("raw:HighlightMode", imageReadOptions.highlightMode);
+            configSpec.attribute("raw:Demosaic", imageReadOptions.demosaicingAlgo);
         }
         else
         {
@@ -531,7 +578,7 @@ void readImage(const std::string& path,
         if (!imgMetadata.getattribute("raw:cam_mul", cam_mul))
         {
             cam_mul = "{1024, 1024, 1024, 1024}";
-            ALICEVISION_LOG_WARNING("[readImage]: cam_mul metadata not availbale, the openImageIO version might be too old (>= 2.4.5.0 requested for dcp management).");
+            ALICEVISION_LOG_WARNING("[readImage]: cam_mul metadata not available, the OpenImageIO version might be too old (>= 2.4.5.0 required for DCP management).");
         }
 
         std::vector<float> v_mult;
@@ -547,12 +594,12 @@ void readImage(const std::string& path,
         image::DCPProfile::Triple neutral;
         for (int i = 0; i < 3; i++)
         {
-            neutral[i] = v_mult[1] / v_mult[i];
+            neutral[i] = v_mult[i] / v_mult[1];
         }
 
-        ALICEVISION_LOG_TRACE("Apply DCP Linear processing with neutral = {" << neutral[0] << ", " << neutral[1] << ", " << neutral[2] << "}");
+        ALICEVISION_LOG_INFO("Apply DCP Linear processing with neutral = " << neutral);
 
-        dcpProfile.applyLinear(inBuf, neutral, true);
+        dcpProfile.applyLinear(inBuf, neutral, imageReadOptions.doWBAfterDemosaicing, imageReadOptions.useDCPColorMatrixOnly);
     }
 
     // color conversion
@@ -560,12 +607,48 @@ void readImage(const std::string& path,
         ALICEVISION_THROW_ERROR("You must specify a requested color space for image file '" + path + "'.");
 
     // Get color space name. Default image color space is sRGB
-    const std::string fromColorSpaceName = (isRawImage && imageReadOptions.rawColorInterpretation == ERawColorInterpretation::DcpLinearProcessing) ? "aces2065-1" :
-                                           (isRawImage ? "linear" :
-                                           inBuf.spec().get_string_attribute("aliceVision:ColorSpace", inBuf.spec().get_string_attribute("oiio:ColorSpace", "sRGB")));
+    std::string fromColorSpaceName = (isRawImage && imageReadOptions.rawColorInterpretation == ERawColorInterpretation::DcpLinearProcessing) ? "aces2065-1" :
+                                     (isRawImage ? "linear" :
+                                     inBuf.spec().get_string_attribute("aliceVision:ColorSpace", inBuf.spec().get_string_attribute("oiio:ColorSpace", "sRGB")));
 
     ALICEVISION_LOG_TRACE("Read image " << path << " (encoded in " << fromColorSpaceName << " colorspace).");
-
+
+    DCPProfile dcpProf;
+    if ((fromColorSpaceName == "no_conversion") && (imageReadOptions.workingColorSpace != EImageColorSpace::NO_CONVERSION))
+    {
+        ALICEVISION_LOG_INFO("Source image is in a raw color space and must be converted into " << imageReadOptions.workingColorSpace << ".");
+        ALICEVISION_LOG_INFO("Check if a DCP profile is available in the metadata to be applied.");
+        if (inBuf.spec().nchannels < 3)
+        {
+            ALICEVISION_THROW_ERROR("A DCP profile cannot be applied on an image containing less than 3 channels.");
+        }
+
+        int width, height;
+        const std::map<std::string, std::string> imageMetadata = getMapFromMetadata(readImageMetadata(path, width, height));
+
+        // Load DCP metadata from the image metadata. An error will be thrown if some required metadata are missing.
+        dcpProf.Load(imageMetadata);
+
+        std::string cam_mul = map_has_non_empty_value(imageMetadata, "raw:cam_mul") ? imageMetadata.at("raw:cam_mul") : imageMetadata.at("AliceVision:raw:cam_mul");
imageMetadata.at("raw:cam_mul") : imageMetadata.at("AliceVision:raw:cam_mul"); + std::vector v_mult; + size_t last = 0; + size_t next = 1; + while ((next = cam_mul.find(",", last)) != std::string::npos) + { + v_mult.push_back(std::stof(cam_mul.substr(last, next - last))); + last = next + 1; + } + v_mult.push_back(std::stof(cam_mul.substr(last, cam_mul.find("}", last) - last))); + + for (int i = 0; i < 3; i++) + { + neutral[i] = v_mult[i] / v_mult[1]; + } + + dcpProf.applyLinear(inBuf, neutral, imageReadOptions.doWBAfterDemosaicing, imageReadOptions.useDCPColorMatrixOnly); + fromColorSpaceName = "aces2065-1"; + } + if ((imageReadOptions.workingColorSpace == EImageColorSpace::NO_CONVERSION) || (imageReadOptions.workingColorSpace == EImageColorSpace_stringToEnum(fromColorSpaceName))) { diff --git a/src/aliceVision/image/io.hpp b/src/aliceVision/image/io.hpp index 78d75ac3c2..37ca0a57ae 100644 --- a/src/aliceVision/image/io.hpp +++ b/src/aliceVision/image/io.hpp @@ -183,15 +183,19 @@ struct ImageReadOptions { ImageReadOptions(EImageColorSpace colorSpace = EImageColorSpace::AUTO, ERawColorInterpretation rawColorInterpretation = ERawColorInterpretation::LibRawWhiteBalancing, - const std::string& colorProfile = "", const oiio::ROI& roi = oiio::ROI()) : - workingColorSpace(colorSpace), rawColorInterpretation(rawColorInterpretation), colorProfileFileName(colorProfile), subROI(roi) + const std::string& colorProfile = "", const bool useDCPColorMatrixOnly = true, const oiio::ROI& roi = oiio::ROI()) : + workingColorSpace(colorSpace), rawColorInterpretation(rawColorInterpretation), colorProfileFileName(colorProfile), useDCPColorMatrixOnly(useDCPColorMatrixOnly), + doWBAfterDemosaicing(false), demosaicingAlgo("AHD"), highlightMode(0), subROI(roi) { } EImageColorSpace workingColorSpace; ERawColorInterpretation rawColorInterpretation; std::string colorProfileFileName; - + bool useDCPColorMatrixOnly; + bool doWBAfterDemosaicing; + std::string demosaicingAlgo; + int highlightMode; //ROI for this image. //If the image contains an roi, this is the roi INSIDE the roi. oiio::ROI subROI; diff --git a/src/software/pipeline/main_LdrToHdrMerge.cpp b/src/software/pipeline/main_LdrToHdrMerge.cpp index 66dfff46fa..04a53a52b0 100644 --- a/src/software/pipeline/main_LdrToHdrMerge.cpp +++ b/src/software/pipeline/main_LdrToHdrMerge.cpp @@ -339,10 +339,27 @@ int aliceVision_main(int argc, char** argv) oiio::ParamValueList targetMetadata; for (const auto& meta : viewMetadata) { - targetMetadata.add_or_replace(oiio::ParamValue(meta.first, meta.second)); + if (meta.first.compare(0, 3, "raw") == 0) + { + targetMetadata.add_or_replace(oiio::ParamValue("AliceVision:" + meta.first, meta.second)); + } + else + { + targetMetadata.add_or_replace(oiio::ParamValue(meta.first, meta.second)); + } } - targetMetadata.add_or_replace(oiio::ParamValue("AliceVision:ColorSpace", image::EImageColorSpace_enumToString(image::EImageColorSpace::LINEAR))); - image::writeImage(hdrImagePath, HDRimage, image::ImageWriteOptions().storageDataType(storageDataType), targetMetadata); + + // Fusion always produces linear image. sRGB is the only non linear color space that must be changed to linear (sRGB linear). + image::EImageColorSpace mergedColorSpace = (workingColorSpace == image::EImageColorSpace::SRGB) ? 
+                                                   image::EImageColorSpace::LINEAR : workingColorSpace;
+
+        targetMetadata.add_or_replace(oiio::ParamValue("AliceVision:ColorSpace", image::EImageColorSpace_enumToString(mergedColorSpace)));
+
+        image::ImageWriteOptions writeOptions;
+        writeOptions.fromColorSpace(mergedColorSpace);
+        writeOptions.toColorSpace(mergedColorSpace);
+        writeOptions.storageDataType(storageDataType);
+
+        image::writeImage(hdrImagePath, HDRimage, writeOptions, targetMetadata);
     }
 
     return EXIT_SUCCESS;
diff --git a/src/software/pipeline/main_panoramaPostProcessing.cpp b/src/software/pipeline/main_panoramaPostProcessing.cpp
index c0494491fa..1bc6de326f 100644
--- a/src/software/pipeline/main_panoramaPostProcessing.cpp
+++ b/src/software/pipeline/main_panoramaPostProcessing.cpp
@@ -17,6 +17,8 @@
 #include
 #include
+#include
+
 #include
 #include
 #include
@@ -142,12 +144,58 @@ bool readFullTile(image::Image & output, std::unique_ptr&
 
+void colorSpaceTransform(image::Image& inputImage, image::EImageColorSpace fromColorSpace, image::EImageColorSpace toColorSpace, image::DCPProfile dcpProf, image::DCPProfile::Triple neutral)
+{
+    const int width = inputImage.Width();
+    const int tileSize = inputImage.Height();
+    oiio::ImageBuf inBuf = oiio::ImageBuf(oiio::ImageSpec(width, tileSize, 4, oiio::TypeDesc::FLOAT), const_cast(inputImage.data()));
+    oiio::ImageBuf* outBuf = &inBuf;
+
+    if (fromColorSpace == image::EImageColorSpace::NO_CONVERSION)
+    {
+        dcpProf.applyLinear(inBuf, neutral, true, true);
+        fromColorSpace = image::EImageColorSpace::ACES2065_1;
+    }
+
+    oiio::ImageBuf colorspaceBuf = oiio::ImageBuf(oiio::ImageSpec(width, tileSize, 4, oiio::TypeDesc::FLOAT), const_cast(inputImage.data())); // buffer for image colorspace modification
+    if ((fromColorSpace == toColorSpace) || (toColorSpace == image::EImageColorSpace::NO_CONVERSION))
+    {
+        // Do nothing. Note that calling imageAlgo::colorconvert() will copy the source buffer
+        // even if no conversion is needed.
+    }
+    else if ((toColorSpace == image::EImageColorSpace::ACES2065_1) || (toColorSpace == image::EImageColorSpace::ACEScg) ||
+             (fromColorSpace == image::EImageColorSpace::ACES2065_1) || (fromColorSpace == image::EImageColorSpace::ACEScg))
+    {
+        const auto colorConfigPath = image::getAliceVisionOCIOConfig();
+        if (colorConfigPath.empty())
+        {
+            throw std::runtime_error("ALICEVISION_ROOT is not defined, OCIO config file cannot be accessed.");
+        }
+        oiio::ColorConfig colorConfig(colorConfigPath);
+        oiio::ImageBufAlgo::colorconvert(colorspaceBuf, *outBuf,
+                                         EImageColorSpace_enumToOIIOString(fromColorSpace),
+                                         EImageColorSpace_enumToOIIOString(toColorSpace), true, "", "", &colorConfig);
+        outBuf = &colorspaceBuf;
+    }
+    else
+    {
+        oiio::ImageBufAlgo::colorconvert(colorspaceBuf, *outBuf, EImageColorSpace_enumToOIIOString(fromColorSpace), EImageColorSpace_enumToOIIOString(toColorSpace));
+        outBuf = &colorspaceBuf;
+    }
+
+    oiio::ROI exportROI = outBuf->roi();
+    exportROI.chbegin = 0;
+    exportROI.chend = inputImage.Channels();
+    outBuf->get_pixels(exportROI, outBuf->pixeltype(), inputImage.data());
+}
+
 int aliceVision_main(int argc, char** argv)
 {
     std::string inputPanoramaPath;
     std::string outputPanoramaPath;
     image::EStorageDataType storageDataType = image::EStorageDataType::Float;
-    const size_t maxProcessingSize = 2000;
+    image::EImageColorSpace outputColorSpace = image::EImageColorSpace::LINEAR;
+    const size_t maxProcessingSize = 2000;
     bool fillHoles = false;
 
     // Description of mandatory parameters
@@ -160,7 +208,8 @@ int aliceVision_main(int argc, char** argv)
     po::options_description optionalParams("Optional parameters");
     optionalParams.add_options()
         ("storageDataType", po::value(&storageDataType)->default_value(storageDataType), ("Storage data type: " + image::EStorageDataType_informations()).c_str())
-        ("fillHoles", po::value(&fillHoles)->default_value(fillHoles), "Execute fill holes algorithm");
+        ("fillHoles", po::value(&fillHoles)->default_value(fillHoles), "Execute fill holes algorithm")
+        ("outputColorSpace", po::value(&outputColorSpace)->default_value(outputColorSpace), "Color space for the output panorama.");
 
     CmdLine cmdline("This program performs estimation of cameras orientation around a nodal point for 360° panorama.\n"
                     "AliceVision PanoramaPostProcessing");
@@ -182,12 +231,41 @@ int aliceVision_main(int argc, char** argv)
     const oiio::ImageSpec &inputSpec = panoramaInput->spec();
     const int tileWidth = inputSpec.tile_width;
     const int tileHeight = inputSpec.tile_height;
+    image::EImageColorSpace fromColorSpace = image::EImageColorSpace_stringToEnum(inputSpec.get_string_attribute("AliceVision:ColorSpace", "linear"));
+
     if (tileWidth != tileHeight)
     {
         ALICEVISION_LOG_ERROR("non square tiles !");
         return EXIT_FAILURE;
     }
 
+    int tmpWidth, tmpHeight;
+    std::map<std::string, std::string> imageMetadata = image::getMapFromMetadata(image::readImageMetadata(inputPanoramaPath, tmpWidth, tmpHeight));
+
+    image::DCPProfile dcpProf;
+    image::DCPProfile::Triple neutral = { 1.0,1.0,1.0 };
+    if (fromColorSpace == image::EImageColorSpace::NO_CONVERSION)
+    {
+        // load DCP metadata
+        dcpProf.Load(imageMetadata);
+
+        std::string cam_mul = map_has_non_empty_value(imageMetadata, "raw:cam_mul") ? imageMetadata.at("raw:cam_mul") : imageMetadata.at("AliceVision:raw:cam_mul");
imageMetadata.at("raw:cam_mul") : imageMetadata.at("AliceVision:raw:cam_mul"); + std::vector v_mult; + size_t last = 0; + size_t next = 1; + while ((next = cam_mul.find(",", last)) != std::string::npos) + { + v_mult.push_back(std::stof(cam_mul.substr(last, next - last))); + last = next + 1; + } + v_mult.push_back(std::stof(cam_mul.substr(last, cam_mul.find("}", last) - last))); + + for (int i = 0; i < 3; i++) + { + neutral[i] = v_mult[i] / v_mult[1]; + } + } + //Create output panorama std::unique_ptr panoramaOutput = oiio::ImageOutput::create(outputPanoramaPath); oiio::ImageSpec outputSpec(inputSpec); @@ -195,6 +273,8 @@ int aliceVision_main(int argc, char** argv) outputSpec.tile_height = 0; outputSpec.attribute("compression", "zip"); outputSpec.extra_attribs.remove("openexr:lineOrder"); + outputSpec.attribute("AliceVision:ColorSpace",image::EImageColorSpace_enumToString(outputColorSpace)); + if (!panoramaOutput->open(outputPanoramaPath, outputSpec)) { ALICEVISION_LOG_ERROR("Impossible to write to destination path"); @@ -209,7 +289,6 @@ int aliceVision_main(int argc, char** argv) const int countHeight = std::ceil(double(height) / double(tileSize)); const int rowSize = countWidth + 2; - if (fillHoles) { ALICEVISION_LOG_INFO("Reduce image (" << width << "x" << height << ")"); @@ -349,7 +428,6 @@ int aliceVision_main(int argc, char** argv) cs = ns; } - pyramid[pyramid.size() - 1] = subFiled; for (int level = pyramid.size() - 2; level >= 0; level--) @@ -415,6 +493,8 @@ int aliceVision_main(int argc, char** argv) final.block(0, 0, tileSize, width) = finalTile.block(tileSize, tileSize, tileSize, width); + colorSpaceTransform(final, fromColorSpace, outputColorSpace, dcpProf, neutral); + panoramaOutput->write_scanlines(ty * tileSize, (ty + 1) * tileSize, 0, oiio::TypeDesc::FLOAT, final.data()); } } @@ -437,7 +517,6 @@ int aliceVision_main(int argc, char** argv) ALICEVISION_LOG_ERROR("Error reading from image"); } - int available = width - tx*tileSize; if (available < tileSize) { @@ -449,6 +528,8 @@ int aliceVision_main(int argc, char** argv) } } + colorSpaceTransform(final, fromColorSpace, outputColorSpace, dcpProf, neutral); + panoramaOutput->write_scanlines(ybegin, yend, 0, oiio::TypeDesc::FLOAT, final.data()); } } diff --git a/src/software/utils/main_imageProcessing.cpp b/src/software/utils/main_imageProcessing.cpp index 3389f80c4c..99f2c06c11 100644 --- a/src/software/utils/main_imageProcessing.cpp +++ b/src/software/utils/main_imageProcessing.cpp @@ -265,6 +265,7 @@ struct ProcessingParams bool fillHoles = false; bool fixNonFinite = false; bool applyDcpMetadata = false; + bool useDCPColorMatrixOnly = false; SharpenParams sharpen = { @@ -490,10 +491,10 @@ void processImage(image::Image& image, const ProcessingParams ALICEVISION_LOG_INFO("Matrix Number : " << colorMatrixNb << " ; " << fwdMatrixNb); dcpMetadataOK = !((colorMatrixNb == 0) || - ((colorMatrixNb > 0) && map_has_non_empty_value(imageMetadata, "AliceVision:DCP:ColorMat1")) || - ((colorMatrixNb > 1) && map_has_non_empty_value(imageMetadata, "AliceVision:DCP:ColorMat2")) || - ((fwdMatrixNb > 0) && map_has_non_empty_value(imageMetadata, "AliceVision:DCP:ForwardMat1")) || - ((fwdMatrixNb > 1) && map_has_non_empty_value(imageMetadata, "AliceVision:DCP:ForwardMat2"))); + ((colorMatrixNb > 0) && !map_has_non_empty_value(imageMetadata, "AliceVision:DCP:ColorMat1")) || + ((colorMatrixNb > 1) && !map_has_non_empty_value(imageMetadata, "AliceVision:DCP:ColorMat2")) || + ((fwdMatrixNb > 0) && !map_has_non_empty_value(imageMetadata, 
"AliceVision:DCP:ForwardMat1")) || + ((fwdMatrixNb > 1) && !map_has_non_empty_value(imageMetadata, "AliceVision:DCP:ForwardMat2"))); } if (!dcpMetadataOK) @@ -530,7 +531,7 @@ void processImage(image::Image& image, const ProcessingParams dcpProf.setMatricesFromStrings("forward", v_str); } - std::string cam_mul = imageMetadata.at("raw:cam_mul"); + std::string cam_mul = map_has_non_empty_value(imageMetadata, "raw:cam_mul") ? imageMetadata.at("raw:cam_mul") : imageMetadata.at("AliceVision:raw:cam_mul"); std::vector v_mult; size_t last = 0; size_t next = 1; @@ -544,10 +545,10 @@ void processImage(image::Image& image, const ProcessingParams image::DCPProfile::Triple neutral; for (int i = 0; i < 3; i++) { - neutral[i] = v_mult[1] / v_mult[i]; + neutral[i] = v_mult[i] / v_mult[1]; } - dcpProf.applyLinear(image, neutral, true); + dcpProf.applyLinear(image, neutral, true, pParams.useDCPColorMatrixOnly); } } @@ -657,9 +658,13 @@ int aliceVision_main(int argc, char * argv[]) image::EImageColorSpace outputColorSpace = image::EImageColorSpace::LINEAR; image::EStorageDataType storageDataType = image::EStorageDataType::Float; std::string extension; - image::ERawColorInterpretation rawColorInterpretation = image::ERawColorInterpretation::LibRawNoWhiteBalancing; + image::ERawColorInterpretation rawColorInterpretation = image::ERawColorInterpretation::DcpLinearProcessing; std::string colorProfileDatabaseDirPath = ""; bool errorOnMissingColorProfile = true; + bool useDCPColorMatrixOnly = true; + bool doWBAfterDemosaicing = false; + std::string demosaicingAlgo = "AHD"; + int highlightMode = 0; ProcessingParams pParams; @@ -749,16 +754,31 @@ int aliceVision_main(int argc, char * argv[]) ("Output color space: " + image::EImageColorSpace_informations()).c_str()) ("rawColorInterpretation", po::value(&rawColorInterpretation)->default_value(rawColorInterpretation), - ("RAW color interpretation: " + image::ERawColorInterpretation_informations() + "\ndefault : librawnowhitebalancing").c_str()) + ("RAW color interpretation: " + image::ERawColorInterpretation_informations() + "\ndefault : DcpLinearProcessing").c_str()) ("applyDcpMetadata", po::value(&pParams.applyDcpMetadata)->default_value(pParams.applyDcpMetadata), "Apply after all processings a linear dcp profile generated from the image DCP metadata if any") ("colorProfileDatabase,c", po::value(&colorProfileDatabaseDirPath)->default_value(""), - "DNG Color Profiles (DCP) database path.") + "DNG Color Profiles (DCP) database path.") ("errorOnMissingColorProfile", po::value(&errorOnMissingColorProfile)->default_value(errorOnMissingColorProfile), - "Rise an error if a DCP color profiles database is specified but no DCP file matches with the camera model (maker+name) extracted from metadata (Only for raw images)") + "Rise an error if a DCP color profiles database is specified but no DCP file matches with the camera model (maker+name) extracted from metadata (Only for raw images)") + + ("useDCPColorMatrixOnly", po::value(&useDCPColorMatrixOnly)->default_value(useDCPColorMatrixOnly), + "Use only Color matrices of DCP profile, ignoring Forward matrices if any. Default: False.\n" + "In case white balancing has been done before demosaicing, the reverse operation is done before applying the color matrix.") + + ("doWBAfterDemosaicing", po::value(&doWBAfterDemosaicing)->default_value(doWBAfterDemosaicing), + "Do not use libRaw white balancing. White balancing is applied just before DCP profile if useDCPColorMatrixOnly is set to False. 
Default: False.") + + ("demosaicingAlgo", po::value(&demosaicingAlgo)->default_value(demosaicingAlgo), + "Demosaicing algorithm (see libRaw documentation).\n" + "Possible algos are: linear, VNG, PPG, AHD (default), DCB, AHD-Mod, AFD, VCD, Mixed, LMMSE, AMaZE, DHT, AAHD, none.") + + ("highlightMode", po::value(&highlightMode)->default_value(highlightMode), + "Highlight management (see libRaw documentation).\n" + "0 = clip (default), 1 = unclip, 2 = blend, 3+ = rebuild.") ("storageDataType", po::value(&storageDataType)->default_value(storageDataType), ("Storage data type: " + image::EStorageDataType_informations()).c_str()) @@ -869,17 +889,38 @@ int aliceVision_main(int argc, char * argv[]) ALICEVISION_LOG_INFO(++i << "/" << size << " - Process view '" << viewId << "'."); + auto metadata = view.getMetadata(); + + if (pParams.applyDcpMetadata && metadata["AliceVision:ColorSpace"] != "no_conversion") + { + ALICEVISION_LOG_WARNING("A dcp profile will be applied on an image containing non raw data!"); + } + image::ImageReadOptions options; - options.workingColorSpace = workingColorSpace; + options.workingColorSpace = pParams.applyDcpMetadata ? image::EImageColorSpace::NO_CONVERSION : workingColorSpace; if (rawColorInterpretation == image::ERawColorInterpretation::Auto) { options.rawColorInterpretation = image::ERawColorInterpretation_stringToEnum(view.getRawColorInterpretation()); + if (options.rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata) + { + options.useDCPColorMatrixOnly = false; + options.doWBAfterDemosaicing = true; + } + else + { + options.useDCPColorMatrixOnly = useDCPColorMatrixOnly; + options.doWBAfterDemosaicing = doWBAfterDemosaicing; + } } else { options.rawColorInterpretation = rawColorInterpretation; + options.useDCPColorMatrixOnly = useDCPColorMatrixOnly; + options.doWBAfterDemosaicing = doWBAfterDemosaicing; } options.colorProfileFileName = view.getColorProfileFileName(); + options.demosaicingAlgo = demosaicingAlgo; + options.highlightMode = highlightMode; // Read original image image::Image image; @@ -902,6 +943,11 @@ int aliceVision_main(int argc, char * argv[]) // Image processing processImage(image, pParams, view.getMetadata()); + if (pParams.applyDcpMetadata) + { + workingColorSpace = image::EImageColorSpace::ACES2065_1; + } + // Save the image saveImage(image, viewPath, outputfilePath, view.getMetadata(), metadataFolders, workingColorSpace, outputFormat, outputColorSpace, storageDataType); @@ -909,6 +955,7 @@ int aliceVision_main(int argc, char * argv[]) view.setImagePath(outputfilePath); view.setWidth(image.Width()); view.setHeight(image.Height()); + view.addMetadata("AliceVision:ColorSpace", image::EImageColorSpace_enumToString(outputColorSpace)); } if (pParams.scaleFactor != 1.0f) @@ -1012,6 +1059,10 @@ int aliceVision_main(int argc, char * argv[]) image::DCPProfile dcpProf; sfmData::View view; // used to extract and complete metadata + view.setImagePath(inputFilePath); + int width, height; + const auto metadata = image::readImageMetadata(inputFilePath, width, height); + view.setMetadata(image::getMapFromMetadata(metadata)); if (rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing || rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata) @@ -1019,12 +1070,7 @@ int aliceVision_main(int argc, char * argv[]) // Load DCP color profiles database if not already loaded dcpDatabase.load(colorProfileDatabaseDirPath.empty() ? 
 
-            // Get DSLR maker and model by creating a view and picking values up in it.
-            view.setImagePath(inputFilePath);
-            int width, height;
-            const auto metadata = image::readImageMetadata(inputFilePath, width, height);
-            view.setMetadata(image::getMapFromMetadata(metadata));
-
+            // Get DSLR maker and model in view metadata.
             const std::string& make = view.getMetadataMake();
             const std::string& model = view.getMetadataModel();
@@ -1060,19 +1106,36 @@ int aliceVision_main(int argc, char * argv[])
         {
             readOptions.rawColorInterpretation = rawColorInterpretation;
         }
-        readOptions.workingColorSpace = workingColorSpace;
+
+        std::map<std::string, std::string> md = view.getMetadata();
+
+        if (pParams.applyDcpMetadata && md["AliceVision:ColorSpace"] != "no_conversion")
+        {
+            ALICEVISION_LOG_WARNING("A DCP profile will be applied on an image containing non-raw data!");
+        }
+
+        readOptions.workingColorSpace = pParams.applyDcpMetadata ? image::EImageColorSpace::NO_CONVERSION : workingColorSpace;
+        readOptions.useDCPColorMatrixOnly = useDCPColorMatrixOnly;
+        readOptions.doWBAfterDemosaicing = doWBAfterDemosaicing;
+        readOptions.demosaicingAlgo = demosaicingAlgo;
+        readOptions.highlightMode = highlightMode;
 
         // Read original image
         image::Image image;
         image::readImage(inputFilePath, image, readOptions);
 
-        std::map<std::string, std::string> metadata = view.getMetadata();
+        pParams.useDCPColorMatrixOnly = useDCPColorMatrixOnly;
 
         // Image processing
-        processImage(image, pParams,metadata);
+        processImage(image, pParams, md);
+
+        if (pParams.applyDcpMetadata)
+        {
+            workingColorSpace = image::EImageColorSpace::ACES2065_1;
+        }
 
         // Save the image
-        saveImage(image, inputFilePath, outputFilePath, metadata, metadataFolders, workingColorSpace, outputFormat, outputColorSpace, storageDataType);
+        saveImage(image, inputFilePath, outputFilePath, md, metadataFolders, workingColorSpace, outputFormat, outputColorSpace, storageDataType);
     }
 }
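
Reviewer note, not part of the patch: a minimal usage sketch of the new ImageReadOptions fields introduced in io.hpp, showing how a caller might request DCP linear processing when reading a raw file. The element type image::Image<image::RGBAfColor>, the include path, and the helper name readRawWithDcp are assumptions for illustration; the option values shown are examples, not recommended defaults.

#include <aliceVision/image/io.hpp>

#include <string>

// Hypothetical helper: read a raw file and convert it to ACES2065-1 through a DCP profile.
void readRawWithDcp(const std::string& path,
                    const std::string& dcpFilename,
                    aliceVision::image::Image<aliceVision::image::RGBAfColor>& out)
{
    using namespace aliceVision;

    image::ImageReadOptions options;
    options.workingColorSpace = image::EImageColorSpace::ACES2065_1;
    options.rawColorInterpretation = image::ERawColorInterpretation::DcpLinearProcessing;
    options.colorProfileFileName = dcpFilename;  // DCP file matching the camera maker + model
    options.useDCPColorMatrixOnly = false;       // also use the forward matrices when available
    options.doWBAfterDemosaicing = true;         // neutralize in applyLinear() instead of via libRaw user_mul
    options.demosaicingAlgo = "AHD";             // forwarded to the libRaw "raw:Demosaic" attribute
    options.highlightMode = 0;                   // forwarded to "raw:HighlightMode" (0 = clip)

    image::readImage(path, out, options);
}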