From 3c910a339f52e1e329bbe74b557329f4d5031797 Mon Sep 17 00:00:00 2001 From: demoulinv Date: Mon, 12 Jun 2023 07:51:26 +0200 Subject: [PATCH 1/6] [HDR merge] Add a test on mean luminance of set of images with same exposure in order to discard too dark LDR images before merging. --- src/aliceVision/hdr/brackets.cpp | 22 +++++++++++++++++++- src/aliceVision/hdr/brackets.hpp | 5 ++++- src/software/pipeline/main_LdrToHdrMerge.cpp | 6 +++++- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/src/aliceVision/hdr/brackets.cpp b/src/aliceVision/hdr/brackets.cpp index 4a2719b221..443485e8e7 100644 --- a/src/aliceVision/hdr/brackets.cpp +++ b/src/aliceVision/hdr/brackets.cpp @@ -127,7 +127,9 @@ bool estimateBracketsFromSfmData(std::vector> & out_targetViews, const std::vector>> & groups, int offsetRefBracketIndex, const std::string& lumaStatFilepath, const double meanTargetedLuma) +void selectTargetViews(std::vector>& out_targetViews, + std::vector>>& groups, int offsetRefBracketIndex, + const std::string& lumaStatFilepath, const double meanTargetedLuma, const double minLuma) { // If targetIndexesFilename cannot be opened or is not valid an error is thrown // For odd number, there is no ambiguity on the middle image. 
@@ -196,6 +198,7 @@ void selectTargetViews(std::vector> & out_targetV double minDiffWithLumaTarget = 1000.0; targetIndex = 0; + int firstValidIndex = 0; // A valid index corresponds to a mean luminance higher than minLuma for (int k = 0; k < lastIdx; ++k) { @@ -205,8 +208,25 @@ void selectTargetViews(std::vector> & out_targetV minDiffWithLumaTarget = diffWithLumaTarget; targetIndex = k; } + if (v_lumaMeanMean[k] < minLuma) + { + ++firstValidIndex; + } } ALICEVISION_LOG_INFO("offsetRefBracketIndex parameter automaticaly set to " << targetIndex - middleIndex); + + firstValidIndex = std::min(firstValidIndex, targetIndex - 1); + + ALICEVISION_LOG_INFO("Index of first image to be considered for merging: " << firstValidIndex); + + if (firstValidIndex > 0) + { + for (auto& group : groups) + { + group.erase(group.begin(), group.begin() + firstValidIndex); + } + targetIndex -= firstValidIndex; + } } for (auto& group : groups) diff --git a/src/aliceVision/hdr/brackets.hpp b/src/aliceVision/hdr/brackets.hpp index 756a0ac92a..50f22f1424 100644 --- a/src/aliceVision/hdr/brackets.hpp +++ b/src/aliceVision/hdr/brackets.hpp @@ -94,7 +94,10 @@ bool estimateBracketsFromSfmData(std::vector> & out_targetViews, const std::vector>>& groups, int offsetRefBracketIndex, const std::string& targetIndexesFilename = "", const double meanTargetedLuma = 0.4); +void selectTargetViews(std::vector>& out_targetViews, + std::vector>>& groups, int offsetRefBracketIndex, + const std::string& targetIndexesFilename = "", const double meanTargetedLuma = 0.4, + const double minLuma = 0.25); } } diff --git a/src/software/pipeline/main_LdrToHdrMerge.cpp b/src/software/pipeline/main_LdrToHdrMerge.cpp index 8bdd7e9dbf..346ee04b91 100644 --- a/src/software/pipeline/main_LdrToHdrMerge.cpp +++ b/src/software/pipeline/main_LdrToHdrMerge.cpp @@ -64,6 +64,7 @@ int aliceVision_main(int argc, char** argv) int channelQuantizationPower = 10; int offsetRefBracketIndex = 1000; // By default, use the automatic selection 
double meanTargetedLumaForMerging = 0.4; + double minLumaForMerging = 0.25; image::EImageColorSpace workingColorSpace = image::EImageColorSpace::SRGB; hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; @@ -103,6 +104,8 @@ int aliceVision_main(int argc, char** argv) "Zero to use the center bracket. +N to use a more exposed bracket or -N to use a less exposed backet.") ("meanTargetedLumaForMerging", po::value(&meanTargetedLumaForMerging)->default_value(meanTargetedLumaForMerging), "Mean expected luminance after merging step when input LDR images are decoded in sRGB color space. Must be in the range [0, 1].") + ("minLumaForMerging", po::value(&minLumaForMerging)->default_value(minLumaForMerging), + "Minimum mean luminance of LDR images for merging. Must be in the range [0, 1].") ("highlightTargetLux", po::value(&highlightTargetLux)->default_value(highlightTargetLux), "Highlights maximum luminance.") ("highlightCorrectionFactor", po::value(&highlightCorrectionFactor)->default_value(highlightCorrectionFactor), @@ -214,8 +217,9 @@ int aliceVision_main(int argc, char** argv) if (workingColorSpace != image::EImageColorSpace::SRGB) { meanTargetedLumaForMerging = std::pow((meanTargetedLumaForMerging + 0.055) / 1.055, 2.2); + minLumaForMerging = std::pow((minLumaForMerging + 0.055) / 1.055, 2.2); } - hdr::selectTargetViews(targetViews, groupedViews, offsetRefBracketIndex, lumaStatFilepath.string(), meanTargetedLumaForMerging); + hdr::selectTargetViews(targetViews, groupedViews, offsetRefBracketIndex, lumaStatFilepath.string(), meanTargetedLumaForMerging, minLumaForMerging); if ((targetViews.empty() || targetViews.size() != groupedViews.size()) && !isOffsetRefBracketIndexValid) { From 86cfafbad12fceebff4f43b51fdf8d1de4f4cfa8 Mon Sep 17 00:00:00 2001 From: demoulinv Date: Wed, 14 Jun 2023 09:07:01 +0200 Subject: [PATCH 2/6] [HDR merging] For each pixel, select exposures to be merged. 
--- src/aliceVision/hdr/brackets.cpp | 8 +- src/aliceVision/hdr/brackets.hpp | 9 +- src/aliceVision/hdr/hdrMerge.cpp | 241 ++++++++++++++----- src/aliceVision/hdr/hdrMerge.hpp | 3 +- src/software/pipeline/main_LdrToHdrMerge.cpp | 38 ++- 5 files changed, 227 insertions(+), 72 deletions(-) diff --git a/src/aliceVision/hdr/brackets.cpp b/src/aliceVision/hdr/brackets.cpp index 443485e8e7..b8972606d6 100644 --- a/src/aliceVision/hdr/brackets.cpp +++ b/src/aliceVision/hdr/brackets.cpp @@ -127,9 +127,9 @@ bool estimateBracketsFromSfmData(std::vector>& out_targetViews, - std::vector>>& groups, int offsetRefBracketIndex, - const std::string& lumaStatFilepath, const double meanTargetedLuma, const double minLuma) +int selectTargetViews(std::vector>& out_targetViews, + std::vector>>& groups, int offsetRefBracketIndex, + const std::string& lumaStatFilepath, const double meanTargetedLuma, const double minLuma) { // If targetIndexesFilename cannot be opened or is not valid an error is thrown // For odd number, there is no ambiguity on the middle image. 
@@ -239,7 +239,7 @@ void selectTargetViews(std::vector>& out_targetVi out_targetViews.push_back(group[targetIndex]); } - return; + return targetIndex; } } diff --git a/src/aliceVision/hdr/brackets.hpp b/src/aliceVision/hdr/brackets.hpp index 50f22f1424..aae0c9477f 100644 --- a/src/aliceVision/hdr/brackets.hpp +++ b/src/aliceVision/hdr/brackets.hpp @@ -93,11 +93,12 @@ bool estimateBracketsFromSfmData(std::vector>& out_targetViews, - std::vector>>& groups, int offsetRefBracketIndex, - const std::string& targetIndexesFilename = "", const double meanTargetedLuma = 0.4, - const double minLuma = 0.25); +int selectTargetViews(std::vector>& out_targetViews, + std::vector>>& groups, int offsetRefBracketIndex, + const std::string& targetIndexesFilename = "", const double meanTargetedLuma = 0.4, + const double minLuma = 0.25); } } diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index 2bd2388ead..3b5d1030e2 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -47,7 +47,8 @@ void hdrMerge::process(const std::vector< image::Image > &imag const rgbCurve &weight, const rgbCurve &response, image::Image &radiance, - float targetCameraExposure) + float targetCameraExposure, + int refImageIndex) { //checks assert(!response.isEmpty()); @@ -72,6 +73,28 @@ void hdrMerge::process(const std::vector< image::Image > &imag rgbCurve weightLongestExposure = weight; weightLongestExposure.freezeFirstPartValues(); + const std::string mergeInfoFilename = "C:/Temp/mergeInfo.csv"; + std::ofstream file(mergeInfoFilename); + + std::vector v_minRatio; + std::vector v_maxRatio; + + // For each exposure except the longuest, + // Compute a ratio between the next and the current exposure values. + // Deduce a range of ratii that will be used for enabling input data. 
+ // Duplicate the last range limits to associate it with the longuest exposure + for (std::size_t i = 0; i < times.size() - 1; i++) + { + const double refRatio = times[i + 1] / times[i]; + v_minRatio.push_back(0.25 * refRatio); + v_maxRatio.push_back(1.75 * refRatio); + } + v_minRatio.push_back(v_minRatio.back()); + v_maxRatio.push_back(v_maxRatio.back()); + + const double minValue = 0.05; + const double maxValue = 0.999; + #pragma omp parallel for for(int y = 0; y < height; ++y) { @@ -80,64 +103,164 @@ void hdrMerge::process(const std::vector< image::Image > &imag //for each pixels image::RGBfColor &radianceColor = radiance(y, x); - for(std::size_t channel = 0; channel < 3; ++channel) + //if ((x%1000 == 780) && (y%1000 == 446)) + //{ + // if(!file) + // { + // ALICEVISION_LOG_WARNING("Unable to create file " << mergeInfoFilename << " for storing merging info."); + // } + // else + // { + // file << x << "," << y << std::endl; + // file << "time,R,resp(R),coeff,resp(R)/time,G,resp(G),coeff,resp(G)/time,B,resp(B),coeff,resp(B)/time," << std::endl; + // for(std::size_t i = 0; i < images.size(); i++) + // { + // const double time = times[i]; + // file << time << ","; + // for(std::size_t channel = 0; channel < 3; ++channel) + // { + // const double value = images[i](y, x)(channel); + // const double r = response(value, channel); + // double w = std::max(0.001f, (i == 0) ? weightShortestExposure(value, channel) + // : (i == (images.size() - 1) ? 
weightLongestExposure(value, channel) + // : weight(value, channel))); + + // file << value << ","; + // file << r << ","; + // file << w << ","; + // file << r/time << ","; + // } + // file << std::endl; + // } + + // } + //} + + const double meanValueHighExp = (images[images.size() - 1](y, x)(0) + images[images.size() - 1](y, x)(1) + + images[images.size() - 1](y, x)(2)) / + 3.0; + + const double noiseThreshold = 0.1; + + if(meanValueHighExp < noiseThreshold) // Noise case { - double wsum = 0.0; - double wdiv = 0.0; - - // Merge shortest exposure - { - int exposureIndex = 0; - - // for each image - const double value = images[exposureIndex](y, x)(channel); - const double time = times[exposureIndex]; - // - // weightShortestExposure: _______ - // _______/ - // 0 1 - double w = std::max(0.001f, weightShortestExposure(value, channel)); - - const double r = response(value, channel); - - wsum += w * r / time; - wdiv += w; - } - // Merge intermediate exposures - for(std::size_t i = 1; i < images.size() - 1; ++i) - { - // for each image - const double value = images[i](y, x)(channel); - const double time = times[i]; - // - // weight: ____ - // _______/ \________ - // 0 1 - double w = std::max(0.001f, weight(value, channel)); - - const double r = response(value, channel); - wsum += w * r / time; - wdiv += w; - } - // Merge longest exposure - { - int exposureIndex = images.size() - 1; - - // for each image - const double value = images[exposureIndex](y, x)(channel); - const double time = times[exposureIndex]; - // - // weightLongestExposure: ____________ - // \_______ - // 0 1 - double w = std::max(0.001f, weightLongestExposure(value, channel)); - - const double r = response(value, channel); - - wsum += w * r / time; - wdiv += w; - } - radianceColor(channel) = wsum / std::max(0.001, wdiv) * targetCameraExposure; + for(std::size_t channel = 0; channel < 3; ++channel) + { + radianceColor(channel) = targetCameraExposure * response(meanValueHighExp, channel) / 
times[images.size() - 1]; + } + } + else + { + std::vector> vv_ratio; + std::vector> vv_coeff; + std::vector> vv_coeff_filt; + std::vector v_sumCoeff; + std::vector> vv_normalizedValue; + + // Per channel, and per exposition compute a mixing coefficient and the ratio between linearized values at the next and the current exposure. + // Keep the coeffcient if the computed ratio is in the predefined range and if the linearized input value is significant enough (higher than a threshold). + // To deal with highlights, keep the shortest exposure if the second one is saturated. + + for(std::size_t channel = 0; channel < 3; ++channel) + { + std::vector v_ratio; + std::vector v_coeff; + std::vector v_coeff_filt; + std::vector v_normalizedValue; + + { + const double value = response(images[0](y, x)(channel), channel); + const double ratio = (value > 0.0) ? response(images[1](y, x)(channel), channel) / value : 0.0; + const double normalizedValue = value / times[0]; + double coeff = std::max(0.001f, weightShortestExposure(images[0](y, x)(channel), channel)); + + const bool coeffOK = (value > minValue && ratio > v_minRatio[0] && ratio < v_maxRatio[0]) || + response(images[1](y, x)(channel), channel) > maxValue; + + v_normalizedValue.push_back(normalizedValue); + v_ratio.push_back(ratio); + v_coeff.push_back(coeff); + v_coeff_filt.push_back(coeffOK ? coeff : 0.0); + } + + for(std::size_t e = 1; e < images.size() - 1; e++) + { + const double value = response(images[e](y, x)(channel), channel); + const double normalizedValue = value / times[e]; + const double ratio = (value > 0.0) ? response(images[e + 1](y, x)(channel), channel) / value : 0.0; + double coeff = std::max(0.001f, weight(value, channel)); + + const bool coeffOK = (value > minValue && ratio > v_minRatio[e] && ratio < v_maxRatio[e]); + + v_normalizedValue.push_back(normalizedValue); + v_ratio.push_back(ratio); + v_coeff.push_back(coeff); + v_coeff_filt.push_back(coeffOK ? 
coeff : 0.0); + } + + { + const double value = response(images[images.size() - 1](y, x)(channel), channel); + const double ratio = v_ratio.back(); + const double normalizedValue = value / times[images.size() - 1]; + double coeff = std::max( + 0.001f, + weightLongestExposure(response(images[images.size() - 1](y, x)(channel), channel), channel)); + + const bool coeffOK = (value < maxValue && ratio > v_minRatio[images.size() - 1] && + ratio < v_maxRatio[images.size() - 1]); + + v_normalizedValue.push_back(normalizedValue); + //v_ratio.push_back(ratio); + v_coeff.push_back(coeff); + v_coeff_filt.push_back(coeffOK ? coeff : 0.0); + } + + //vv_ratio.push_back(v_ratio); + vv_coeff.push_back(v_coeff); + vv_coeff_filt.push_back(v_coeff_filt); + vv_normalizedValue.push_back(v_normalizedValue); + } + + std::vector> vv_coeff_final = vv_coeff_filt; + v_sumCoeff = {0.0, 0.0, 0.0}; + + // Per exposure and per channel, + // If the coeff has been discarded for the current channel but is valid for at least one of the two other channels, restore it's original value. + // Per channel, sum the valid coefficients. + for(std::size_t e = 0; e < images.size(); e++) + { + for(std::size_t channel = 0; channel < 3; ++channel) + { + if(vv_coeff_final[channel][e] == 0.0 && + (vv_coeff_filt[(channel + 1) % 3][e] != 0.0 || vv_coeff_filt[(channel + 2) % 3][e] != 0.0)) + { + vv_coeff_final[channel][e] = vv_coeff[channel][e]; + } + v_sumCoeff[channel] += vv_coeff_final[channel][e]; + } + } + + // Per channel, if the sum of the coefficients is null, restore the coefficient corresponding to the reference image. + // Due to the previous step, if the sum of the coefficients is null for a given channel it should be also null for the two other channels. + if(v_sumCoeff[0] == 0.0) + { + for(std::size_t channel = 0; channel < 3; ++channel) + { + vv_coeff_final[channel][refImageIndex] = 1.0; + v_sumCoeff[channel] = 1.0; + } + } + + // Compute the final result and adjust the exposure to the reference one. 
+ for(std::size_t channel = 0; channel < 3; ++channel) + { + double v = 0.0; + for(std::size_t i = 0; i < images.size(); ++i) + { + v += vv_coeff_final[channel][i] * vv_normalizedValue[channel][i]; + } + radianceColor(channel) = targetCameraExposure * (v != 0.0 ? v : vv_normalizedValue[channel][refImageIndex]) / v_sumCoeff[channel]; + } } } } @@ -166,8 +289,8 @@ void hdrMerge::postProcessHighlight(const std::vector< image::Image isPixelClamped(width, height); #pragma omp parallel for diff --git a/src/aliceVision/hdr/hdrMerge.hpp b/src/aliceVision/hdr/hdrMerge.hpp index ad6035f7c5..f36192f550 100644 --- a/src/aliceVision/hdr/hdrMerge.hpp +++ b/src/aliceVision/hdr/hdrMerge.hpp @@ -29,7 +29,8 @@ class hdrMerge { const rgbCurve &weight, const rgbCurve &response, image::Image &radiance, - float targetCameraExposure); + float targetCameraExposure, + int refImageIndex); void postProcessHighlight(const std::vector< image::Image > &images, const std::vector ×, diff --git a/src/software/pipeline/main_LdrToHdrMerge.cpp b/src/software/pipeline/main_LdrToHdrMerge.cpp index 346ee04b91..3548f3d312 100644 --- a/src/software/pipeline/main_LdrToHdrMerge.cpp +++ b/src/software/pipeline/main_LdrToHdrMerge.cpp @@ -64,7 +64,7 @@ int aliceVision_main(int argc, char** argv) int channelQuantizationPower = 10; int offsetRefBracketIndex = 1000; // By default, use the automatic selection double meanTargetedLumaForMerging = 0.4; - double minLumaForMerging = 0.25; + double minLumaForMerging = 0.0; image::EImageColorSpace workingColorSpace = image::EImageColorSpace::SRGB; hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; @@ -198,6 +198,7 @@ int aliceVision_main(int argc, char** argv) } } std::vector> targetViews; + int estimatedTargetIndex; if (!byPass) { @@ -217,9 +218,9 @@ int aliceVision_main(int argc, char** argv) if (workingColorSpace != image::EImageColorSpace::SRGB) { meanTargetedLumaForMerging = std::pow((meanTargetedLumaForMerging + 0.055) / 1.055, 2.2); - 
minLumaForMerging = std::pow((minLumaForMerging + 0.055) / 1.055, 2.2); + minLumaForMerging = minLumaForMerging == 0.0 ? 0.0 : std::pow((minLumaForMerging + 0.055) / 1.055, 2.2); } - hdr::selectTargetViews(targetViews, groupedViews, offsetRefBracketIndex, lumaStatFilepath.string(), meanTargetedLumaForMerging, minLumaForMerging); + estimatedTargetIndex = hdr::selectTargetViews(targetViews, groupedViews, offsetRefBracketIndex, lumaStatFilepath.string(), meanTargetedLumaForMerging, minLumaForMerging); if ((targetViews.empty() || targetViews.size() != groupedViews.size()) && !isOffsetRefBracketIndexValid) { @@ -346,7 +347,7 @@ int aliceVision_main(int argc, char** argv) hdr::hdrMerge merge; sfmData::ExposureSetting targetCameraSetting = targetView->getCameraExposureSetting(); ALICEVISION_LOG_INFO("[" << g - rangeStart << "/" << rangeSize << "] Merge " << group.size() << " LDR images " << g << "/" << groupedViews.size()); - merge.process(images, exposures, fusionWeight, response, HDRimage, targetCameraSetting.getExposure()); + merge.process(images, exposures, fusionWeight, response, HDRimage, targetCameraSetting.getExposure(), estimatedTargetIndex); if(highlightCorrectionFactor > 0.0f) { merge.postProcessHighlight(images, exposures, fusionWeight, response, HDRimage, targetCameraSetting.getExposure(), highlightCorrectionFactor, highlightTargetLux); @@ -358,6 +359,35 @@ int aliceVision_main(int argc, char** argv) HDRimage = images[0]; } + //for (int x = 252; x < 7000; x += 1000) + //{ + // for (int y = 750; y < 4000; y += 1000) + // { + // for(int l = x - 10; l <= x + 11; l++) + // { + // image::RGBfColor& pix1 = HDRimage(y - 10, l); + // pix1[0] = 1.f; + // pix1[1] = 0.f; + // pix1[2] = 0.f; + // image::RGBfColor& pix2 = HDRimage(y + 11, l); + // pix2[0] = 1.f; + // pix2[1] = 0.f; + // pix2[2] = 0.f; + // } + // for(int c = y - 10; c <= y + 11; c++) + // { + // image::RGBfColor& pix1 = HDRimage(c, x - 10); + // pix1[0] = 1.f; + // pix1[1] = 0.f; + // pix1[2] = 0.f; + // 
image::RGBfColor& pix2 = HDRimage(c, x + 11); + // pix2[0] = 1.f; + // pix2[1] = 0.f; + // pix2[2] = 0.f; + // } + // } + //} + boost::filesystem::path p(targetView->getImagePath()); const std::string hdrImagePath = getHdrImagePath(outputPath, g, keepSourceImageName ? p.stem().string() : ""); From 73fc65f9774ce635980084c6a8ae59319ed0c310 Mon Sep 17 00:00:00 2001 From: demoulinv Date: Mon, 19 Jun 2023 14:52:20 +0200 Subject: [PATCH 3/6] [HDR merging] Add option and parameters dedicated to advanced HDR merging. --- src/aliceVision/hdr/hdrMerge.cpp | 27 ++++++++++---------- src/aliceVision/hdr/hdrMerge.hpp | 20 ++++++++++----- src/software/pipeline/main_LdrToHdrMerge.cpp | 26 ++++++++++++++++++- 3 files changed, 52 insertions(+), 21 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index 3b5d1030e2..c90ba01dd0 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -47,8 +47,7 @@ void hdrMerge::process(const std::vector< image::Image > &imag const rgbCurve &weight, const rgbCurve &response, image::Image &radiance, - float targetCameraExposure, - int refImageIndex) + MergingParams &mergingParams) { //checks assert(!response.isEmpty()); @@ -83,17 +82,19 @@ void hdrMerge::process(const std::vector< image::Image > &imag // Compute a ratio between the next and the current exposure values. // Deduce a range of ratii that will be used for enabling input data. // Duplicate the last range limits to associate it with the longuest exposure - for (std::size_t i = 0; i < times.size() - 1; i++) + const double minTolerance = (mergingParams.dataRatioTolerance < 0.0) ? 1.0 : mergingParams.dataRatioTolerance; + const double maxTolerance = (mergingParams.dataRatioTolerance < 0.0) ? 
1000.0 : mergingParams.dataRatioTolerance; + for(std::size_t i = 0; i < times.size() - 1; i++) { const double refRatio = times[i + 1] / times[i]; - v_minRatio.push_back(0.25 * refRatio); - v_maxRatio.push_back(1.75 * refRatio); + v_minRatio.push_back((1.0 - minTolerance) * refRatio); + v_maxRatio.push_back((1.0 + maxTolerance) * refRatio); } v_minRatio.push_back(v_minRatio.back()); v_maxRatio.push_back(v_maxRatio.back()); - const double minValue = 0.05; - const double maxValue = 0.999; + const double minValue = mergingParams.minSignificantValue; + const double maxValue = mergingParams.maxSignificantValue; #pragma omp parallel for for(int y = 0; y < height; ++y) @@ -140,13 +141,11 @@ void hdrMerge::process(const std::vector< image::Image > &imag images[images.size() - 1](y, x)(2)) / 3.0; - const double noiseThreshold = 0.1; - - if(meanValueHighExp < noiseThreshold) // Noise case + if(meanValueHighExp < mergingParams.noiseThreshold) // Noise case { for(std::size_t channel = 0; channel < 3; ++channel) { - radianceColor(channel) = targetCameraExposure * response(meanValueHighExp, channel) / times[images.size() - 1]; + radianceColor(channel) = mergingParams.targetCameraExposure * response(meanValueHighExp, channel) / times[images.size() - 1]; } } else @@ -246,7 +245,7 @@ void hdrMerge::process(const std::vector< image::Image > &imag { for(std::size_t channel = 0; channel < 3; ++channel) { - vv_coeff_final[channel][refImageIndex] = 1.0; + vv_coeff_final[channel][mergingParams.refImageIndex] = 1.0; v_sumCoeff[channel] = 1.0; } } @@ -259,7 +258,9 @@ void hdrMerge::process(const std::vector< image::Image > &imag { v += vv_coeff_final[channel][i] * vv_normalizedValue[channel][i]; } - radianceColor(channel) = targetCameraExposure * (v != 0.0 ? v : vv_normalizedValue[channel][refImageIndex]) / v_sumCoeff[channel]; + radianceColor(channel) = mergingParams.targetCameraExposure * + (v != 0.0 ? 
v : vv_normalizedValue[channel][mergingParams.refImageIndex]) / + v_sumCoeff[channel]; } } } diff --git a/src/aliceVision/hdr/hdrMerge.hpp b/src/aliceVision/hdr/hdrMerge.hpp index f36192f550..fc800bee3c 100644 --- a/src/aliceVision/hdr/hdrMerge.hpp +++ b/src/aliceVision/hdr/hdrMerge.hpp @@ -12,6 +12,16 @@ namespace aliceVision { namespace hdr { + +struct MergingParams +{ + double minSignificantValue = 0.05; + double maxSignificantValue = 0.999; + double dataRatioTolerance = 0.75; // +/- 75% + double noiseThreshold = 0.1; + float targetCameraExposure; + int refImageIndex; +}; class hdrMerge { public: @@ -24,13 +34,9 @@ class hdrMerge { * @param targetCameraExposure * @param response */ - void process(const std::vector< image::Image > &images, - const std::vector ×, - const rgbCurve &weight, - const rgbCurve &response, - image::Image &radiance, - float targetCameraExposure, - int refImageIndex); + void process(const std::vector>& images, const std::vector& times, + const rgbCurve& weight, const rgbCurve& response, image::Image& radiance, + MergingParams& mergingParams); void postProcessHighlight(const std::vector< image::Image > &images, const std::vector ×, diff --git a/src/software/pipeline/main_LdrToHdrMerge.cpp b/src/software/pipeline/main_LdrToHdrMerge.cpp index 3548f3d312..449ecfb16f 100644 --- a/src/software/pipeline/main_LdrToHdrMerge.cpp +++ b/src/software/pipeline/main_LdrToHdrMerge.cpp @@ -65,6 +65,11 @@ int aliceVision_main(int argc, char** argv) int offsetRefBracketIndex = 1000; // By default, use the automatic selection double meanTargetedLumaForMerging = 0.4; double minLumaForMerging = 0.0; + bool enablePixelwiseAdvancedMerging = true; + double noiseThreshold = 0.1; + double minSignificantValue = 0.05; + double maxSignificantValue = 0.999; + double toleranceOnRatio = 0.75; image::EImageColorSpace workingColorSpace = image::EImageColorSpace::SRGB; hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; @@ -106,6 +111,16 @@ int 
aliceVision_main(int argc, char** argv) "Mean expected luminance after merging step when input LDR images are decoded in sRGB color space. Must be in the range [0, 1].") ("minLumaForMerging", po::value(&minLumaForMerging)->default_value(minLumaForMerging), "Minimum mean luminance of LDR images for merging. Must be in the range [0, 1].") + ("enablePixelwiseAdvancedMerging", po::value(&enablePixelwiseAdvancedMerging)->default_value(enablePixelwiseAdvancedMerging), + "Enable pixelwise advanced merging to reduce noise.") + ("noiseThreshold", po::value(&noiseThreshold)->default_value(noiseThreshold), + "Value under which input channel value is considered as noise. Used in advanced pixelwise merging.") + ("minSignificantValue", po::value(&minSignificantValue)->default_value(minSignificantValue), + "Minimum channel input value to be considered in advanced pixelwise merging. Used in advanced pixelwise merging.") + ("maxSignificantValue", po::value(&maxSignificantValue)->default_value(maxSignificantValue), + "Maximum channel input value to be considered in advanced pixelwise merging. Used in advanced pixelwise merging.") + ("toleranceOnRatio", po::value(&toleranceOnRatio)->default_value(toleranceOnRatio), + "Tolerance on ratio between two input channel values at two consecutive exposures. 
Used in advanced pixelwise merging.") ("highlightTargetLux", po::value(&highlightTargetLux)->default_value(highlightTargetLux), "Highlights maximum luminance.") ("highlightCorrectionFactor", po::value(&highlightCorrectionFactor)->default_value(highlightCorrectionFactor), @@ -347,7 +362,16 @@ int aliceVision_main(int argc, char** argv) hdr::hdrMerge merge; sfmData::ExposureSetting targetCameraSetting = targetView->getCameraExposureSetting(); ALICEVISION_LOG_INFO("[" << g - rangeStart << "/" << rangeSize << "] Merge " << group.size() << " LDR images " << g << "/" << groupedViews.size()); - merge.process(images, exposures, fusionWeight, response, HDRimage, targetCameraSetting.getExposure(), estimatedTargetIndex); + + hdr::MergingParams mergingParams; + mergingParams.targetCameraExposure = targetCameraSetting.getExposure(); + mergingParams.refImageIndex = estimatedTargetIndex; + mergingParams.noiseThreshold = enablePixelwiseAdvancedMerging ? noiseThreshold : -1.0; + mergingParams.minSignificantValue = enablePixelwiseAdvancedMerging ? minSignificantValue : -1.0; + mergingParams.maxSignificantValue = enablePixelwiseAdvancedMerging ? maxSignificantValue : 1000.0; + mergingParams.dataRatioTolerance = enablePixelwiseAdvancedMerging ? toleranceOnRatio : -1.0; + + merge.process(images, exposures, fusionWeight, response, HDRimage, mergingParams);//, targetCameraSetting.getExposure(), estimatedTargetIndex); if(highlightCorrectionFactor > 0.0f) { merge.postProcessHighlight(images, exposures, fusionWeight, response, HDRimage, targetCameraSetting.getExposure(), highlightCorrectionFactor, highlightTargetLux); From fbe367405c1386f99b76954bb154f95ca3dd9a92 Mon Sep 17 00:00:00 2001 From: demoulinv Date: Tue, 27 Jun 2023 11:03:10 +0200 Subject: [PATCH 4/6] [HDR] Merging algorithm simplification. 
--- src/aliceVision/hdr/brackets.cpp | 20 +-- src/aliceVision/hdr/brackets.hpp | 3 +- src/aliceVision/hdr/hdrMerge.cpp | 161 +++++++++---------- src/aliceVision/hdr/hdrMerge.hpp | 5 +- src/software/pipeline/main_LdrToHdrMerge.cpp | 66 +++++--- 5 files changed, 130 insertions(+), 125 deletions(-) diff --git a/src/aliceVision/hdr/brackets.cpp b/src/aliceVision/hdr/brackets.cpp index b8972606d6..c1aa424192 100644 --- a/src/aliceVision/hdr/brackets.cpp +++ b/src/aliceVision/hdr/brackets.cpp @@ -129,7 +129,7 @@ bool estimateBracketsFromSfmData(std::vector>& out_targetViews, std::vector>>& groups, int offsetRefBracketIndex, - const std::string& lumaStatFilepath, const double meanTargetedLuma, const double minLuma) + const std::string& lumaStatFilepath, const double meanTargetedLuma) { // If targetIndexesFilename cannot be opened or is not valid an error is thrown // For odd number, there is no ambiguity on the middle image. @@ -198,7 +198,6 @@ int selectTargetViews(std::vector>& out_targetVie double minDiffWithLumaTarget = 1000.0; targetIndex = 0; - int firstValidIndex = 0; // A valid index corresponds to a mean luminance higher than minLuma for (int k = 0; k < lastIdx; ++k) { @@ -208,25 +207,8 @@ int selectTargetViews(std::vector>& out_targetVie minDiffWithLumaTarget = diffWithLumaTarget; targetIndex = k; } - if (v_lumaMeanMean[k] < minLuma) - { - ++firstValidIndex; - } } ALICEVISION_LOG_INFO("offsetRefBracketIndex parameter automaticaly set to " << targetIndex - middleIndex); - - firstValidIndex = std::min(firstValidIndex, targetIndex - 1); - - ALICEVISION_LOG_INFO("Index of first image to be considered for merging: " << firstValidIndex); - - if (firstValidIndex > 0) - { - for (auto& group : groups) - { - group.erase(group.begin(), group.begin() + firstValidIndex); - } - targetIndex -= firstValidIndex; - } } for (auto& group : groups) diff --git a/src/aliceVision/hdr/brackets.hpp b/src/aliceVision/hdr/brackets.hpp index aae0c9477f..ee483c4345 100644 --- 
a/src/aliceVision/hdr/brackets.hpp +++ b/src/aliceVision/hdr/brackets.hpp @@ -97,8 +97,7 @@ bool estimateBracketsFromSfmData(std::vector>& out_targetViews, std::vector>>& groups, int offsetRefBracketIndex, - const std::string& targetIndexesFilename = "", const double meanTargetedLuma = 0.4, - const double minLuma = 0.25); + const std::string& targetIndexesFilename = "", const double meanTargetedLuma = 0.4); } } diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index c90ba01dd0..118b4e20e1 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -47,6 +47,9 @@ void hdrMerge::process(const std::vector< image::Image > &imag const rgbCurve &weight, const rgbCurve &response, image::Image &radiance, + image::Image &lowLight, + image::Image &highLight, + image::Image &noMidLight, MergingParams &mergingParams) { //checks @@ -72,39 +75,28 @@ void hdrMerge::process(const std::vector< image::Image > &imag rgbCurve weightLongestExposure = weight; weightLongestExposure.freezeFirstPartValues(); - const std::string mergeInfoFilename = "C:/Temp/mergeInfo.csv"; - std::ofstream file(mergeInfoFilename); - - std::vector v_minRatio; - std::vector v_maxRatio; - - // For each exposure except the longuest, - // Compute a ratio between the next and the current exposure values. - // Deduce a range of ratii that will be used for enabling input data. - // Duplicate the last range limits to associate it with the longuest exposure - const double minTolerance = (mergingParams.dataRatioTolerance < 0.0) ? 1.0 : mergingParams.dataRatioTolerance; - const double maxTolerance = (mergingParams.dataRatioTolerance < 0.0) ? 
1000.0 : mergingParams.dataRatioTolerance; - for(std::size_t i = 0; i < times.size() - 1; i++) - { - const double refRatio = times[i + 1] / times[i]; - v_minRatio.push_back((1.0 - minTolerance) * refRatio); - v_maxRatio.push_back((1.0 + maxTolerance) * refRatio); - } - v_minRatio.push_back(v_minRatio.back()); - v_maxRatio.push_back(v_maxRatio.back()); + //const std::string mergeInfoFilename = "C:/Temp/mergeInfo.csv"; + //std::ofstream file(mergeInfoFilename); const double minValue = mergingParams.minSignificantValue; const double maxValue = mergingParams.maxSignificantValue; + highLight.resize(width, height, true, image::RGBfColor(0.f, 0.f, 0.f)); + lowLight.resize(width, height, true, image::RGBfColor(0.f, 0.f, 0.f)); + noMidLight.resize(width, height, true, image::RGBfColor(0.f, 0.f, 0.f)); + #pragma omp parallel for for(int y = 0; y < height; ++y) { for(int x = 0; x < width; ++x) { //for each pixels - image::RGBfColor &radianceColor = radiance(y, x); + image::RGBfColor& radianceColor = radiance(y, x); + image::RGBfColor& highLightColor = highLight(y, x); + image::RGBfColor& lowLightColor = lowLight(y, x); + image::RGBfColor& noMidLightColor = noMidLight(y, x); - //if ((x%1000 == 780) && (y%1000 == 446)) + //if ((x%1000 == 650) && (y%1000 == 850)) //{ // if(!file) // { @@ -146,121 +138,124 @@ void hdrMerge::process(const std::vector< image::Image > &imag for(std::size_t channel = 0; channel < 3; ++channel) { radianceColor(channel) = mergingParams.targetCameraExposure * response(meanValueHighExp, channel) / times[images.size() - 1]; + highLightColor(channel) = 0.0; + lowLightColor(channel) = 1.0; + noMidLightColor(channel) = 0.0; } } else { - std::vector> vv_ratio; std::vector> vv_coeff; - std::vector> vv_coeff_filt; - std::vector v_sumCoeff; + std::vector> vv_value; std::vector> vv_normalizedValue; - // Per channel, and per exposition compute a mixing coefficient and the ratio between linearized values at the next and the current exposure. 
- // Keep the coeffcient if the computed ratio is in the predefined range and if the linearized input value is significant enough (higher than a threshold). - // To deal with highlights, keep the shortest exposure if the second one is saturated. + // Compute merging range + std::vector v_firstIndex; + std::vector v_lastIndex; + for(std::size_t channel = 0; channel < 3; ++channel) + { + int firstIndex = mergingParams.refImageIndex; + while(firstIndex > 0 && + (response(images[firstIndex](y, x)(channel), channel) > 0.05 || firstIndex == images.size() - 1)) + { + firstIndex--; + } + v_firstIndex.push_back(firstIndex); + + int lastIndex = v_firstIndex[channel] + 1; + while(lastIndex < images.size() - 1 && response(images[lastIndex](y, x)(channel), channel) < 0.995) + { + lastIndex++; + } + v_lastIndex.push_back(lastIndex); + } + // Compute merging coeffs and values to be merged for(std::size_t channel = 0; channel < 3; ++channel) { - std::vector v_ratio; std::vector v_coeff; - std::vector v_coeff_filt; std::vector v_normalizedValue; + std::vector v_value; { const double value = response(images[0](y, x)(channel), channel); - const double ratio = (value > 0.0) ? response(images[1](y, x)(channel), channel) / value : 0.0; const double normalizedValue = value / times[0]; - double coeff = std::max(0.001f, weightShortestExposure(images[0](y, x)(channel), channel)); - - const bool coeffOK = (value > minValue && ratio > v_minRatio[0] && ratio < v_maxRatio[0]) || - response(images[1](y, x)(channel), channel) > maxValue; + double coeff = std::max(0.001f, weightShortestExposure(value, channel)); + v_value.push_back(value); v_normalizedValue.push_back(normalizedValue); - v_ratio.push_back(ratio); v_coeff.push_back(coeff); - v_coeff_filt.push_back(coeffOK ? coeff : 0.0); } for(std::size_t e = 1; e < images.size() - 1; e++) { const double value = response(images[e](y, x)(channel), channel); const double normalizedValue = value / times[e]; - const double ratio = (value > 0.0) ? 
response(images[e + 1](y, x)(channel), channel) / value : 0.0; double coeff = std::max(0.001f, weight(value, channel)); - const bool coeffOK = (value > minValue && ratio > v_minRatio[e] && ratio < v_maxRatio[e]); - + v_value.push_back(value); v_normalizedValue.push_back(normalizedValue); - v_ratio.push_back(ratio); v_coeff.push_back(coeff); - v_coeff_filt.push_back(coeffOK ? coeff : 0.0); } { const double value = response(images[images.size() - 1](y, x)(channel), channel); - const double ratio = v_ratio.back(); const double normalizedValue = value / times[images.size() - 1]; - double coeff = std::max( - 0.001f, - weightLongestExposure(response(images[images.size() - 1](y, x)(channel), channel), channel)); - - const bool coeffOK = (value < maxValue && ratio > v_minRatio[images.size() - 1] && - ratio < v_maxRatio[images.size() - 1]); + double coeff = std::max(0.001f, weightLongestExposure(value, channel)); + v_value.push_back(value); v_normalizedValue.push_back(normalizedValue); - //v_ratio.push_back(ratio); v_coeff.push_back(coeff); - v_coeff_filt.push_back(coeffOK ? coeff : 0.0); } - //vv_ratio.push_back(v_ratio); vv_coeff.push_back(v_coeff); - vv_coeff_filt.push_back(v_coeff_filt); vv_normalizedValue.push_back(v_normalizedValue); + vv_value.push_back(v_value); } - std::vector> vv_coeff_final = vv_coeff_filt; - v_sumCoeff = {0.0, 0.0, 0.0}; - - // Per exposure and per channel, - // If the coeff has been discarded for the current channel but is valid for at least one of the two other channels, restore it's original value. - // Per channel, sum the valid coefficients. 
- for(std::size_t e = 0; e < images.size(); e++) + // Compute light masks if required (monitoring and debug purposes) + if(mergingParams.computeLightMasks) { for(std::size_t channel = 0; channel < 3; ++channel) { - if(vv_coeff_final[channel][e] == 0.0 && - (vv_coeff_filt[(channel + 1) % 3][e] != 0.0 || vv_coeff_filt[(channel + 2) % 3][e] != 0.0)) + int idxMaxValue = 0; + int idxMinValue = 0; + double maxValue = 0.0; + double minValue = 10000.0; + bool jump = true; + for(std::size_t e = 0; e < images.size(); ++e) { - vv_coeff_final[channel][e] = vv_coeff[channel][e]; + if(vv_value[channel][e] > maxValue) + { + maxValue = vv_value[channel][e]; + idxMaxValue = e; + } + if(vv_value[channel][e] < minValue) + { + minValue = vv_value[channel][e]; + idxMinValue = e; + } + jump = jump && ((vv_value[channel][e] < 0.1 && e < images.size() - 1) || + (vv_value[channel][e] > 0.9 && e > 0)); } - v_sumCoeff[channel] += vv_coeff_final[channel][e]; - } - } - - // Per channel, if the sum of the coefficients is null, restore the coefficient corresponding to the reference image. - // Due to the previous step, if the sum of the coefficients is null for a given channel it should be also null for the two other channels. - if(v_sumCoeff[0] == 0.0) - { - for(std::size_t channel = 0; channel < 3; ++channel) - { - vv_coeff_final[channel][mergingParams.refImageIndex] = 1.0; - v_sumCoeff[channel] = 1.0; + highLightColor(channel) = minValue > 0.9 ? 1.0 : 0.0; + lowLightColor(channel) = maxValue < 0.1 ? 1.0 : 0.0; + noMidLightColor(channel) = jump ? 1.0 : 0.0; } } // Compute the final result and adjust the exposure to the reference one. for(std::size_t channel = 0; channel < 3; ++channel) { - double v = 0.0; - for(std::size_t i = 0; i < images.size(); ++i) - { - v += vv_coeff_final[channel][i] * vv_normalizedValue[channel][i]; - } - radianceColor(channel) = mergingParams.targetCameraExposure * - (v != 0.0 ? 
v : vv_normalizedValue[channel][mergingParams.refImageIndex]) / - v_sumCoeff[channel]; + double v = 0.0; + double sumCoeff = 0.0; + for(std::size_t i = v_firstIndex[channel]; i <= v_lastIndex[channel]; ++i) + { + v += vv_coeff[channel][i] * vv_normalizedValue[channel][i]; + sumCoeff += vv_coeff[channel][i]; + } + radianceColor(channel) = mergingParams.targetCameraExposure * + (sumCoeff != 0.0 ? v / sumCoeff : vv_normalizedValue[channel][mergingParams.refImageIndex]); } } } diff --git a/src/aliceVision/hdr/hdrMerge.hpp b/src/aliceVision/hdr/hdrMerge.hpp index fc800bee3c..0725cdd5f5 100644 --- a/src/aliceVision/hdr/hdrMerge.hpp +++ b/src/aliceVision/hdr/hdrMerge.hpp @@ -16,11 +16,11 @@ namespace hdr { struct MergingParams { double minSignificantValue = 0.05; - double maxSignificantValue = 0.999; - double dataRatioTolerance = 0.75; // +/- 75% + double maxSignificantValue = 0.995; double noiseThreshold = 0.1; float targetCameraExposure; int refImageIndex; + bool computeLightMasks = false; }; class hdrMerge { @@ -36,6 +36,7 @@ class hdrMerge { */ void process(const std::vector>& images, const std::vector& times, const rgbCurve& weight, const rgbCurve& response, image::Image& radiance, + image::Image& lowLight, image::Image& highLight, image::Image& noMidLight, MergingParams& mergingParams); void postProcessHighlight(const std::vector< image::Image > &images, diff --git a/src/software/pipeline/main_LdrToHdrMerge.cpp b/src/software/pipeline/main_LdrToHdrMerge.cpp index 449ecfb16f..4416779ccd 100644 --- a/src/software/pipeline/main_LdrToHdrMerge.cpp +++ b/src/software/pipeline/main_LdrToHdrMerge.cpp @@ -36,11 +36,11 @@ using namespace aliceVision; namespace po = boost::program_options; namespace fs = boost::filesystem; -std::string getHdrImagePath(const std::string& outputPath, std::size_t g, const std::string& rootname="") +std::string getHdrImagePath(const std::string& outputPath, std::size_t g, const std::string& rootname = "") { // Output image file path 
std::stringstream sstream; - if (rootname == "") + if(rootname == "") { sstream << "hdr_" << std::setfill('0') << std::setw(4) << g << ".exr"; } @@ -52,6 +52,21 @@ std::string getHdrImagePath(const std::string& outputPath, std::size_t g, const return hdrImagePath; } +std::string getHdrMaskPath(const std::string& outputPath, std::size_t g, const std::string& maskname, const std::string& rootname = "") +{ + // Output image file path + std::stringstream sstream; + if(rootname == "") + { + sstream << "hdrMask_" << maskname << "_" << std::setfill('0') << std::setw(4) << g << ".exr"; + } + else + { + sstream << rootname << "_" << maskname << ".exr"; + } + const std::string hdrImagePath = (fs::path(outputPath) / sstream.str()).string(); + return hdrImagePath; +} int aliceVision_main(int argc, char** argv) { @@ -64,12 +79,10 @@ int aliceVision_main(int argc, char** argv) int channelQuantizationPower = 10; int offsetRefBracketIndex = 1000; // By default, use the automatic selection double meanTargetedLumaForMerging = 0.4; - double minLumaForMerging = 0.0; - bool enablePixelwiseAdvancedMerging = true; double noiseThreshold = 0.1; double minSignificantValue = 0.05; - double maxSignificantValue = 0.999; - double toleranceOnRatio = 0.75; + double maxSignificantValue = 0.995; + bool computeLightMasks = false; image::EImageColorSpace workingColorSpace = image::EImageColorSpace::SRGB; hdr::EFunctionType fusionWeightFunction = hdr::EFunctionType::GAUSSIAN; @@ -109,18 +122,14 @@ int aliceVision_main(int argc, char** argv) "Zero to use the center bracket. +N to use a more exposed bracket or -N to use a less exposed backet.") ("meanTargetedLumaForMerging", po::value(&meanTargetedLumaForMerging)->default_value(meanTargetedLumaForMerging), "Mean expected luminance after merging step when input LDR images are decoded in sRGB color space. 
Must be in the range [0, 1].") - ("minLumaForMerging", po::value(&minLumaForMerging)->default_value(minLumaForMerging), - "Minimum mean luminance of LDR images for merging. Must be in the range [0, 1].") - ("enablePixelwiseAdvancedMerging", po::value(&enablePixelwiseAdvancedMerging)->default_value(enablePixelwiseAdvancedMerging), - "Enable pixelwise advanced merging to reduce noise.") ("noiseThreshold", po::value(&noiseThreshold)->default_value(noiseThreshold), "Value under which input channel value is considered as noise. Used in advanced pixelwise merging.") ("minSignificantValue", po::value(&minSignificantValue)->default_value(minSignificantValue), "Minimum channel input value to be considered in advanced pixelwise merging. Used in advanced pixelwise merging.") ("maxSignificantValue", po::value(&maxSignificantValue)->default_value(maxSignificantValue), "Maximum channel input value to be considered in advanced pixelwise merging. Used in advanced pixelwise merging.") - ("toleranceOnRatio", po::value(&toleranceOnRatio)->default_value(toleranceOnRatio), - "Tolerance on ratio between two input channel values at two consecutive exposures. Used in advanced pixelwise merging.") + ("computeLightMasks", po::value(&computeLightMasks)->default_value(computeLightMasks), + "Compute masks of dark and high lights and missing mid lights info.") ("highlightTargetLux", po::value(&highlightTargetLux)->default_value(highlightTargetLux), "Highlights maximum luminance.") ("highlightCorrectionFactor", po::value(&highlightCorrectionFactor)->default_value(highlightCorrectionFactor), @@ -233,9 +242,8 @@ int aliceVision_main(int argc, char** argv) if (workingColorSpace != image::EImageColorSpace::SRGB) { meanTargetedLumaForMerging = std::pow((meanTargetedLumaForMerging + 0.055) / 1.055, 2.2); - minLumaForMerging = minLumaForMerging == 0.0 ? 
0.0 : std::pow((minLumaForMerging + 0.055) / 1.055, 2.2); } - estimatedTargetIndex = hdr::selectTargetViews(targetViews, groupedViews, offsetRefBracketIndex, lumaStatFilepath.string(), meanTargetedLumaForMerging, minLumaForMerging); + estimatedTargetIndex = hdr::selectTargetViews(targetViews, groupedViews, offsetRefBracketIndex, lumaStatFilepath.string(), meanTargetedLumaForMerging); if ((targetViews.empty() || targetViews.size() != groupedViews.size()) && !isOffsetRefBracketIndexValid) { @@ -357,6 +365,9 @@ int aliceVision_main(int argc, char** argv) // Merge HDR images image::Image HDRimage; + image::Image lowLightMask; + image::Image highLightMask; + image::Image noMidLightMask; if(images.size() > 1) { hdr::hdrMerge merge; @@ -366,12 +377,12 @@ int aliceVision_main(int argc, char** argv) hdr::MergingParams mergingParams; mergingParams.targetCameraExposure = targetCameraSetting.getExposure(); mergingParams.refImageIndex = estimatedTargetIndex; - mergingParams.noiseThreshold = enablePixelwiseAdvancedMerging ? noiseThreshold : -1.0; - mergingParams.minSignificantValue = enablePixelwiseAdvancedMerging ? minSignificantValue : -1.0; - mergingParams.maxSignificantValue = enablePixelwiseAdvancedMerging ? maxSignificantValue : 1000.0; - mergingParams.dataRatioTolerance = enablePixelwiseAdvancedMerging ? 
toleranceOnRatio : -1.0; + mergingParams.noiseThreshold = noiseThreshold; + mergingParams.minSignificantValue = minSignificantValue; + mergingParams.maxSignificantValue = maxSignificantValue; + mergingParams.computeLightMasks = computeLightMasks; - merge.process(images, exposures, fusionWeight, response, HDRimage, mergingParams);//, targetCameraSetting.getExposure(), estimatedTargetIndex); + merge.process(images, exposures, fusionWeight, response, HDRimage, lowLightMask, highLightMask, noMidLightMask, mergingParams);//, targetCameraSetting.getExposure(), estimatedTargetIndex); if(highlightCorrectionFactor > 0.0f) { merge.postProcessHighlight(images, exposures, fusionWeight, response, HDRimage, targetCameraSetting.getExposure(), highlightCorrectionFactor, highlightTargetLux); @@ -439,6 +450,23 @@ int aliceVision_main(int argc, char** argv) writeOptions.storageDataType(storageDataType); image::writeImage(hdrImagePath, HDRimage, writeOptions, targetMetadata); + + if(computeLightMasks) + { + const std::string hdrMaskLowLightPath = + getHdrMaskPath(outputPath, g, "lowLight", keepSourceImageName ? p.stem().string() : ""); + const std::string hdrMaskHighLightPath = + getHdrMaskPath(outputPath, g, "highLight", keepSourceImageName ? p.stem().string() : ""); + const std::string hdrMaskNoMidLightPath = + getHdrMaskPath(outputPath, g, "noMidLight", keepSourceImageName ? p.stem().string() : ""); + + image::ImageWriteOptions maskWriteOptions; + maskWriteOptions.exrCompressionMethod(image::EImageExrCompression::None); + + image::writeImage(hdrMaskLowLightPath, lowLightMask, maskWriteOptions); + image::writeImage(hdrMaskHighLightPath, highLightMask, maskWriteOptions); + image::writeImage(hdrMaskNoMidLightPath, noMidLightMask, maskWriteOptions); + } } return EXIT_SUCCESS; From 796a992d3fc2ef126102a1c63ffe8608f2de1143 Mon Sep 17 00:00:00 2001 From: demoulinv Date: Wed, 28 Jun 2023 08:31:53 +0200 Subject: [PATCH 5/6] [HDR] merging algorithm: bugfix on min and max values. 
--- src/aliceVision/hdr/hdrMerge.cpp | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index 118b4e20e1..435727812d 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -78,8 +78,12 @@ void hdrMerge::process(const std::vector< image::Image > &imag //const std::string mergeInfoFilename = "C:/Temp/mergeInfo.csv"; //std::ofstream file(mergeInfoFilename); - const double minValue = mergingParams.minSignificantValue; - const double maxValue = mergingParams.maxSignificantValue; + const std::vector v_minValue = {mergingParams.minSignificantValue * response(1.0, 0), + mergingParams.minSignificantValue * response(1.0, 1), + mergingParams.minSignificantValue * response(1.0, 2)}; + const std::vector v_maxValue = {mergingParams.maxSignificantValue * response(1.0, 0), + mergingParams.maxSignificantValue * response(1.0, 1), + mergingParams.maxSignificantValue * response(1.0, 2)}; highLight.resize(width, height, true, image::RGBfColor(0.f, 0.f, 0.f)); lowLight.resize(width, height, true, image::RGBfColor(0.f, 0.f, 0.f)); @@ -155,15 +159,15 @@ void hdrMerge::process(const std::vector< image::Image > &imag for(std::size_t channel = 0; channel < 3; ++channel) { int firstIndex = mergingParams.refImageIndex; - while(firstIndex > 0 && - (response(images[firstIndex](y, x)(channel), channel) > 0.05 || firstIndex == images.size() - 1)) + while(firstIndex > 0 && (response(images[firstIndex](y, x)(channel), channel) > v_minValue[channel] || + firstIndex == images.size() - 1)) { firstIndex--; } v_firstIndex.push_back(firstIndex); int lastIndex = v_firstIndex[channel] + 1; - while(lastIndex < images.size() - 1 && response(images[lastIndex](y, x)(channel), channel) < 0.995) + while(lastIndex < images.size() - 1 && response(images[lastIndex](y, x)(channel), channel) < v_maxValue[channel]) { lastIndex++; } From a95dd3c71383b139de2a7e3cead033a94fd2c7ae Mon Sep 17 
00:00:00 2001 From: demoulinv Date: Wed, 28 Jun 2023 11:41:09 +0200 Subject: [PATCH 6/6] [HDR] merging algorithm: remove noise param and code cleaning --- src/aliceVision/hdr/hdrMerge.cpp | 284 +++++++------------ src/aliceVision/hdr/hdrMerge.hpp | 1 - src/software/pipeline/main_LdrToHdrMerge.cpp | 33 --- 3 files changed, 107 insertions(+), 211 deletions(-) diff --git a/src/aliceVision/hdr/hdrMerge.cpp b/src/aliceVision/hdr/hdrMerge.cpp index 435727812d..07cb0eb7f5 100644 --- a/src/aliceVision/hdr/hdrMerge.cpp +++ b/src/aliceVision/hdr/hdrMerge.cpp @@ -75,15 +75,12 @@ void hdrMerge::process(const std::vector< image::Image > &imag rgbCurve weightLongestExposure = weight; weightLongestExposure.freezeFirstPartValues(); - //const std::string mergeInfoFilename = "C:/Temp/mergeInfo.csv"; - //std::ofstream file(mergeInfoFilename); - - const std::vector v_minValue = {mergingParams.minSignificantValue * response(1.0, 0), - mergingParams.minSignificantValue * response(1.0, 1), - mergingParams.minSignificantValue * response(1.0, 2)}; - const std::vector v_maxValue = {mergingParams.maxSignificantValue * response(1.0, 0), - mergingParams.maxSignificantValue * response(1.0, 1), - mergingParams.maxSignificantValue * response(1.0, 2)}; + const std::vector v_minValue = {response(mergingParams.minSignificantValue, 0), + response(mergingParams.minSignificantValue, 1), + response(mergingParams.minSignificantValue, 2)}; + const std::vector v_maxValue = {response(mergingParams.maxSignificantValue, 0), + response(mergingParams.maxSignificantValue, 1), + response(mergingParams.maxSignificantValue, 2)}; highLight.resize(width, height, true, image::RGBfColor(0.f, 0.f, 0.f)); lowLight.resize(width, height, true, image::RGBfColor(0.f, 0.f, 0.f)); @@ -94,174 +91,107 @@ void hdrMerge::process(const std::vector< image::Image > &imag { for(int x = 0; x < width; ++x) { - //for each pixels - image::RGBfColor& radianceColor = radiance(y, x); - image::RGBfColor& highLightColor = highLight(y, x); - 
image::RGBfColor& lowLightColor = lowLight(y, x); - image::RGBfColor& noMidLightColor = noMidLight(y, x); - - //if ((x%1000 == 650) && (y%1000 == 850)) - //{ - // if(!file) - // { - // ALICEVISION_LOG_WARNING("Unable to create file " << mergeInfoFilename << " for storing merging info."); - // } - // else - // { - // file << x << "," << y << std::endl; - // file << "time,R,resp(R),coeff,resp(R)/time,G,resp(G),coeff,resp(G)/time,B,resp(B),coeff,resp(B)/time," << std::endl; - // for(std::size_t i = 0; i < images.size(); i++) - // { - // const double time = times[i]; - // file << time << ","; - // for(std::size_t channel = 0; channel < 3; ++channel) - // { - // const double value = images[i](y, x)(channel); - // const double r = response(value, channel); - // double w = std::max(0.001f, (i == 0) ? weightShortestExposure(value, channel) - // : (i == (images.size() - 1) ? weightLongestExposure(value, channel) - // : weight(value, channel))); - - // file << value << ","; - // file << r << ","; - // file << w << ","; - // file << r/time << ","; - // } - // file << std::endl; - // } - - // } - //} - - const double meanValueHighExp = (images[images.size() - 1](y, x)(0) + images[images.size() - 1](y, x)(1) + - images[images.size() - 1](y, x)(2)) / - 3.0; - - if(meanValueHighExp < mergingParams.noiseThreshold) // Noise case - { - for(std::size_t channel = 0; channel < 3; ++channel) - { - radianceColor(channel) = mergingParams.targetCameraExposure * response(meanValueHighExp, channel) / times[images.size() - 1]; - highLightColor(channel) = 0.0; - lowLightColor(channel) = 1.0; - noMidLightColor(channel) = 0.0; - } - } - else - { - std::vector> vv_coeff; - std::vector> vv_value; - std::vector> vv_normalizedValue; - - // Compute merging range - std::vector v_firstIndex; - std::vector v_lastIndex; - for(std::size_t channel = 0; channel < 3; ++channel) - { - int firstIndex = mergingParams.refImageIndex; - while(firstIndex > 0 && (response(images[firstIndex](y, x)(channel), channel) 
> v_minValue[channel] || - firstIndex == images.size() - 1)) - { - firstIndex--; - } - v_firstIndex.push_back(firstIndex); - - int lastIndex = v_firstIndex[channel] + 1; - while(lastIndex < images.size() - 1 && response(images[lastIndex](y, x)(channel), channel) < v_maxValue[channel]) - { - lastIndex++; - } - v_lastIndex.push_back(lastIndex); - } - - // Compute merging coeffs and values to be merged - for(std::size_t channel = 0; channel < 3; ++channel) - { - std::vector v_coeff; - std::vector v_normalizedValue; - std::vector v_value; - - { - const double value = response(images[0](y, x)(channel), channel); - const double normalizedValue = value / times[0]; - double coeff = std::max(0.001f, weightShortestExposure(value, channel)); - - v_value.push_back(value); - v_normalizedValue.push_back(normalizedValue); - v_coeff.push_back(coeff); - } - - for(std::size_t e = 1; e < images.size() - 1; e++) - { - const double value = response(images[e](y, x)(channel), channel); - const double normalizedValue = value / times[e]; - double coeff = std::max(0.001f, weight(value, channel)); - - v_value.push_back(value); - v_normalizedValue.push_back(normalizedValue); - v_coeff.push_back(coeff); - } - - { - const double value = response(images[images.size() - 1](y, x)(channel), channel); - const double normalizedValue = value / times[images.size() - 1]; - double coeff = std::max(0.001f, weightLongestExposure(value, channel)); - - v_value.push_back(value); - v_normalizedValue.push_back(normalizedValue); - v_coeff.push_back(coeff); - } - - vv_coeff.push_back(v_coeff); - vv_normalizedValue.push_back(v_normalizedValue); - vv_value.push_back(v_value); - } - - // Compute light masks if required (monitoring and debug purposes) - if(mergingParams.computeLightMasks) - { - for(std::size_t channel = 0; channel < 3; ++channel) - { - int idxMaxValue = 0; - int idxMinValue = 0; - double maxValue = 0.0; - double minValue = 10000.0; - bool jump = true; - for(std::size_t e = 0; e < images.size(); ++e) 
- { - if(vv_value[channel][e] > maxValue) - { - maxValue = vv_value[channel][e]; - idxMaxValue = e; - } - if(vv_value[channel][e] < minValue) - { - minValue = vv_value[channel][e]; - idxMinValue = e; - } - jump = jump && ((vv_value[channel][e] < 0.1 && e < images.size() - 1) || - (vv_value[channel][e] > 0.9 && e > 0)); - } - highLightColor(channel) = minValue > 0.9 ? 1.0 : 0.0; - lowLightColor(channel) = maxValue < 0.1 ? 1.0 : 0.0; - noMidLightColor(channel) = jump ? 1.0 : 0.0; - } - } - - // Compute the final result and adjust the exposure to the reference one. - for(std::size_t channel = 0; channel < 3; ++channel) - { - double v = 0.0; - double sumCoeff = 0.0; - for(std::size_t i = v_firstIndex[channel]; i <= v_lastIndex[channel]; ++i) - { - v += vv_coeff[channel][i] * vv_normalizedValue[channel][i]; - sumCoeff += vv_coeff[channel][i]; - } - radianceColor(channel) = mergingParams.targetCameraExposure * - (sumCoeff != 0.0 ? v / sumCoeff : vv_normalizedValue[channel][mergingParams.refImageIndex]); - } - } + //for each pixels + image::RGBfColor& radianceColor = radiance(y, x); + image::RGBfColor& highLightColor = highLight(y, x); + image::RGBfColor& lowLightColor = lowLight(y, x); + image::RGBfColor& noMidLightColor = noMidLight(y, x); + + std::vector> vv_coeff; + std::vector> vv_value; + std::vector> vv_normalizedValue; + + // Compute merging range + std::vector v_firstIndex; + std::vector v_lastIndex; + for(std::size_t channel = 0; channel < 3; ++channel) + { + int firstIndex = mergingParams.refImageIndex; + while(firstIndex > 0 && (response(images[firstIndex](y, x)(channel), channel) > v_minValue[channel] || + firstIndex == images.size() - 1)) + { + firstIndex--; + } + v_firstIndex.push_back(firstIndex); + + int lastIndex = v_firstIndex[channel] + 1; + while(lastIndex < images.size() - 1 && response(images[lastIndex](y, x)(channel), channel) < v_maxValue[channel]) + { + lastIndex++; + } + v_lastIndex.push_back(lastIndex); + } + + // Compute merging coeffs and 
values to be merged + for(std::size_t channel = 0; channel < 3; ++channel) + { + std::vector v_coeff; + std::vector v_normalizedValue; + std::vector v_value; + + for(std::size_t e = 0; e < images.size(); ++e) + { + const double value = images[e](y, x)(channel); + const double resp = response(value, channel); + const double normalizedValue = resp / times[e]; + double coeff = std::max(0.001f, e == 0 ? weightShortestExposure(value, channel) : + (e == images.size() - 1 ? weightLongestExposure(value, channel) : + weight(value, channel))); + + v_value.push_back(value); + v_normalizedValue.push_back(normalizedValue); + v_coeff.push_back(coeff); + } + + vv_coeff.push_back(v_coeff); + vv_normalizedValue.push_back(v_normalizedValue); + vv_value.push_back(v_value); + } + + // Compute light masks if required (monitoring and debug purposes) + if(mergingParams.computeLightMasks) + { + for(std::size_t channel = 0; channel < 3; ++channel) + { + int idxMaxValue = 0; + int idxMinValue = 0; + double maxValue = 0.0; + double minValue = 10000.0; + bool jump = true; + for(std::size_t e = 0; e < images.size(); ++e) + { + if(vv_value[channel][e] > maxValue) + { + maxValue = vv_value[channel][e]; + idxMaxValue = e; + } + if(vv_value[channel][e] < minValue) + { + minValue = vv_value[channel][e]; + idxMinValue = e; + } + jump = jump && ((vv_value[channel][e] < mergingParams.minSignificantValue && e < images.size() - 1) || + (vv_value[channel][e] > mergingParams.maxSignificantValue && e > 0)); + } + highLightColor(channel) = minValue > mergingParams.maxSignificantValue ? 1.0 : 0.0; + lowLightColor(channel) = maxValue < mergingParams.minSignificantValue ? 1.0 : 0.0; + noMidLightColor(channel) = jump ? 1.0 : 0.0; + } + } + + // Compute the final result and adjust the exposure to the reference one. 
+ for(std::size_t channel = 0; channel < 3; ++channel) + { + double v = 0.0; + double sumCoeff = 0.0; + for(std::size_t i = v_firstIndex[channel]; i <= v_lastIndex[channel]; ++i) + { + v += vv_coeff[channel][i] * vv_normalizedValue[channel][i]; + sumCoeff += vv_coeff[channel][i]; + } + radianceColor(channel) = mergingParams.targetCameraExposure * + (sumCoeff != 0.0 ? v / sumCoeff : vv_normalizedValue[channel][mergingParams.refImageIndex]); + } } } } diff --git a/src/aliceVision/hdr/hdrMerge.hpp b/src/aliceVision/hdr/hdrMerge.hpp index 0725cdd5f5..140662f19a 100644 --- a/src/aliceVision/hdr/hdrMerge.hpp +++ b/src/aliceVision/hdr/hdrMerge.hpp @@ -17,7 +17,6 @@ struct MergingParams { double minSignificantValue = 0.05; double maxSignificantValue = 0.995; - double noiseThreshold = 0.1; float targetCameraExposure; int refImageIndex; bool computeLightMasks = false; diff --git a/src/software/pipeline/main_LdrToHdrMerge.cpp b/src/software/pipeline/main_LdrToHdrMerge.cpp index 4416779ccd..4b0753a227 100644 --- a/src/software/pipeline/main_LdrToHdrMerge.cpp +++ b/src/software/pipeline/main_LdrToHdrMerge.cpp @@ -79,7 +79,6 @@ int aliceVision_main(int argc, char** argv) int channelQuantizationPower = 10; int offsetRefBracketIndex = 1000; // By default, use the automatic selection double meanTargetedLumaForMerging = 0.4; - double noiseThreshold = 0.1; double minSignificantValue = 0.05; double maxSignificantValue = 0.995; bool computeLightMasks = false; @@ -122,8 +121,6 @@ int aliceVision_main(int argc, char** argv) "Zero to use the center bracket. +N to use a more exposed bracket or -N to use a less exposed backet.") ("meanTargetedLumaForMerging", po::value(&meanTargetedLumaForMerging)->default_value(meanTargetedLumaForMerging), "Mean expected luminance after merging step when input LDR images are decoded in sRGB color space. 
Must be in the range [0, 1].") - ("noiseThreshold", po::value(&noiseThreshold)->default_value(noiseThreshold), - "Value under which input channel value is considered as noise. Used in advanced pixelwise merging.") ("minSignificantValue", po::value(&minSignificantValue)->default_value(minSignificantValue), "Minimum channel input value to be considered in advanced pixelwise merging. Used in advanced pixelwise merging.") ("maxSignificantValue", po::value(&maxSignificantValue)->default_value(maxSignificantValue), @@ -377,7 +374,6 @@ int aliceVision_main(int argc, char** argv) hdr::MergingParams mergingParams; mergingParams.targetCameraExposure = targetCameraSetting.getExposure(); mergingParams.refImageIndex = estimatedTargetIndex; - mergingParams.noiseThreshold = noiseThreshold; mergingParams.minSignificantValue = minSignificantValue; mergingParams.maxSignificantValue = maxSignificantValue; mergingParams.computeLightMasks = computeLightMasks; @@ -394,35 +390,6 @@ int aliceVision_main(int argc, char** argv) HDRimage = images[0]; } - //for (int x = 252; x < 7000; x += 1000) - //{ - // for (int y = 750; y < 4000; y += 1000) - // { - // for(int l = x - 10; l <= x + 11; l++) - // { - // image::RGBfColor& pix1 = HDRimage(y - 10, l); - // pix1[0] = 1.f; - // pix1[1] = 0.f; - // pix1[2] = 0.f; - // image::RGBfColor& pix2 = HDRimage(y + 11, l); - // pix2[0] = 1.f; - // pix2[1] = 0.f; - // pix2[2] = 0.f; - // } - // for(int c = y - 10; c <= y + 11; c++) - // { - // image::RGBfColor& pix1 = HDRimage(c, x - 10); - // pix1[0] = 1.f; - // pix1[1] = 0.f; - // pix1[2] = 0.f; - // image::RGBfColor& pix2 = HDRimage(c, x + 11); - // pix2[0] = 1.f; - // pix2[1] = 0.f; - // pix2[2] = 0.f; - // } - // } - //} - boost::filesystem::path p(targetView->getImagePath()); const std::string hdrImagePath = getHdrImagePath(outputPath, g, keepSourceImageName ? p.stem().string() : "");