Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[software] Some adjustments to export animated camera #1047

Merged
merged 4 commits into from
May 27, 2021
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
206 changes: 143 additions & 63 deletions src/software/export/main_exportAnimatedCamera.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -103,18 +103,18 @@ int aliceVision_main(int argc, char** argv)
std::string verboseLevel = system::EVerboseLevel_enumToString(system::Logger::getDefaultVerboseLevel());
std::string sfmDataFilename;
std::string outFolder;
std::string outImageFileTypeName = image::EImageFileType_enumToString(image::EImageFileType::JPEG);
std::string outMapFileTypeName = image::EImageFileType_enumToString(image::EImageFileType::EXR);

// user optional parameters
bool undistortedImages = false;
bool exportUVMaps = false;
bool exportFullROD = false;
bool correctPrincipalPoint = true;
std::map<IndexT, oiio::ROI> roiForIntrinsic;

// user optional parameters

std::string viewFilter;

std::string sfmDataFilterFilepath;
std::string outImageFileTypeName = image::EImageFileType_enumToString(image::EImageFileType::JPEG);
std::string outMapFileTypeName = image::EImageFileType_enumToString(image::EImageFileType::EXR);

po::options_description allParams("AliceVision exportAnimatedCamera");

po::options_description requiredParams("Required parameters");
Expand All @@ -132,11 +132,13 @@ int aliceVision_main(int argc, char** argv)
("exportFullROD", po::value<bool>(&exportFullROD)->default_value(exportFullROD),
"Export undistorted images with the full Region of Definition (RoD). Only supported by the EXR image file format.")
("exportUVMaps", po::value<bool>(&exportUVMaps)->default_value(exportUVMaps),
"Export UV Maps for Nuke in exr format ")
"Export UV Maps in exr format to apply distort/undistort transformations in a compositing software.")
("correctPrincipalPoint", po::value<bool>(&correctPrincipalPoint)->default_value(correctPrincipalPoint),
"apply an offset to correct the position of the principal point")
("viewFilter", po::value<std::string>(&viewFilter)->default_value(viewFilter),
"Path to the output SfMData file (with only views and poses).")
"Select the cameras to export using an expression based on the image filepath. Export all cameras if empty.")
("sfmDataFilter", po::value<std::string>(&sfmDataFilterFilepath)->default_value(sfmDataFilterFilepath),
"Filter out cameras from the export if they are part of this SfMData. Export all cameras if empty.")
("undistortedImageType", po::value<std::string>(&outImageFileTypeName)->default_value(outImageFileTypeName),
image::EImageFileType_informations().c_str());

Expand Down Expand Up @@ -202,8 +204,51 @@ int aliceVision_main(int argc, char** argv)
ALICEVISION_LOG_ERROR("The input SfMData file '" << sfmDataFilename << "' is empty.");
return EXIT_FAILURE;
}

sfmData::SfMData sfmDataFilter;
if(!sfmDataFilterFilepath.empty())
{
if(!sfmDataIO::Load(sfmDataFilter, sfmDataFilterFilepath, sfmDataIO::ESfMData::VIEWS))
{
ALICEVISION_LOG_ERROR("The input filter SfMData file '" << sfmDataFilterFilepath << "' cannot be read.");
return EXIT_FAILURE;
}
}
system::Timer timer;

    // Decide the views and intrinsics to export
sfmData::SfMData sfmDataExport;
for(auto& viewPair : sfmData.getViews())
{
sfmData::View& view = *(viewPair.second);

// regex filter
if(!viewFilter.empty())
{
// Skip the view if it does not match the expression filter
const std::regex regexFilter = utils::filterToRegex(viewFilter);
if(!std::regex_match(view.getImagePath(), regexFilter))
continue;
}

// sfmData filter
if(!sfmDataFilterFilepath.empty())
{
            // Skip the view if it exists in the sfmDataFilter
if(sfmDataFilter.getViews().find(view.getViewId()) != sfmDataFilter.getViews().end())
continue;
}

sfmDataExport.getViews().emplace(view.getViewId(), viewPair.second);

// Export intrinsics with at least one view with a valid pose
if(sfmData.isPoseAndIntrinsicDefined(&view))
{
            // std::map::emplace does nothing if the key already exists
sfmDataExport.getIntrinsics().emplace(view.getIntrinsicId(), sfmData.getIntrinsics().at(view.getIntrinsicId()));
}
}

const fs::path undistortedImagesFolderPath = fs::path(outFolder) / "undistort";
const bool writeUndistordedResult = undistortedImages || exportUVMaps;

Expand All @@ -216,7 +261,7 @@ int aliceVision_main(int argc, char** argv)
// export distortion map / one image per intrinsic
if(exportUVMaps)
{
for(const auto& intrinsicPair : sfmData.getIntrinsics())
for(const auto& intrinsicPair : sfmDataExport.getIntrinsics())
{
const camera::IntrinsicBase& intrinsic = *(intrinsicPair.second);
image::Image<image::RGBfColor> image_dist;
Expand All @@ -234,59 +279,76 @@ int aliceVision_main(int argc, char** argv)
}
ALICEVISION_LOG_DEBUG("ppCorrection:" + std::to_string(ppCorrection[0]) + ";" +std::to_string(ppCorrection[1]));

// flip and normalize for Nuke
#pragma omp parallel for
for(int y = 0; y < int(intrinsic.h()); ++y)
// UV Map: Undistort
{
for(int x = 0; x < int(intrinsic.w()); ++x)
// flip and normalize as UVMap
#pragma omp parallel for
for(int y = 0; y < int(intrinsic.h()); ++y)
{
const Vec2 undisto_pix(x, y);
// compute coordinates with distortion
const Vec2 disto_pix = intrinsic.get_d_pixel(undisto_pix) + ppCorrection;

image_dist(y, x).r() = (disto_pix[0]) / (intrinsic.w() - 1);
image_dist(y, x).g() = (intrinsic.h() - 1 - disto_pix[1]) / (intrinsic.h() - 1);
for(int x = 0; x < int(intrinsic.w()); ++x)
{
const Vec2 undisto_pix(x, y);
// compute coordinates with distortion
const Vec2 disto_pix = intrinsic.get_d_pixel(undisto_pix) + ppCorrection;

image_dist(y, x).r() = float((disto_pix[0]) / (intrinsic.w() - 1));
image_dist(y, x).g() = float((intrinsic.h() - 1 - disto_pix[1]) / (intrinsic.h() - 1));
}
}

const std::string dstImage =
(undistortedImagesFolderPath / (std::to_string(intrinsicPair.first) + "_UVMap_Undistort." +
image::EImageFileType_enumToString(outputMapFileType))).string();
image::writeImage(dstImage, image_dist, image::EImageColorSpace::AUTO);
}

const std::string dstImage =
(undistortedImagesFolderPath / ("Distortion_UVMap_" + std::to_string(intrinsicPair.first) + "." +
image::EImageFileType_enumToString(outputMapFileType))).string();
image::writeImage(dstImage, image_dist, image::EImageColorSpace::AUTO);
// UV Map: Distort
{
// flip and normalize as UVMap
#pragma omp parallel for
for(int y = 0; y < int(intrinsic.h()); ++y)
{
for(int x = 0; x < int(intrinsic.w()); ++x)
{
const Vec2 disto_pix(x, y);
// compute coordinates without distortion
const Vec2 undisto_pix = intrinsic.get_ud_pixel(disto_pix) - ppCorrection;

image_dist(y, x).r() = float((undisto_pix[0]) / (intrinsic.w() - 1));
image_dist(y, x).g() = float((intrinsic.h() - 1 - undisto_pix[1]) / (intrinsic.h() - 1));
}
}

const std::string dstImage =
(undistortedImagesFolderPath / (std::to_string(intrinsicPair.first) + "_UVMap_Distort." +
image::EImageFileType_enumToString(outputMapFileType))).string();
image::writeImage(dstImage, image_dist, image::EImageColorSpace::AUTO);
}
}
}

ALICEVISION_LOG_INFO("Build animated camera(s)...");

image::Image<image::RGBfColor> image, image_ud;
boost::progress_display progressBar(sfmData.getViews().size());
boost::progress_display progressBar(sfmDataExport.getViews().size());

for(const auto& viewPair : sfmData.getViews())
for(const auto& viewPair : sfmDataExport.getViews())
{
const sfmData::View& view = *(viewPair.second);

++progressBar;

// regex filter
if(!viewFilter.empty())
{
const std::regex regexFilter = utils::filterToRegex(viewFilter);
if(!std::regex_match(view.getImagePath(), regexFilter))
continue;
}

const std::string imagePathStem = fs::path(viewPair.second->getImagePath()).stem().string();

// undistort camera images
if(undistortedImages)
{
sfmData::Intrinsics::const_iterator iterIntrinsic = sfmData.getIntrinsics().find(view.getIntrinsicId());
sfmData::Intrinsics::const_iterator iterIntrinsic = sfmDataExport.getIntrinsics().find(view.getIntrinsicId());
const std::string dstImage = (undistortedImagesFolderPath / (std::to_string(view.getIntrinsicId()) + "_" + imagePathStem + "." + image::EImageFileType_enumToString(outputFileType))).string();
const camera::IntrinsicBase * cam = iterIntrinsic->second.get();

image::readImage(view.getImagePath(), image, image::EImageColorSpace::LINEAR);
oiio::ParamValueList metadata = image::readImageMetadata(view.getImagePath());
oiio::ROI roiNuke;

if(cam->isValid() && cam->hasDistortion())
{
Expand Down Expand Up @@ -327,6 +389,7 @@ int aliceVision_main(int argc, char** argv)
}

// pose and intrinsic defined
// Note: we use "sfmData" and not "sfmDataExport" to have access to poses
if(!sfmData.isPoseAndIntrinsicDefined(&view))
continue;

Expand All @@ -349,32 +412,34 @@ int aliceVision_main(int argc, char** argv)
std::smatch matches;
if(std::regex_search(imagePathStem, matches, regexFrame))
{
const std::string prefix = matches[1];
const std::string suffix = matches[3];
frameN = std::stoi(matches[2]);
const std::string prefix = matches[1];
const std::string suffix = matches[3];
frameN = std::stoi(matches[2]);

if(prefix.empty() && suffix.empty())
cameraName = std::string("Undefined") + "_" + cameraName;
else
cameraName = prefix + "frame" + suffix + "_" + cameraName;
if(prefix.empty() && suffix.empty())
cameraName = std::string("Undefined") + "_" + cameraName;
else
cameraName = prefix + "frame" + suffix + "_" + cameraName;

isSequence = true;
isSequence = true;
}

if(view.hasMetadataDateTimeOriginal()) // picture
{
const std::size_t key = view.getMetadataDateTimestamp();
ALICEVISION_LOG_TRACE("imagePathStem: " << imagePathStem << ", frameN: " << frameN << ", isSequence: " << isSequence << ", cameraName: " << cameraName);

dslrViewPerKey[cameraName].push_back({key, view.getViewId()});
}
else if(isSequence) // video
if(isSequence) // video
{
const std::size_t frame = frameN;
videoViewPerFrame[cameraName][frame] = view.getViewId();
}
else if(view.hasMetadataDateTimeOriginal()) // picture
{
const std::size_t key = view.getMetadataDateTimestamp();

dslrViewPerKey[cameraName].push_back({key, view.getViewId()});
}
else // no time or sequence information
{
dslrViewPerKey[cameraName].push_back({0, view.getViewId()});
dslrViewPerKey[cameraName].push_back({0, view.getViewId()});
}
}

Expand All @@ -392,6 +457,18 @@ int aliceVision_main(int argc, char** argv)
for(const auto& camera : dslrViewPerKey)
ss << "\t - " << camera.first << " | " << camera.second.size() << " image(s)" << std::endl;

ss << "\t- # Used camera intrinsics: " << sfmDataExport.getIntrinsics().size() << std::endl;

for(const auto& intrinsicIt : sfmDataExport.getIntrinsics())
{
const auto intrinsic = intrinsicIt.second;
ss << "\t - "
<< intrinsicIt.first << " | "
<< intrinsic->w() << "x" << intrinsic->h()
<< " " << intrinsic->serialNumber()
<< std::endl;
}

ALICEVISION_LOG_INFO(ss.str());
}

Expand All @@ -414,21 +491,24 @@ int aliceVision_main(int argc, char** argv)
{
const IndexT viewId = findFrameIt->second;

// Note: we use "sfmData" and not "sfmDataExport" to have access to poses

const auto findViewIt = sfmData.getViews().find(viewId);
if(findViewIt != sfmData.getViews().end())
{
ALICEVISION_LOG_DEBUG("[" + cameraViews.first +"][video] Keyframe added");
const IndexT intrinsicId = findViewIt->second->getIntrinsicId();
const camera::Pinhole* cam = dynamic_cast<camera::Pinhole*>(sfmData.getIntrinsicPtr(intrinsicId));
const sfmData::CameraPose pose = sfmData.getPose(*findViewIt->second);
const std::string& imagePath = findViewIt->second->getImagePath();
const std::string undistortedImagePath = (undistortedImagesFolderPath / (std::to_string(intrinsicId) + "_" + fs::path(imagePath).stem().string() + "." + image::EImageFileType_enumToString(outputFileType))).string();

exporter.addCameraKeyframe(pose.getTransform(), cam, (undistortedImages) ? undistortedImagePath : imagePath, viewId, intrinsicId);
continue;
}
assert(findViewIt != sfmData.getViews().end());

ALICEVISION_LOG_DEBUG("[" + cameraViews.first +"][video] Keyframe added");
const IndexT intrinsicId = findViewIt->second->getIntrinsicId();
const camera::Pinhole* cam = dynamic_cast<camera::Pinhole*>(sfmData.getIntrinsicPtr(intrinsicId));
const sfmData::CameraPose pose = sfmData.getPose(*findViewIt->second);
const std::string& imagePath = findViewIt->second->getImagePath();
const std::string undistortedImagePath = (undistortedImagesFolderPath / (std::to_string(intrinsicId) + "_" + fs::path(imagePath).stem().string() + "." + image::EImageFileType_enumToString(outputFileType))).string();

exporter.addCameraKeyframe(pose.getTransform(), cam, (undistortedImages) ? undistortedImagePath : imagePath, viewId, intrinsicId);
}
else
{
exporter.jumpKeyframe(std::to_string(frame));
}
exporter.jumpKeyframe(std::to_string(frame));
}
}

Expand Down