diff --git a/meshroom/aliceVision/ImageProcessing.py b/meshroom/aliceVision/ImageProcessing.py
index 2ab272f3d9..8c8072a511 100644
--- a/meshroom/aliceVision/ImageProcessing.py
+++ b/meshroom/aliceVision/ImageProcessing.py
@@ -34,8 +34,8 @@ def outputImagesValueFunct(attr):
 class ImageProcessing(desc.AVCommandLineNode):
     commandLine = "aliceVision_imageProcessing {allParams}"
     size = desc.DynamicNodeSize("input")
-    # parallelization = desc.Parallelization(blockSize=40)
-    # commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
+    parallelization = desc.Parallelization(blockSize=40)
+    commandLineRange = '--rangeIteration {rangeIteration} --rangeBlocksCount {rangeBlocksCount}'
 
     category = "Utils"
     documentation = """Convert or apply filtering to the input images."""
@@ -446,6 +446,13 @@ class ImageProcessing(desc.AVCommandLineNode):
             value="rgba",
             values=["rgba", "rgb", "grayscale"],
         ),
+        desc.ChoiceParam(
+            name="outputChannel",
+            label="Output Color Channel(s)",
+            description="Allows you to choose the output color channel(s).",
+            value="all",
+            values=["all", "red", "green", "blue", "alpha"],
+        ),
         desc.ChoiceParam(
             name="inputColorSpace",
             label="Input Color Space",
diff --git a/src/software/utils/main_imageProcessing.cpp b/src/software/utils/main_imageProcessing.cpp
index ef988e0f4c..9a946474bd 100644
--- a/src/software/utils/main_imageProcessing.cpp
+++ b/src/software/utils/main_imageProcessing.cpp
@@ -40,6 +40,8 @@
 #include
 #include
 #include
+#include <chrono>
+#include <thread>
 
 // These constants define the current software version.
 // They must be updated when the command line is changed.
@@ -288,6 +290,58 @@ inline std::istream& operator>>(std::istream& in, EImageFormat& e)
     return in;
 }
 
+enum class EImageOutputChannel
+{
+    All,
+    Red,
+    Green,
+    Blue,
+    Alpha
+};
+
+inline std::string EImageOutputChannel_enumToString(EImageOutputChannel outputChannel)
+{
+    switch (outputChannel)
+    {
+        case EImageOutputChannel::All:
+            return "all";
+        case EImageOutputChannel::Red:
+            return "red";
+        case EImageOutputChannel::Green:
+            return "green";
+        case EImageOutputChannel::Blue:
+            return "blue";
+        case EImageOutputChannel::Alpha:
+            return "alpha";
+    }
+    throw std::invalid_argument("Invalid EImageOutputChannel Enum");
+}
+
+inline EImageOutputChannel EImageOutputChannel_stringToEnum(std::string outputChannel)
+{
+    boost::to_lower(outputChannel);
+    if (outputChannel == "all")
+        return EImageOutputChannel::All;
+    if (outputChannel == "red")
+        return EImageOutputChannel::Red;
+    if (outputChannel == "green")
+        return EImageOutputChannel::Green;
+    if (outputChannel == "blue")
+        return EImageOutputChannel::Blue;
+    if (outputChannel == "alpha")
+        return EImageOutputChannel::Alpha;
+
+    throw std::invalid_argument("Unrecognized output channel '" + outputChannel + "'");
+}
+
+inline std::ostream& operator<<(std::ostream& os, EImageOutputChannel e) { return os << EImageOutputChannel_enumToString(e); }
+
+inline std::istream& operator>>(std::istream& in, EImageOutputChannel& e)
+{
+    std::string token(std::istreambuf_iterator<char>(in), {});
+    e = EImageOutputChannel_stringToEnum(token);
+    return in;
+}
 struct NLMeansFilterParams
 {
     bool enabled;
@@ -914,6 +968,7 @@ void saveImage(image::Image<image::RGBAfColor>& image,
                std::map<std::string, std::string> inputMetadata,
                const std::vector<std::string>& metadataFolders,
                const EImageFormat outputFormat,
+               const EImageOutputChannel outputChannel,
                const image::ImageWriteOptions options)
 {
     // Read metadata path
@@ -974,6 +1029,37 @@ void saveImage(image::Image<image::RGBAfColor>& image,
 
     // Save image
     ALICEVISION_LOG_TRACE("Export image: '" << outputPath << "'.");
+    if (outputChannel == EImageOutputChannel::Red)
+        for (int j = 0; j < image.height(); ++j)
+            for (int i = 0; i < image.width(); ++i)
+            {
+                image(j, i).g() = image(j, i).r();
+                image(j, i).b() = image(j, i).r();
+            }
+    else if (outputChannel == EImageOutputChannel::Green)
+        for (int j = 0; j < image.height(); ++j)
+            for (int i = 0; i < image.width(); ++i)
+            {
+                image(j, i).r() = image(j, i).g();
+                image(j, i).b() = image(j, i).g();
+            }
+    else if (outputChannel == EImageOutputChannel::Blue)
+        for (int j = 0; j < image.height(); ++j)
+            for (int i = 0; i < image.width(); ++i)
+            {
+                image(j, i).r() = image(j, i).b();
+                image(j, i).g() = image(j, i).b();
+            }
+    else if (outputChannel == EImageOutputChannel::Alpha)
+        for (int j = 0; j < image.height(); ++j)
+            for (int i = 0; i < image.width(); ++i)
+            {
+                image(j, i).r() = image(j, i).a();
+                image(j, i).g() = image(j, i).a();
+                image(j, i).b() = image(j, i).a();
+                image(j, i).a() = 1.0;
+            }
+
     if (outputFormat == EImageFormat::Grayscale)
     {
         image::Image<float> outputImage;
@@ -993,6 +1079,24 @@
     }
 }
 
+bool isFileReadable(const std::string& filename)
+{
+    if (!fs::exists(filename))
+    {
+        return false;
+    }
+    if (!fs::is_regular_file(filename))
+    {
+        return false;
+    }
+    std::ifstream file(filename, std::ios::binary);
+    if (!file.is_open())
+    {
+        return false;
+    }
+    return true;
+}
+
 int aliceVision_main(int argc, char* argv[])
 {
     std::string inputExpression;
@@ -1000,6 +1104,7 @@
     std::vector<std::string> metadataFolders;
     std::string outputPath;
     EImageFormat outputFormat = EImageFormat::RGBA;
+    EImageOutputChannel outputChannel = EImageOutputChannel::All;
     image::EImageColorSpace inputColorSpace = image::EImageColorSpace::AUTO;
     image::EImageColorSpace workingColorSpace = image::EImageColorSpace::LINEAR;
     image::EImageColorSpace outputColorSpace = image::EImageColorSpace::LINEAR;
@@ -1020,6 +1125,8 @@ int aliceVision_main(int argc, char* argv[])
     std::string lensCorrectionProfileInfo;
     bool lensCorrectionProfileSearchIgnoreCameraModel = true;
     std::string sensorDatabasePath;
+    int rangeBlocksCount = 1;
+    int rangeIteration = 0;
 
     ProcessingParams pParams;
 
@@ -1141,6 +1248,9 @@ int aliceVision_main(int argc, char* argv[])
         ("outputFormat", po::value(&outputFormat)->default_value(outputFormat),
          "Output image format (rgba, rgb, grayscale).")
 
+        ("outputChannel", po::value(&outputChannel)->default_value(outputChannel),
+         "Output image channel(s) (all, red, green, blue, alpha). Default: all")
+
         ("outputColorSpace", po::value(&outputColorSpace)->default_value(outputColorSpace),
          ("Output color space: " + image::EImageColorSpace_informations()).c_str())
 
@@ -1210,7 +1320,13 @@ int aliceVision_main(int argc, char* argv[])
          "JPEG quality after compression (between 0 and 100).")
 
         ("extension", po::value(&extension)->default_value(extension),
-         "Output image extension (like exr, or empty to keep the source file format.");
+         "Output image extension (like exr, or empty to keep the source file format).")
+
+        ("rangeBlocksCount", po::value(&rangeBlocksCount)->default_value(rangeBlocksCount),
+         "Number of blocks in which the computation is split.")
+
+        ("rangeIteration", po::value(&rangeIteration)->default_value(rangeIteration),
+         "Computation block index.");
     // clang-format on
 
     CmdLine cmdline("AliceVision imageProcessing");
@@ -1299,214 +1415,338 @@ int aliceVision_main(int argc, char* argv[])
         }
     }
 
-    const int size = ViewPaths.size();
-    int i = 0;
-
-    for (auto& viewIt : ViewPaths)
-    {
-        const IndexT viewId = viewIt.first;
-        const std::string viewPath = viewIt.second;
-        sfmData::View& view = sfmData.getView(viewId);
+    std::vector<IndexT> updatedViews;
 
-        const bool isRAW = image::isRawFormat(viewPath);
+    const int d = ViewPaths.size() / rangeBlocksCount;
+    const int m = ViewPaths.size() % rangeBlocksCount;
+    int rangeSize = d + (rangeIteration < m ? 1 : 0);
+    int rangeStart = rangeIteration * d + std::min(rangeIteration, m);
 
-        const fs::path fsPath = viewPath;
-        const std::string fileName = fsPath.stem().string();
-        const std::string fileExt = fsPath.extension().string();
-        const std::string outputExt = extension.empty() ? (isRAW ?
".exr" : fileExt) : (std::string(".") + extension); + const std::string outputfilePath = + (fs::path(outputPath) / ((pParams.keepImageFilename ? fileName : std::to_string(viewId)) + outputExt)).generic_string(); + + ALICEVISION_LOG_INFO(idx + 1 - rangeStart << "/" << rangeSize << " - Process view '" << viewId << "'."); + + auto metadata = view.getImage().getMetadata(); + + if (pParams.applyDcpMetadata && metadata["AliceVision:ColorSpace"] != "no_conversion") + { + ALICEVISION_LOG_WARNING("A dcp profile will be applied on an image containing non raw data!"); + } + + image::ImageReadOptions options; + options.workingColorSpace = pParams.applyDcpMetadata ? image::EImageColorSpace::NO_CONVERSION : workingColorSpace; + + if (isRAW) { - options.rawColorInterpretation = image::ERawColorInterpretation_stringToEnum(view.getImage().getRawColorInterpretation()); - if (options.rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata) + if (rawColorInterpretation == image::ERawColorInterpretation::Auto) { - options.useDCPColorMatrixOnly = false; - options.doWBAfterDemosaicing = true; + options.rawColorInterpretation = image::ERawColorInterpretation_stringToEnum(view.getImage().getRawColorInterpretation()); + if (options.rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata) + { + options.useDCPColorMatrixOnly = false; + options.doWBAfterDemosaicing = true; + } + else + { + options.useDCPColorMatrixOnly = useDCPColorMatrixOnly; + options.doWBAfterDemosaicing = doWBAfterDemosaicing; + } } else { + options.rawColorInterpretation = rawColorInterpretation; options.useDCPColorMatrixOnly = useDCPColorMatrixOnly; options.doWBAfterDemosaicing = doWBAfterDemosaicing; } + options.colorProfileFileName = view.getImage().getColorProfileFileName(); + options.demosaicingAlgo = demosaicingAlgo; + options.highlightMode = highlightMode; + options.rawExposureAdjustment = std::pow(2.f, pParams.rawExposureAdjust); + options.rawAutoBright = pParams.rawAutoBright; + options.correlatedColorTemperature = correlatedColorTemperature; + pParams.correlatedColorTemperature = correlatedColorTemperature; + pParams.enableColorTempProcessing = options.rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing; } else { - options.rawColorInterpretation = rawColorInterpretation; - options.useDCPColorMatrixOnly = useDCPColorMatrixOnly; - options.doWBAfterDemosaicing = doWBAfterDemosaicing; + options.inputColorSpace = inputColorSpace; } - options.colorProfileFileName = view.getImage().getColorProfileFileName(); - options.demosaicingAlgo = demosaicingAlgo; - options.highlightMode = highlightMode; - options.rawExposureAdjustment = std::pow(2.f, pParams.rawExposureAdjust); - options.rawAutoBright = pParams.rawAutoBright; - options.correlatedColorTemperature = correlatedColorTemperature; - pParams.correlatedColorTemperature = correlatedColorTemperature; - pParams.enableColorTempProcessing = options.rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing; - } - else - { - options.inputColorSpace = inputColorSpace; - } - if (pParams.lensCorrection.enabled && pParams.lensCorrection.vignetting) - { - if (!view.getImage().getVignettingParams(pParams.lensCorrection.vParams)) + if (pParams.lensCorrection.enabled && pParams.lensCorrection.vignetting) { - pParams.lensCorrection.vParams.clear(); + if (!view.getImage().getVignettingParams(pParams.lensCorrection.vParams)) + { + pParams.lensCorrection.vParams.clear(); + } } - } - if (pParams.lensCorrection.enabled && 
pParams.lensCorrection.chromaticAberration) - { - std::vector caGParams, caBGParams, caRGParams; - view.getImage().getChromaticAberrationParams(caGParams, caBGParams, caRGParams); + if (pParams.lensCorrection.enabled && pParams.lensCorrection.chromaticAberration) + { + std::vector caGParams, caBGParams, caRGParams; + view.getImage().getChromaticAberrationParams(caGParams, caBGParams, caRGParams); - pParams.lensCorrection.caGModel.init3(caGParams); - pParams.lensCorrection.caBGModel.init3(caBGParams); - pParams.lensCorrection.caRGModel.init3(caRGParams); + pParams.lensCorrection.caGModel.init3(caGParams); + pParams.lensCorrection.caBGModel.init3(caBGParams); + pParams.lensCorrection.caRGModel.init3(caRGParams); - if (pParams.lensCorrection.caGModel.FocalLengthX == 0.0) + if (pParams.lensCorrection.caGModel.FocalLengthX == 0.0) + { + float sensorWidth = view.getImage().getSensorWidth(); + pParams.lensCorrection.caGModel.FocalLengthX = view.getImage().getWidth() * view.getImage().getMetadataFocalLength() / + sensorWidth / std::max(view.getImage().getWidth(), view.getImage().getHeight()); + } + if (pParams.lensCorrection.caGModel.FocalLengthY == 0.0) + { + float sensorHeight = view.getImage().getSensorHeight(); + pParams.lensCorrection.caGModel.FocalLengthY = view.getImage().getHeight() * view.getImage().getMetadataFocalLength() / + sensorHeight / std::max(view.getImage().getWidth(), view.getImage().getHeight()); + } + + if ((pParams.lensCorrection.caGModel.FocalLengthX <= 0.0) || (pParams.lensCorrection.caGModel.FocalLengthY <= 0.0)) + { + pParams.lensCorrection.caGModel.reset(); + pParams.lensCorrection.caBGModel.reset(); + pParams.lensCorrection.caRGModel.reset(); + } + } + + // Read original image + image::Image image; + image::readImage(viewPath, image, options); + + // If exposureCompensation is needed for sfmData files + if (pParams.exposureCompensation) { - float sensorWidth = view.getImage().getSensorWidth(); - pParams.lensCorrection.caGModel.FocalLengthX = view.getImage().getWidth() * view.getImage().getMetadataFocalLength() / - sensorWidth / std::max(view.getImage().getWidth(), view.getImage().getHeight()); + const double medianCameraExposure = sfmData.getMedianCameraExposureSetting().getExposure(); + const double cameraExposure = view.getImage().getCameraExposureSetting().getExposure(); + const double ev = std::log2(1.0 / cameraExposure); + const float compensationFactor = static_cast(medianCameraExposure / cameraExposure); + + ALICEVISION_LOG_INFO("View: " << viewId << ", Ev: " << ev << ", Ev compensation: " << compensationFactor); + + for (int i = 0; i < image.width() * image.height(); ++i) + { + image(i)[0] *= compensationFactor; + image(i)[1] *= compensationFactor; + image(i)[2] *= compensationFactor; + } } - if (pParams.lensCorrection.caGModel.FocalLengthY == 0.0) + + sfmData::Intrinsics::const_iterator iterIntrinsic = sfmData.getIntrinsics().find(view.getIntrinsicId()); + std::shared_ptr cam = iterIntrinsic->second; + + std::map viewMetadata = view.getImage().getMetadata(); + + if (pParams.par.enabled) { - float sensorHeight = view.getImage().getSensorHeight(); - pParams.lensCorrection.caGModel.FocalLengthY = view.getImage().getHeight() * view.getImage().getMetadataFocalLength() / - sensorHeight / std::max(view.getImage().getWidth(), view.getImage().getHeight()); + auto iso = camera::IntrinsicScaleOffset::cast(cam); + if (iso) + { + pParams.par.value = iso->getScale().y() / iso->getScale().x(); + } } - if ((pParams.lensCorrection.caGModel.FocalLengthX <= 0.0) || 
(pParams.lensCorrection.caGModel.FocalLengthY <= 0.0)) + // Image processing + processImage(image, pParams, viewMetadata, cam); + + if (pParams.applyDcpMetadata) { - pParams.lensCorrection.caGModel.reset(); - pParams.lensCorrection.caBGModel.reset(); - pParams.lensCorrection.caRGModel.reset(); + workingColorSpace = image::EImageColorSpace::ACES2065_1; } - } - // Read original image - image::Image image; - image::readImage(viewPath, image, options); + image::ImageWriteOptions writeOptions; - // If exposureCompensation is needed for sfmData files - if (pParams.exposureCompensation) - { - const double medianCameraExposure = sfmData.getMedianCameraExposureSetting().getExposure(); - const double cameraExposure = view.getImage().getCameraExposureSetting().getExposure(); - const double ev = std::log2(1.0 / cameraExposure); - const float compensationFactor = static_cast(medianCameraExposure / cameraExposure); + writeOptions.fromColorSpace(workingColorSpace); + writeOptions.toColorSpace(outputColorSpace); + writeOptions.exrCompressionMethod(exrCompressionMethod); + writeOptions.exrCompressionLevel(exrCompressionLevel); + writeOptions.jpegCompress(jpegCompress); + writeOptions.jpegQuality(jpegQuality); + + if (boost::to_lower_copy(fs::path(outputPath).extension().string()) == ".exr") + { + // Select storage data type + writeOptions.storageDataType(storageDataType); + } - ALICEVISION_LOG_INFO("View: " << viewId << ", Ev: " << ev << ", Ev compensation: " << compensationFactor); + // Save the image + saveImage(image, viewPath, outputfilePath, viewMetadata, metadataFolders, outputFormat, outputChannel, writeOptions); - for (int i = 0; i < image.width() * image.height(); ++i) + // Update view for this modification + view.getImage().setImagePath(outputfilePath); + view.getImage().setWidth(image.width()); + view.getImage().setHeight(image.height()); + view.getImage().addMetadata("AliceVision:ColorSpace", image::EImageColorSpace_enumToString(outputColorSpace)); + if (viewMetadata.find("Orientation") != viewMetadata.end()) + view.getImage().addMetadata("Orientation", viewMetadata.at("Orientation")); + + if (pParams.reorient && image.width() != cam->w() && image.width() == cam->h()) // The image has been rotated by automatic reorientation { - image(i)[0] *= compensationFactor; - image(i)[1] *= compensationFactor; - image(i)[2] *= compensationFactor; + camera::IntrinsicBase* cam2 = cam->clone(); + + cam2->setWidth(image.width()); + cam2->setHeight(image.height()); + double sensorWidth = cam->sensorWidth(); + cam2->setSensorWidth(cam->sensorHeight()); + cam2->setSensorHeight(sensorWidth); + + IndexT intrinsicId = cam2->hashValue(); + view.setIntrinsicId(intrinsicId); + sfmData.getIntrinsics().emplace(intrinsicId, cam2); } + + updatedViews.push_back(viewId); } + idx++; + } - sfmData::Intrinsics::const_iterator iterIntrinsic = sfmData.getIntrinsics().find(view.getIntrinsicId()); - std::shared_ptr cam = iterIntrinsic->second; + if (rangeIteration == 0) + { + if ((pParams.scaleFactor != 1.0f) || (pParams.par.enabled && pParams.par.value != 1.0)) + { + const bool parRowDecimation = pParams.par.enabled && pParams.par.rowDecimation; - std::map viewMetadata = view.getImage().getMetadata(); + const float scaleFactorW = pParams.scaleFactor * ((!pParams.par.enabled || parRowDecimation) ? 1.0 : pParams.par.value); + const float scaleFactorH = pParams.scaleFactor * (parRowDecimation ? 
(1.0 / pParams.par.value) : 1.0); + for (auto& i : sfmData.getIntrinsics()) + { + i.second->rescale(scaleFactorW, scaleFactorH); + } + } - if (pParams.par.enabled) + // Save sfmData with modified path to images + std::string sfmfilePath = (fs::path(outputPath) / fs::path(inputExpression).stem()).generic_string(); + if (rangeBlocksCount > 1) + { + sfmfilePath += "_0"; + } + sfmfilePath += (fs::path(inputExpression).extension()).generic_string(); + std::string sfmTempfilePath = (fs::path(outputPath) / fs::path(inputExpression).stem()).generic_string(); + if (rangeBlocksCount > 1) + { + sfmTempfilePath += "_0_tmp"; + } + sfmTempfilePath += (fs::path(inputExpression).extension()).generic_string(); + if (!sfmDataIO::save(sfmData, sfmTempfilePath, sfmDataIO::ESfMData(sfmDataIO::ALL))) + { + ALICEVISION_LOG_ERROR("The output SfMData file '" << sfmfilePath << "' cannot be written."); + return EXIT_FAILURE; + } + fs::rename(sfmTempfilePath, sfmfilePath); + } + else + { + std::string sfmfilePath_in = (fs::path(outputPath) / fs::path(inputExpression).stem()).generic_string(); + sfmfilePath_in += "_" + std::to_string(rangeIteration - 1); + sfmfilePath_in += (fs::path(inputExpression).extension()).generic_string(); + + std::string timeoutFilePath = (fs::path(outputPath) / fs::path("timeout.txt")).generic_string(); + + sfmData::SfMData sfmDataNew; + std::chrono::seconds interval = std::chrono::seconds(1); + std::chrono::seconds timeout = std::chrono::seconds(rangeIteration*60); + auto start_time = std::chrono::steady_clock::now(); + std::chrono::seconds durationSeconds = std::chrono::duration_cast(std::chrono::steady_clock::now() - start_time); + while (!isFileReadable(sfmfilePath_in)) { - auto iso = camera::IntrinsicScaleOffset::cast(cam); - if (iso) + auto elapsed = std::chrono::steady_clock::now() - start_time; + durationSeconds = std::chrono::duration_cast(elapsed); + if (elapsed > timeout || fs::exists(timeoutFilePath)) { - pParams.par.value = iso->getScale().y() / iso->getScale().x(); + if (!fs::exists(timeoutFilePath)) + { + std::ofstream timeoutFile(timeoutFilePath); + timeoutFile.close(); + } + ALICEVISION_LOG_ERROR("Timeout reached waiting the SfMData file '" << sfmfilePath_in << "' after " << durationSeconds.count() << "s."); + return EXIT_FAILURE; } + std::this_thread::sleep_for(interval); } - - // Image processing - processImage(image, pParams, viewMetadata, cam); - - if (pParams.applyDcpMetadata) + ALICEVISION_LOG_INFO("Temporary SfMData file '" << sfmfilePath_in << "' available after " << durationSeconds.count() << "s."); + if (!sfmDataIO::load(sfmDataNew, sfmfilePath_in, sfmDataIO::ALL)) { - workingColorSpace = image::EImageColorSpace::ACES2065_1; + ALICEVISION_LOG_ERROR("Temporary SfMData file '" << sfmfilePath_in << "' cannot be opened"); + return EXIT_FAILURE; } - image::ImageWriteOptions writeOptions; - - writeOptions.fromColorSpace(workingColorSpace); - writeOptions.toColorSpace(outputColorSpace); - writeOptions.exrCompressionMethod(exrCompressionMethod); - writeOptions.exrCompressionLevel(exrCompressionLevel); - writeOptions.jpegCompress(jpegCompress); - writeOptions.jpegQuality(jpegQuality); - - if (boost::to_lower_copy(fs::path(outputPath).extension().string()) == ".exr") + for (auto& viewId : updatedViews) { - // Select storage data type - writeOptions.storageDataType(storageDataType); + sfmDataNew.getViews().emplace(viewId, sfmData.getViews()[viewId]); } - // Save the image - saveImage(image, viewPath, outputfilePath, viewMetadata, metadataFolders, outputFormat, writeOptions); 
- - // Update view for this modification - view.getImage().setImagePath(outputfilePath); - view.getImage().setWidth(image.width()); - view.getImage().setHeight(image.height()); - view.getImage().addMetadata("AliceVision:ColorSpace", image::EImageColorSpace_enumToString(outputColorSpace)); - if (viewMetadata.find("Orientation") != viewMetadata.end()) - view.getImage().addMetadata("Orientation", viewMetadata.at("Orientation")); - - if (pParams.reorient && image.width() != cam->w() && image.width() == cam->h()) // The image has been rotated by automatic reorientation + std::string sfmfilePath = (fs::path(outputPath) / fs::path(inputExpression).stem()).generic_string(); + if (rangeIteration < rangeBlocksCount - 1) { - camera::IntrinsicBase* cam2 = cam->clone(); - - cam2->setWidth(image.width()); - cam2->setHeight(image.height()); - double sensorWidth = cam->sensorWidth(); - cam2->setSensorWidth(cam->sensorHeight()); - cam2->setSensorHeight(sensorWidth); + sfmfilePath += "_" + std::to_string(rangeIteration); + } + sfmfilePath += (fs::path(inputExpression).extension()).generic_string(); - IndexT intrinsicId = cam2->hashValue(); - view.setIntrinsicId(intrinsicId); - sfmData.getIntrinsics().emplace(intrinsicId, cam2); + std::string sfmTempfilePath = (fs::path(outputPath) / fs::path(inputExpression).stem()).generic_string(); + if (rangeIteration < rangeBlocksCount - 1) + { + sfmTempfilePath += "_" + std::to_string(rangeIteration) + "tmp"; } - } + sfmTempfilePath += (fs::path(inputExpression).extension()).generic_string(); - if ((pParams.scaleFactor != 1.0f) || (pParams.par.enabled && pParams.par.value != 1.0)) - { - const bool parRowDecimation = pParams.par.enabled && pParams.par.rowDecimation; - const float scaleFactorW = pParams.scaleFactor * ((!pParams.par.enabled || parRowDecimation) ? 1.0 : pParams.par.value); - const float scaleFactorH = pParams.scaleFactor * (parRowDecimation ? (1.0 / pParams.par.value) : 1.0); - for (auto& i : sfmData.getIntrinsics()) + if (!sfmDataIO::save(sfmDataNew, sfmTempfilePath, sfmDataIO::ESfMData(sfmDataIO::ALL))) { - i.second->rescale(scaleFactorW, scaleFactorH); + ALICEVISION_LOG_ERROR("The output SfMData file '" << sfmfilePath << "' cannot be written."); + return EXIT_FAILURE; } - } - // Save sfmData with modified path to images - const std::string sfmfilePath = (fs::path(outputPath) / fs::path(inputExpression).filename()).generic_string(); - if (!sfmDataIO::save(sfmData, sfmfilePath, sfmDataIO::ESfMData(sfmDataIO::ALL))) - { - ALICEVISION_LOG_ERROR("The output SfMData file '" << sfmfilePath << "' cannot be written."); - return EXIT_FAILURE; + fs::rename(sfmTempfilePath, sfmfilePath); + + fs::remove(sfmfilePath_in); } } else @@ -1540,19 +1780,50 @@ int aliceVision_main(int argc, char* argv[]) } } - const int size = filesStrPaths.size(); + const int d = filesStrPaths.size() / rangeBlocksCount; + const int m = filesStrPaths.size() % rangeBlocksCount; + int rangeSize = d + (rangeIteration < m ? 
1 : 0); + int rangeStart = rangeIteration * d + std::min(rangeIteration, m); - if (!size) + int rangeEnd = filesStrPaths.size(); + // set range + if (rangeStart != -1) { - ALICEVISION_LOG_ERROR("Any images was found."); - ALICEVISION_LOG_ERROR("Input folders or input expression '" << inputExpression << "' may be incorrect ?"); - return EXIT_FAILURE; + if (rangeStart < 0 || rangeSize < 0) + { + ALICEVISION_LOG_ERROR("Range is incorrect"); + return EXIT_FAILURE; + } + + if (rangeStart + rangeSize > filesStrPaths.size()) + { + rangeSize = filesStrPaths.size() - rangeStart; + } + + rangeEnd = rangeStart + rangeSize; + + if (rangeSize <= 0) + { + if (rangeIteration > 0) + { + ALICEVISION_LOG_WARNING("Nothing to compute."); + return EXIT_SUCCESS; + } + else + { + ALICEVISION_LOG_ERROR("Any images was found."); + ALICEVISION_LOG_ERROR("Input folders or input expression '" << inputExpression << "' may be incorrect ?"); + return EXIT_FAILURE; + } + } } else { - ALICEVISION_LOG_INFO(size << " images found."); + rangeStart = 0; } + ALICEVISION_LOG_INFO("Launch images processing from " << rangeStart << " to " << rangeEnd - 1 << " (" << rangeSize << ")."); + image::DCPDatabase dcpDatabase; LCPdatabase lcpStore(lensCorrectionProfileInfo, lensCorrectionProfileSearchIgnoreCameraModel); @@ -1579,295 +1850,300 @@ int aliceVision_main(int argc, char* argv[]) } } - int i = 0; + int idx = 0; for (const std::string& inputFilePath : filesStrPaths) { - const bool isRAW = image::isRawFormat(inputFilePath); + if (idx >= rangeEnd) break; + if (idx >= rangeStart) + { + const bool isRAW = image::isRawFormat(inputFilePath); - const fs::path path = fs::path(inputFilePath); - const std::string filename = path.stem().string(); - const std::string fileExt = path.extension().string(); - const std::string outputExt = extension.empty() ? (isRAW ? ".exr" : fileExt) : (std::string(".") + extension); + const fs::path path = fs::path(inputFilePath); + const std::string filename = path.stem().string(); + const std::string fileExt = path.extension().string(); + const std::string outputExt = extension.empty() ? (isRAW ? ".exr" : fileExt) : (std::string(".") + extension); - ALICEVISION_LOG_INFO(++i << "/" << size << " - Process image '" << filename << fileExt << "'."); + ALICEVISION_LOG_INFO(idx + 1 - rangeStart << "/" << rangeSize << " - Process image '" << filename << fileExt << "'."); - const std::string userExt = fs::path(outputPath).extension().string(); - std::string outputFilePath; + const std::string userExt = fs::path(outputPath).extension().string(); + std::string outputFilePath; - if ((size == 1) && !userExt.empty()) - { - if (image::isSupported(userExt)) - { - outputFilePath = fs::path(outputPath).generic_string(); - } - else + if ((rangeSize == 1) && (rangeBlocksCount == 1) && !userExt.empty()) { - outputFilePath = (fs::path(outputPath).parent_path() / (filename + outputExt)).generic_string(); - ALICEVISION_LOG_WARNING("Extension " << userExt << " is not supported! 
Output image saved in " << outputFilePath); - } - // Create output directory if it does not exist - if (!fs::exists(fs::path(outputPath).parent_path())) - { - if (!fs::create_directory(fs::path(outputPath).parent_path())) + if (image::isSupported(userExt)) { - ALICEVISION_LOG_ERROR("Unexisting directory " << fs::path(outputPath).parent_path().generic_string() << " cannot be created"); - return EXIT_FAILURE; + outputFilePath = fs::path(outputPath).generic_string(); } - } - } - else - { - outputFilePath = (fs::path(outputPath) / (filename + outputExt)).generic_string(); - // Create output directory if it does not exist - if (!fs::exists(fs::path(outputPath))) - { - if (!fs::create_directory(fs::path(outputPath))) - { - ALICEVISION_LOG_ERROR("Unexisting directory " << outputPath << " cannot be created"); - return EXIT_FAILURE; - } - } - } - - image::DCPProfile dcpProf; - sfmData::View view; // used to extract and complete metadata - view.getImage().setImagePath(inputFilePath); - int width, height; - const auto metadata = image::readImageMetadata(inputFilePath, width, height); - view.getImage().setMetadata(image::getMapFromMetadata(metadata)); - view.getImage().setWidth(width); - view.getImage().setHeight(height); - std::shared_ptr intrinsicBase; - // Get DSLR maker and model in view metadata. - const std::string& make = view.getImage().getMetadataMake(); - const std::string& model = view.getImage().getMetadataModel(); - - if (isRAW && (rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing || - rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata)) - { - // Load DCP color profiles database if not already loaded - dcpDatabase.load(colorProfileDatabaseDirPath.empty() ? getColorProfileDatabaseFolder() : colorProfileDatabaseDirPath, false); - - // Get DCP profile - if (!dcpDatabase.retrieveDcpForCamera(make, model, dcpProf)) - { - if (errorOnMissingColorProfile) + else { - ALICEVISION_LOG_ERROR("The specified DCP database does not contain an appropriate profil for DSLR " << make << " " << model); - return EXIT_FAILURE; + outputFilePath = (fs::path(outputPath).parent_path() / (filename + outputExt)).generic_string(); + ALICEVISION_LOG_WARNING("Extension " << userExt << " is not supported! Output image saved in " << outputFilePath); } - else + // Create output directory if it does not exist + if (!fs::exists(fs::path(outputPath).parent_path())) { - ALICEVISION_LOG_WARNING("Can't find color profile for input image " << inputFilePath); + if (!fs::create_directory(fs::path(outputPath).parent_path())) + { + ALICEVISION_LOG_ERROR("Unexisting directory " << fs::path(outputPath).parent_path().generic_string() << " cannot be created"); + return EXIT_FAILURE; + } } } - - // Add color profile info in metadata - view.getImage().addDCPMetadata(dcpProf); - } - - if (isRAW && pParams.lensCorrection.enabled && - (pParams.lensCorrection.geometry || pParams.lensCorrection.vignetting || pParams.lensCorrection.chromaticAberration)) - { - // try to find an appropriate Lens Correction Profile - LCPinfo* lcpData = nullptr; - if (lcpStore.size() == 1) - { - lcpData = lcpStore.retrieveLCP(); - } - else if (!lcpStore.empty()) + else { - // Find an LCP file that matches the camera model and the lens model. 
- const std::string& lensModel = view.getImage().getMetadataLensModel(); - const int lensID = view.getImage().getMetadataLensID(); - - if (!make.empty() && !lensModel.empty()) + outputFilePath = (fs::path(outputPath) / (filename + outputExt)).generic_string(); + // Create output directory if it does not exist + if (!fs::exists(fs::path(outputPath))) { -#pragma omp critical(lcp) - lcpData = lcpStore.findLCP(make, model, lensModel, lensID, 1); + if (!fs::create_directory(fs::path(outputPath))) + { + ALICEVISION_LOG_ERROR("Unexisting directory " << outputPath << " cannot be created"); + return EXIT_FAILURE; + } } } - if ((lcpData != nullptr) && !(lcpData->isEmpty())) + image::DCPProfile dcpProf; + sfmData::View view; // used to extract and complete metadata + view.getImage().setImagePath(inputFilePath); + int width, height; + const auto metadata = image::readImageMetadata(inputFilePath, width, height); + view.getImage().setMetadata(image::getMapFromMetadata(metadata)); + view.getImage().setWidth(width); + view.getImage().setHeight(height); + std::shared_ptr intrinsicBase; + // Get DSLR maker and model in view metadata. + const std::string& make = view.getImage().getMetadataMake(); + const std::string& model = view.getImage().getMetadataModel(); + + if (isRAW && (rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing || + rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata)) { - double focalLengthmm = view.getImage().getMetadataFocalLength(); - const float apertureValue = 2.f * std::log(view.getImage().getMetadataFNumber()) / std::log(2.0); - const float focusDistance = 0.f; - - LensParam lensParam; - lcpData->getDistortionParams(focalLengthmm, focusDistance, lensParam); - lcpData->getVignettingParams(focalLengthmm, apertureValue, lensParam); - lcpData->getChromaticParams(focalLengthmm, focusDistance, lensParam); - - // Get sensor size by combining information from sensor database and view's metadata - double sensorWidth = -1.0; - double sensorHeight = -1.0; - camera::EInitMode intrinsicInitMode = camera::EInitMode::UNKNOWN; - view.getImage().getSensorSize(sensorDatabase, sensorWidth, sensorHeight, focalLengthmm, intrinsicInitMode, true); - - if (lensParam.hasVignetteParams() && !lensParam.vignParams.isEmpty && pParams.lensCorrection.vignetting) - { - float FocX = lensParam.vignParams.FocalLengthX != 0.0 ? lensParam.vignParams.FocalLengthX - : width * focalLengthmm / sensorWidth / std::max(width, height); - float FocY = lensParam.vignParams.FocalLengthY != 0.0 ? lensParam.vignParams.FocalLengthY - : height * focalLengthmm / sensorHeight / std::max(width, height); - - pParams.lensCorrection.vParams.clear(); + // Load DCP color profiles database if not already loaded + dcpDatabase.load(colorProfileDatabaseDirPath.empty() ? 
getColorProfileDatabaseFolder() : colorProfileDatabaseDirPath, false); - if (FocX == 0.0 || FocY == 0.0) + // Get DCP profile + if (!dcpDatabase.retrieveDcpForCamera(make, model, dcpProf)) + { + if (errorOnMissingColorProfile) { - ALICEVISION_LOG_WARNING("Vignetting correction is requested but cannot be applied due to missing info."); + ALICEVISION_LOG_ERROR("The specified DCP database does not contain an appropriate profil for DSLR " << make << " " << model); + return EXIT_FAILURE; } else { - pParams.lensCorrection.vParams.push_back(FocX); - pParams.lensCorrection.vParams.push_back(FocY); - pParams.lensCorrection.vParams.push_back(lensParam.vignParams.ImageXCenter); - pParams.lensCorrection.vParams.push_back(lensParam.vignParams.ImageYCenter); - pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam1); - pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam2); - pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam3); + ALICEVISION_LOG_WARNING("Can't find color profile for input image " << inputFilePath); } } - if (pParams.lensCorrection.chromaticAberration && lensParam.hasChromaticParams() && !lensParam.ChromaticGreenParams.isEmpty) + // Add color profile info in metadata + view.getImage().addDCPMetadata(dcpProf); + } + + if (isRAW && pParams.lensCorrection.enabled && + (pParams.lensCorrection.geometry || pParams.lensCorrection.vignetting || pParams.lensCorrection.chromaticAberration)) + { + // try to find an appropriate Lens Correction Profile + LCPinfo* lcpData = nullptr; + if (lcpStore.size() == 1) + { + lcpData = lcpStore.retrieveLCP(); + } + else if (!lcpStore.empty()) { - if (lensParam.ChromaticGreenParams.FocalLengthX == 0.0) + // Find an LCP file that matches the camera model and the lens model. + const std::string& lensModel = view.getImage().getMetadataLensModel(); + const int lensID = view.getImage().getMetadataLensID(); + + if (!make.empty() && !lensModel.empty()) { - lensParam.ChromaticGreenParams.FocalLengthX = width * focalLengthmm / sensorWidth / std::max(width, height); + #pragma omp critical(lcp) + lcpData = lcpStore.findLCP(make, model, lensModel, lensID, 1); } - if (lensParam.ChromaticGreenParams.FocalLengthY == 0.0) + } + + if ((lcpData != nullptr) && !(lcpData->isEmpty())) + { + double focalLengthmm = view.getImage().getMetadataFocalLength(); + const float apertureValue = 2.f * std::log(view.getImage().getMetadataFNumber()) / std::log(2.0); + const float focusDistance = 0.f; + + LensParam lensParam; + lcpData->getDistortionParams(focalLengthmm, focusDistance, lensParam); + lcpData->getVignettingParams(focalLengthmm, apertureValue, lensParam); + lcpData->getChromaticParams(focalLengthmm, focusDistance, lensParam); + + // Get sensor size by combining information from sensor database and view's metadata + double sensorWidth = -1.0; + double sensorHeight = -1.0; + camera::EInitMode intrinsicInitMode = camera::EInitMode::UNKNOWN; + view.getImage().getSensorSize(sensorDatabase, sensorWidth, sensorHeight, focalLengthmm, intrinsicInitMode, true); + + if (lensParam.hasVignetteParams() && !lensParam.vignParams.isEmpty && pParams.lensCorrection.vignetting) { - lensParam.ChromaticGreenParams.FocalLengthY = height * focalLengthmm / sensorHeight / std::max(width, height); + float FocX = lensParam.vignParams.FocalLengthX != 0.0 ? lensParam.vignParams.FocalLengthX + : width * focalLengthmm / sensorWidth / std::max(width, height); + float FocY = lensParam.vignParams.FocalLengthY != 0.0 ? 
lensParam.vignParams.FocalLengthY + : height * focalLengthmm / sensorHeight / std::max(width, height); + + pParams.lensCorrection.vParams.clear(); + + if (FocX == 0.0 || FocY == 0.0) + { + ALICEVISION_LOG_WARNING("Vignetting correction is requested but cannot be applied due to missing info."); + } + else + { + pParams.lensCorrection.vParams.push_back(FocX); + pParams.lensCorrection.vParams.push_back(FocY); + pParams.lensCorrection.vParams.push_back(lensParam.vignParams.ImageXCenter); + pParams.lensCorrection.vParams.push_back(lensParam.vignParams.ImageYCenter); + pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam1); + pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam2); + pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam3); + } } - if (lensParam.ChromaticGreenParams.FocalLengthX == 0.0 || lensParam.ChromaticGreenParams.FocalLengthY == 0.0) + if (pParams.lensCorrection.chromaticAberration && lensParam.hasChromaticParams() && !lensParam.ChromaticGreenParams.isEmpty) { - pParams.lensCorrection.caGModel.reset(); - pParams.lensCorrection.caBGModel.reset(); - pParams.lensCorrection.caRGModel.reset(); - - ALICEVISION_LOG_WARNING("Chromatic Aberration correction is requested but cannot be applied due to missing info."); + if (lensParam.ChromaticGreenParams.FocalLengthX == 0.0) + { + lensParam.ChromaticGreenParams.FocalLengthX = width * focalLengthmm / sensorWidth / std::max(width, height); + } + if (lensParam.ChromaticGreenParams.FocalLengthY == 0.0) + { + lensParam.ChromaticGreenParams.FocalLengthY = height * focalLengthmm / sensorHeight / std::max(width, height); + } + + if (lensParam.ChromaticGreenParams.FocalLengthX == 0.0 || lensParam.ChromaticGreenParams.FocalLengthY == 0.0) + { + pParams.lensCorrection.caGModel.reset(); + pParams.lensCorrection.caBGModel.reset(); + pParams.lensCorrection.caRGModel.reset(); + + ALICEVISION_LOG_WARNING("Chromatic Aberration correction is requested but cannot be applied due to missing info."); + } + else + { + pParams.lensCorrection.caGModel = lensParam.ChromaticGreenParams; + pParams.lensCorrection.caBGModel = lensParam.ChromaticBlueGreenParams; + pParams.lensCorrection.caRGModel = lensParam.ChromaticRedGreenParams; + } } - else + + if (pParams.lensCorrection.geometry) { - pParams.lensCorrection.caGModel = lensParam.ChromaticGreenParams; - pParams.lensCorrection.caBGModel = lensParam.ChromaticBlueGreenParams; - pParams.lensCorrection.caRGModel = lensParam.ChromaticRedGreenParams; + // build intrinsic + const camera::EINTRINSIC defaultCameraModel = camera::EINTRINSIC::PINHOLE_CAMERA; + const camera::EDISTORTION defaultDistortionModel = camera::EDISTORTION::DISTORTION_RADIALK3; + const double defaultFocalLength = -1.0; + const double defaultFieldOfView = -1.0; + const double defaultFocalRatio = 1.0; + const double defaultOffsetX = 0.0; + const double defaultOffsetY = 0.0; + intrinsicBase = sfmDataIO::getViewIntrinsic(view, + intrinsicInitMode, + focalLengthmm, + sensorWidth, + defaultFocalLength, + defaultFieldOfView, + defaultFocalRatio, + defaultOffsetX, + defaultOffsetY, + &lensParam, + defaultCameraModel, + defaultDistortionModel); + + pParams.lensCorrection.geometryModel = lensParam.perspParams; } } - - if (pParams.lensCorrection.geometry) + else { - // build intrinsic - const camera::EINTRINSIC defaultCameraModel = camera::EINTRINSIC::PINHOLE_CAMERA; - const camera::EDISTORTION defaultDistortionModel = camera::EDISTORTION::DISTORTION_RADIALK3; - const double 
defaultFocalLength = -1.0; - const double defaultFieldOfView = -1.0; - const double defaultFocalRatio = 1.0; - const double defaultOffsetX = 0.0; - const double defaultOffsetY = 0.0; - intrinsicBase = sfmDataIO::getViewIntrinsic(view, - intrinsicInitMode, - focalLengthmm, - sensorWidth, - defaultFocalLength, - defaultFieldOfView, - defaultFocalRatio, - defaultOffsetX, - defaultOffsetY, - &lensParam, - defaultCameraModel, - defaultDistortionModel); - - pParams.lensCorrection.geometryModel = lensParam.perspParams; + ALICEVISION_LOG_WARNING("No LCP file found for image " << inputFilePath); + ALICEVISION_LOG_WARNING("Requested lens correction(s) won't be applied"); } } - else + + std::map md = view.getImage().getMetadata(); + + pParams.par.value = 1.0; + if (pParams.par.enabled) { - ALICEVISION_LOG_WARNING("No LCP file found for image " << inputFilePath); - ALICEVISION_LOG_WARNING("Requested lens correction(s) won't be applied"); + double pixelAspectRatio = 1.0; + view.getImage().getDoubleMetadata({"PixelAspectRatio"}, pixelAspectRatio); + pParams.par.value = pixelAspectRatio; + md["PixelAspectRatio"] = "1.0"; } - } - std::map md = view.getImage().getMetadata(); + // set readOptions + image::ImageReadOptions readOptions; - pParams.par.value = 1.0; - if (pParams.par.enabled) - { - double pixelAspectRatio = 1.0; - view.getImage().getDoubleMetadata({"PixelAspectRatio"}, pixelAspectRatio); - pParams.par.value = pixelAspectRatio; - md["PixelAspectRatio"] = "1.0"; - } + if (isRAW) + { + readOptions.colorProfileFileName = dcpProf.info.filename; + if (dcpProf.info.filename.empty() && ((rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing) || + (rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata))) + { + // Fallback case of missing profile but no error requested + readOptions.rawColorInterpretation = image::ERawColorInterpretation::LibRawWhiteBalancing; + } + else + { + readOptions.rawColorInterpretation = rawColorInterpretation; + } - // set readOptions - image::ImageReadOptions readOptions; + if (pParams.applyDcpMetadata && md["AliceVision::ColorSpace"] != "no_conversion") + { + ALICEVISION_LOG_WARNING("A dcp profile will be applied on an image containing non raw data!"); + } - if (isRAW) - { - readOptions.colorProfileFileName = dcpProf.info.filename; - if (dcpProf.info.filename.empty() && ((rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing) || - (rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata))) - { - // Fallback case of missing profile but no error requested - readOptions.rawColorInterpretation = image::ERawColorInterpretation::LibRawWhiteBalancing; + readOptions.useDCPColorMatrixOnly = useDCPColorMatrixOnly; + readOptions.doWBAfterDemosaicing = doWBAfterDemosaicing; + readOptions.demosaicingAlgo = demosaicingAlgo; + readOptions.highlightMode = highlightMode; + readOptions.rawExposureAdjustment = std::pow(2.f, pParams.rawExposureAdjust); + readOptions.rawAutoBright = pParams.rawAutoBright; + readOptions.correlatedColorTemperature = correlatedColorTemperature; + pParams.correlatedColorTemperature = correlatedColorTemperature; + pParams.enableColorTempProcessing = readOptions.rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing; + + pParams.useDCPColorMatrixOnly = useDCPColorMatrixOnly; + if (pParams.applyDcpMetadata) + { + workingColorSpace = image::EImageColorSpace::ACES2065_1; + } } else { - readOptions.rawColorInterpretation = rawColorInterpretation; + 
readOptions.inputColorSpace = inputColorSpace; } - if (pParams.applyDcpMetadata && md["AliceVision::ColorSpace"] != "no_conversion") - { - ALICEVISION_LOG_WARNING("A dcp profile will be applied on an image containing non raw data!"); - } + readOptions.workingColorSpace = pParams.applyDcpMetadata ? image::EImageColorSpace::NO_CONVERSION : workingColorSpace; - readOptions.useDCPColorMatrixOnly = useDCPColorMatrixOnly; - readOptions.doWBAfterDemosaicing = doWBAfterDemosaicing; - readOptions.demosaicingAlgo = demosaicingAlgo; - readOptions.highlightMode = highlightMode; - readOptions.rawExposureAdjustment = std::pow(2.f, pParams.rawExposureAdjust); - readOptions.rawAutoBright = pParams.rawAutoBright; - readOptions.correlatedColorTemperature = correlatedColorTemperature; - pParams.correlatedColorTemperature = correlatedColorTemperature; - pParams.enableColorTempProcessing = readOptions.rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing; - - pParams.useDCPColorMatrixOnly = useDCPColorMatrixOnly; - if (pParams.applyDcpMetadata) - { - workingColorSpace = image::EImageColorSpace::ACES2065_1; - } - } - else - { - readOptions.inputColorSpace = inputColorSpace; - } + // Read original image + image::Image image; + image::readImage(inputFilePath, image, readOptions); - readOptions.workingColorSpace = pParams.applyDcpMetadata ? image::EImageColorSpace::NO_CONVERSION : workingColorSpace; + // Image processing + processImage(image, pParams, md, intrinsicBase); - // Read original image - image::Image image; - image::readImage(inputFilePath, image, readOptions); + image::ImageWriteOptions writeOptions; - // Image processing - processImage(image, pParams, md, intrinsicBase); + writeOptions.fromColorSpace(workingColorSpace); + writeOptions.toColorSpace(outputColorSpace); + writeOptions.exrCompressionMethod(exrCompressionMethod); + writeOptions.exrCompressionLevel(exrCompressionLevel); - image::ImageWriteOptions writeOptions; - - writeOptions.fromColorSpace(workingColorSpace); - writeOptions.toColorSpace(outputColorSpace); - writeOptions.exrCompressionMethod(exrCompressionMethod); - writeOptions.exrCompressionLevel(exrCompressionLevel); + if (boost::to_lower_copy(fs::path(outputPath).extension().string()) == ".exr") + { + // Select storage data type + writeOptions.storageDataType(storageDataType); + } - if (boost::to_lower_copy(fs::path(outputPath).extension().string()) == ".exr") - { - // Select storage data type - writeOptions.storageDataType(storageDataType); + // Save the image + saveImage(image, inputFilePath, outputFilePath, md, metadataFolders, outputFormat, outputChannel, writeOptions); } - - // Save the image - saveImage(image, inputFilePath, outputFilePath, md, metadataFolders, outputFormat, writeOptions); + idx++; } }
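
Note on the new `--rangeBlocksCount` / `--rangeIteration` options: the patch splits the input views (or image files) into `rangeBlocksCount` contiguous blocks whose sizes differ by at most one, and each invocation processes only the block selected by `rangeIteration`. A minimal standalone sketch of that arithmetic, for reference only (the helper name `computeRange` and the example values are illustrative, not part of the patch):

```cpp
#include <algorithm>
#include <cstdio>
#include <utility>

// Split `totalItems` into `blocksCount` contiguous blocks; the first
// (totalItems % blocksCount) blocks each get one extra item.
std::pair<int, int> computeRange(int totalItems, int blocksCount, int iteration)
{
    const int d = totalItems / blocksCount;  // base block size
    const int m = totalItems % blocksCount;  // number of blocks with one extra item
    const int rangeSize = d + (iteration < m ? 1 : 0);
    const int rangeStart = iteration * d + std::min(iteration, m);
    return {rangeStart, rangeSize};
}

int main()
{
    // 10 views split into 3 blocks.
    for (int it = 0; it < 3; ++it)
    {
        const auto [start, size] = computeRange(10, 3, it);
        std::printf("block %d: start=%d size=%d\n", it, start, size);
    }
    return 0;
}
```

For 10 items in 3 blocks this prints starts 0, 4 and 7 with sizes 4, 3 and 3, the same split that the `d` / `m` / `rangeSize` / `rangeStart` computation in the patch produces.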
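Note on `outputChannel`: when a single channel is requested, `saveImage()` copies that channel into R, G and B before the image is written (and, for `alpha`, resets the alpha channel to 1), so the output is a grayscale-looking visualization of the selected channel stored in an RGB(A) image. A simplified, self-contained illustration of that per-pixel remapping; `RGBAf`, `Channel` and `extractChannel` are stand-ins for this sketch, assuming float RGBA pixels like `image::RGBAfColor`:

```cpp
#include <cstdio>

// Stand-in for a float RGBA pixel (assumption, mirroring image::RGBAfColor).
struct RGBAf { float r, g, b, a; };

enum class Channel { All, Red, Green, Blue, Alpha };

// Replicate the selected channel into R, G and B; for Alpha, also reset alpha to 1,
// mirroring the per-pixel loops added to saveImage().
RGBAf extractChannel(RGBAf p, Channel c)
{
    switch (c)
    {
        case Channel::Red:   p.g = p.b = p.r; break;
        case Channel::Green: p.r = p.b = p.g; break;
        case Channel::Blue:  p.r = p.g = p.b; break;
        case Channel::Alpha: p.r = p.g = p.b = p.a; p.a = 1.f; break;
        case Channel::All:   break;
    }
    return p;
}

int main()
{
    const RGBAf px{0.2f, 0.5f, 0.8f, 0.5f};
    const RGBAf red = extractChannel(px, Channel::Red);
    std::printf("red-only pixel: %.1f %.1f %.1f %.1f\n", red.r, red.g, red.b, red.a);
    return 0;
}
```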
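Note on the chained SfMData updates: block 0 writes the SfMData with a `_0` suffix before the input file's extension (no suffix when `rangeBlocksCount` is 1), and every later block polls for the file produced by the previous block via `isFileReadable()`, with a 1 s interval, a timeout of `rangeIteration * 60` seconds and a shared `timeout.txt` marker, before merging its updated views into it. A rough sketch of such a wait loop under the same idea; `waitForReadableFile` and the `sfmData_1.sfm` name are illustrative, and the sketch uses `std::filesystem` rather than the `fs` (boost::filesystem) alias used in the source:

```cpp
#include <chrono>
#include <filesystem>
#include <fstream>
#include <thread>

namespace fs = std::filesystem;

// Wait until `path` exists, is a regular file and can be opened, or give up after `timeout`.
bool waitForReadableFile(const fs::path& path,
                         std::chrono::seconds timeout,
                         std::chrono::seconds interval = std::chrono::seconds(1))
{
    const auto start = std::chrono::steady_clock::now();
    while (std::chrono::steady_clock::now() - start < timeout)
    {
        if (fs::exists(path) && fs::is_regular_file(path))
        {
            std::ifstream file(path, std::ios::binary);
            if (file.is_open())
                return true;
        }
        std::this_thread::sleep_for(interval);
    }
    return false;
}

int main()
{
    // Block 2 would wait for the SfMData written by block 1 before merging its views.
    const bool ok = waitForReadableFile("sfmData_1.sfm", std::chrono::seconds(5));
    return ok ? 0 : 1;
}
```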