diff --git a/src/aliceVision/lensCorrectionProfile/lcp.cpp b/src/aliceVision/lensCorrectionProfile/lcp.cpp index bd218968ff..236b7e6f73 100644 --- a/src/aliceVision/lensCorrectionProfile/lcp.cpp +++ b/src/aliceVision/lensCorrectionProfile/lcp.cpp @@ -126,14 +126,47 @@ void XMLCALL LCPinfo::XmlStartHandler(void* pLCPinfo, const char* el, const char { LCPdata->_currReadingState = LCPReadingState::FillChromaticGreenModel; LCPdata->currLensParam.setChromaticParamsStatus(true); + if(attr[0]) + { + for(int i = 0; attr[i]; i += 2) + { + std::string key(attr[i]); + std::string value(attr[i + 1]); + + LCPdata->_currText = value; + LCPdata->setRectilinearModel(LCPdata->currLensParam.ChromaticGreenParams, key); + } + } } else if (element == "stCamera:ChromaticRedGreenModel") { LCPdata->_currReadingState = LCPReadingState::FillChromaticRedGreenModel; + if(attr[0]) + { + for(int i = 0; attr[i]; i += 2) + { + std::string key(attr[i]); + std::string value(attr[i + 1]); + + LCPdata->_currText = value; + LCPdata->setRectilinearModel(LCPdata->currLensParam.ChromaticRedGreenParams, key); + } + } } else if (element == "stCamera:ChromaticBlueGreenModel") { LCPdata->_currReadingState = LCPReadingState::FillChromaticBlueGreenModel; + if(attr[0]) + { + for(int i = 0; attr[i]; i += 2) + { + std::string key(attr[i]); + std::string value(attr[i + 1]); + + LCPdata->_currText = value; + LCPdata->setRectilinearModel(LCPdata->currLensParam.ChromaticBlueGreenParams, key); + } + } } else if (attr[0]) { @@ -530,6 +563,7 @@ bool LCPinfo::search(settingsInfo& settings, LCPCorrectionMode mode, int& iLow, std::vector v_isDistortionValid; std::vector v_isVignetteValid; + std::vector v_isChromaticValid; // Search the best focal lengths with respect to the target one in settings // At the end of the loop, iLow is the index in v_lensParam of the lens parameter set with the closest focal length, lower or equal to the one in settings. @@ -544,9 +578,13 @@ bool LCPinfo::search(settingsInfo& settings, LCPCorrectionMode mode, int& iLow, v_isDistortionValid.push_back(currParam.isFisheye() ? 
!currParam.fisheyeParams.isEmpty : !currParam.perspParams.isEmpty); v_isVignetteValid.push_back(currParam.hasVignetteParams() && !currParam.vignParams.isEmpty); + v_isChromaticValid.push_back(currParam.hasChromaticParams() && !currParam.ChromaticGreenParams.isEmpty && + !currParam.ChromaticBlueGreenParams.isEmpty && + !currParam.ChromaticRedGreenParams.isEmpty); bool isCurrentValid = (mode == LCPCorrectionMode::DISTORTION && v_isDistortionValid.back()) || - (mode == LCPCorrectionMode::VIGNETTE && v_isVignetteValid.back()); + (mode == LCPCorrectionMode::VIGNETTE && v_isVignetteValid.back()) || + (mode == LCPCorrectionMode::CA && v_isChromaticValid.back()); if (isCurrentValid) { @@ -610,7 +648,8 @@ bool LCPinfo::search(settingsInfo& settings, LCPCorrectionMode mode, int& iLow, for (int i = 0; i < v_lensParams.size(); ++i) { bool isCurrentValid = (mode == LCPCorrectionMode::DISTORTION && v_isDistortionValid[i]) || - (mode == LCPCorrectionMode::VIGNETTE && v_isVignetteValid[i]); + (mode == LCPCorrectionMode::VIGNETTE && v_isVignetteValid[i]) || + (mode == LCPCorrectionMode::CA && v_isChromaticValid[i]); if (isCurrentValid && v_lensParams[i].camData.FocalLength == v_lensParams[iLow].camData.FocalLength) { @@ -767,7 +806,7 @@ bool LCPinfo::search(settingsInfo& settings, LCPCorrectionMode mode, int& iLow, return true; } } - else if ((mode == LCPCorrectionMode::DISTORTION) && (settings.FocusDistance == 0.f)) + else if ((mode != LCPCorrectionMode::VIGNETTE) && (settings.FocusDistance == 0.f)) { if (v_lensParams[iHigh].camData.FocalLength > v_lensParams[iLow].camData.FocalLength) { @@ -835,6 +874,10 @@ void LCPinfo::combine(size_t iLow, size_t iHigh, float weightLow, LCPCorrectionM pOut.perspParams.RadialDistortParam1 = interpolate(weightLow, p1.perspParams.RadialDistortParam1, p2.perspParams.RadialDistortParam1); pOut.perspParams.RadialDistortParam2 = interpolate(weightLow, p1.perspParams.RadialDistortParam2, p2.perspParams.RadialDistortParam2); pOut.perspParams.RadialDistortParam3 = interpolate(weightLow, p1.perspParams.RadialDistortParam3, p2.perspParams.RadialDistortParam3); + pOut.perspParams.TangentialDistortParam1 = interpolate( + weightLow, p1.perspParams.TangentialDistortParam1, p2.perspParams.TangentialDistortParam1); + pOut.perspParams.TangentialDistortParam2 = interpolate( + weightLow, p1.perspParams.TangentialDistortParam2, p2.perspParams.TangentialDistortParam2); pOut.perspParams.isEmpty = false; } else @@ -846,30 +889,74 @@ void LCPinfo::combine(size_t iLow, size_t iHigh, float weightLow, LCPCorrectionM } case LCPCorrectionMode::CA: { - pOut.ChromaticGreenParams.FocalLengthX = interpolate(weightLow, p1.ChromaticGreenParams.FocalLengthX, p2.ChromaticGreenParams.FocalLengthX); - pOut.ChromaticGreenParams.FocalLengthY = interpolate(weightLow, p1.ChromaticGreenParams.FocalLengthY, p2.ChromaticGreenParams.FocalLengthY); - pOut.ChromaticGreenParams.ImageXCenter = interpolate(weightLow, p1.ChromaticGreenParams.ImageXCenter, p2.ChromaticGreenParams.ImageXCenter); - pOut.ChromaticGreenParams.ImageYCenter = interpolate(weightLow, p1.ChromaticGreenParams.ImageYCenter, p2.ChromaticGreenParams.ImageYCenter); - pOut.ChromaticGreenParams.RadialDistortParam1 = interpolate(weightLow, p1.ChromaticGreenParams.RadialDistortParam1, p2.ChromaticGreenParams.RadialDistortParam1); - pOut.ChromaticGreenParams.RadialDistortParam2 = interpolate(weightLow, p1.ChromaticGreenParams.RadialDistortParam2, p2.ChromaticGreenParams.RadialDistortParam2); - pOut.ChromaticGreenParams.RadialDistortParam3 = 
interpolate(weightLow, p1.ChromaticGreenParams.RadialDistortParam3, p2.ChromaticGreenParams.RadialDistortParam3); - pOut.ChromaticGreenParams.isEmpty = false; - pOut.ChromaticBlueGreenParams.FocalLengthX = interpolate(weightLow, p1.ChromaticBlueGreenParams.FocalLengthX, p2.ChromaticBlueGreenParams.FocalLengthX); - pOut.ChromaticBlueGreenParams.FocalLengthY = interpolate(weightLow, p1.ChromaticBlueGreenParams.FocalLengthY, p2.ChromaticBlueGreenParams.FocalLengthY); - pOut.ChromaticBlueGreenParams.ImageXCenter = interpolate(weightLow, p1.ChromaticBlueGreenParams.ImageXCenter, p2.ChromaticBlueGreenParams.ImageXCenter); - pOut.ChromaticBlueGreenParams.ImageYCenter = interpolate(weightLow, p1.ChromaticBlueGreenParams.ImageYCenter, p2.ChromaticBlueGreenParams.ImageYCenter); - pOut.ChromaticBlueGreenParams.RadialDistortParam1 = interpolate(weightLow, p1.ChromaticBlueGreenParams.RadialDistortParam1, p2.ChromaticBlueGreenParams.RadialDistortParam1); - pOut.ChromaticBlueGreenParams.RadialDistortParam2 = interpolate(weightLow, p1.ChromaticBlueGreenParams.RadialDistortParam2, p2.ChromaticBlueGreenParams.RadialDistortParam2); - pOut.ChromaticBlueGreenParams.RadialDistortParam3 = interpolate(weightLow, p1.ChromaticBlueGreenParams.RadialDistortParam3, p2.ChromaticBlueGreenParams.RadialDistortParam3); - pOut.ChromaticBlueGreenParams.isEmpty = false; - pOut.ChromaticRedGreenParams.FocalLengthX = interpolate(weightLow, p1.ChromaticRedGreenParams.FocalLengthX, p2.ChromaticRedGreenParams.FocalLengthX); - pOut.ChromaticRedGreenParams.FocalLengthY = interpolate(weightLow, p1.ChromaticRedGreenParams.FocalLengthY, p2.ChromaticRedGreenParams.FocalLengthY); - pOut.ChromaticRedGreenParams.ImageXCenter = interpolate(weightLow, p1.ChromaticRedGreenParams.ImageXCenter, p2.ChromaticRedGreenParams.ImageXCenter); - pOut.ChromaticRedGreenParams.ImageYCenter = interpolate(weightLow, p1.ChromaticRedGreenParams.ImageYCenter, p2.ChromaticRedGreenParams.ImageYCenter); - pOut.ChromaticRedGreenParams.RadialDistortParam1 = interpolate(weightLow, p1.ChromaticRedGreenParams.RadialDistortParam1, p2.ChromaticRedGreenParams.RadialDistortParam1); - pOut.ChromaticRedGreenParams.RadialDistortParam2 = interpolate(weightLow, p1.ChromaticRedGreenParams.RadialDistortParam2, p2.ChromaticRedGreenParams.RadialDistortParam2); - pOut.ChromaticRedGreenParams.RadialDistortParam3 = interpolate(weightLow, p1.ChromaticRedGreenParams.RadialDistortParam3, p2.ChromaticRedGreenParams.RadialDistortParam3); - pOut.ChromaticRedGreenParams.isEmpty = false; + if (p1.hasChromaticParams() && !p1.ChromaticGreenParams.isEmpty && p2.hasChromaticParams() && + !p2.ChromaticGreenParams.isEmpty) + { + pOut.setChromaticParamsStatus(true); + pOut.ChromaticGreenParams.FocalLengthX = interpolate(weightLow, p1.ChromaticGreenParams.FocalLengthX, + p2.ChromaticGreenParams.FocalLengthX); + pOut.ChromaticGreenParams.FocalLengthY = interpolate(weightLow, p1.ChromaticGreenParams.FocalLengthY, + p2.ChromaticGreenParams.FocalLengthY); + pOut.ChromaticGreenParams.ImageXCenter = interpolate(weightLow, p1.ChromaticGreenParams.ImageXCenter, + p2.ChromaticGreenParams.ImageXCenter); + pOut.ChromaticGreenParams.ImageYCenter = interpolate(weightLow, p1.ChromaticGreenParams.ImageYCenter, + p2.ChromaticGreenParams.ImageYCenter); + pOut.ChromaticGreenParams.ScaleFactor = interpolate(weightLow, p1.ChromaticGreenParams.ScaleFactor, p2.ChromaticGreenParams.ScaleFactor); + pOut.ChromaticGreenParams.RadialDistortParam1 = interpolate( + weightLow, p1.ChromaticGreenParams.RadialDistortParam1, 
p2.ChromaticGreenParams.RadialDistortParam1); + pOut.ChromaticGreenParams.RadialDistortParam2 = interpolate( + weightLow, p1.ChromaticGreenParams.RadialDistortParam2, p2.ChromaticGreenParams.RadialDistortParam2); + pOut.ChromaticGreenParams.RadialDistortParam3 = interpolate( + weightLow, p1.ChromaticGreenParams.RadialDistortParam3, p2.ChromaticGreenParams.RadialDistortParam3); + pOut.ChromaticGreenParams.isEmpty = false; + pOut.ChromaticBlueGreenParams.FocalLengthX = interpolate( + weightLow, p1.ChromaticBlueGreenParams.FocalLengthX, p2.ChromaticBlueGreenParams.FocalLengthX); + pOut.ChromaticBlueGreenParams.FocalLengthY = interpolate( + weightLow, p1.ChromaticBlueGreenParams.FocalLengthY, p2.ChromaticBlueGreenParams.FocalLengthY); + pOut.ChromaticBlueGreenParams.ImageXCenter = interpolate( + weightLow, p1.ChromaticBlueGreenParams.ImageXCenter, p2.ChromaticBlueGreenParams.ImageXCenter); + pOut.ChromaticBlueGreenParams.ImageYCenter = interpolate( + weightLow, p1.ChromaticBlueGreenParams.ImageYCenter, p2.ChromaticBlueGreenParams.ImageYCenter); + pOut.ChromaticBlueGreenParams.ScaleFactor = interpolate( + weightLow, p1.ChromaticBlueGreenParams.ScaleFactor, p2.ChromaticBlueGreenParams.ScaleFactor); + pOut.ChromaticBlueGreenParams.RadialDistortParam1 = + interpolate(weightLow, p1.ChromaticBlueGreenParams.RadialDistortParam1, + p2.ChromaticBlueGreenParams.RadialDistortParam1); + pOut.ChromaticBlueGreenParams.RadialDistortParam2 = + interpolate(weightLow, p1.ChromaticBlueGreenParams.RadialDistortParam2, + p2.ChromaticBlueGreenParams.RadialDistortParam2); + pOut.ChromaticBlueGreenParams.RadialDistortParam3 = + interpolate(weightLow, p1.ChromaticBlueGreenParams.RadialDistortParam3, + p2.ChromaticBlueGreenParams.RadialDistortParam3); + pOut.ChromaticBlueGreenParams.isEmpty = false; + pOut.ChromaticRedGreenParams.FocalLengthX = interpolate( + weightLow, p1.ChromaticRedGreenParams.FocalLengthX, p2.ChromaticRedGreenParams.FocalLengthX); + pOut.ChromaticRedGreenParams.FocalLengthY = interpolate( + weightLow, p1.ChromaticRedGreenParams.FocalLengthY, p2.ChromaticRedGreenParams.FocalLengthY); + pOut.ChromaticRedGreenParams.ImageXCenter = interpolate( + weightLow, p1.ChromaticRedGreenParams.ImageXCenter, p2.ChromaticRedGreenParams.ImageXCenter); + pOut.ChromaticRedGreenParams.ImageYCenter = interpolate( + weightLow, p1.ChromaticRedGreenParams.ImageYCenter, p2.ChromaticRedGreenParams.ImageYCenter); + pOut.ChromaticRedGreenParams.ScaleFactor = interpolate( + weightLow, p1.ChromaticRedGreenParams.ScaleFactor, p2.ChromaticRedGreenParams.ScaleFactor); + pOut.ChromaticRedGreenParams.RadialDistortParam1 = + interpolate(weightLow, p1.ChromaticRedGreenParams.RadialDistortParam1, + p2.ChromaticRedGreenParams.RadialDistortParam1); + pOut.ChromaticRedGreenParams.RadialDistortParam2 = + interpolate(weightLow, p1.ChromaticRedGreenParams.RadialDistortParam2, + p2.ChromaticRedGreenParams.RadialDistortParam2); + pOut.ChromaticRedGreenParams.RadialDistortParam3 = + interpolate(weightLow, p1.ChromaticRedGreenParams.RadialDistortParam3, + p2.ChromaticRedGreenParams.RadialDistortParam3); + pOut.ChromaticRedGreenParams.isEmpty = false; + } + else + { + pOut.setChromaticParamsStatus(false); + pOut.ChromaticGreenParams.isEmpty = true; + pOut.ChromaticBlueGreenParams.isEmpty = true; + pOut.ChromaticRedGreenParams.isEmpty = true; + } } } } @@ -904,6 +991,21 @@ void LCPinfo::getVignettingParams(const float& focalLength, const float& apertur } } +void LCPinfo::getChromaticParams(const float& focalLength, const float& focusDistance, 
LensParam& lparam) +{ + settingsInfo userSettings; + userSettings.ApertureValue = 0.f; + userSettings.FocalLength = focalLength; + userSettings.FocusDistance = focusDistance; + + int iLow, iHigh; + float weightLow; + if(search(userSettings, LCPCorrectionMode::CA, iLow, iHigh, weightLow)) + { + combine(iLow, iHigh, weightLow, LCPCorrectionMode::CA, lparam); + } +} + void LCPinfo::setCommonSettings(const std::string& name) { if (name == "stCamera:Author") @@ -968,9 +1070,9 @@ void LCPinfo::setRectilinearModel(RectilinearModel& model, const std::string& na else if (name == "stCamera:RadialDistortParam3") model.RadialDistortParam3 = std::stof(_currText.c_str()); else if (name == "stCamera:TangentiallDistortParam1") - model.RadialDistortParam1 = std::stof(_currText.c_str()); + model.TangentialDistortParam1 = std::stof(_currText.c_str()); else if (name == "stCamera:TangentiallDistortParam2") - model.RadialDistortParam2 = std::stof(_currText.c_str()); + model.TangentialDistortParam2 = std::stof(_currText.c_str()); else if (name == "stCamera:ScaleFactor") model.ScaleFactor = std::stof(_currText.c_str()); } diff --git a/src/aliceVision/lensCorrectionProfile/lcp.hpp b/src/aliceVision/lensCorrectionProfile/lcp.hpp index 25859c32a3..612906e0cb 100644 --- a/src/aliceVision/lensCorrectionProfile/lcp.hpp +++ b/src/aliceVision/lensCorrectionProfile/lcp.hpp @@ -92,33 +92,70 @@ struct RectilinearModel { *this = RectilinearModel(); } -}; -/** - * @brief PerspectiveModel contains parameters of a perspective model of distortion - * Detailed information on this model can be found in the Adobe technical report - * "Adobe Camera Model" part of the documentation of the Adobe free tool Lens Profile Creator. - */ -struct PerspectiveModel -{ - int Version = -1; - float FocalLengthX = 0.f; - float FocalLengthY = 0.f; - float ImageXCenter = 0.5f; - float ImageYCenter = 0.5f; - float ResidualMeanError = 0.f; - float ResidualStandardDeviation = 0.f; - float RadialDistortParam1 = 0.f; - float RadialDistortParam2 = 0.f; - float RadialDistortParam3 = 0.f; - bool isEmpty = true; + void distort(const float x, const float y, float& x_d, float& y_d) + { + const float rr = x * x + y * y; + const float p1 = 1.f + rr * (RadialDistortParam1 + rr * (RadialDistortParam2 + rr * RadialDistortParam3)); + const float p2 = TangentialDistortParam1 * y + TangentialDistortParam2 * x; + x_d = ScaleFactor * (p1 * x + 2 * p2 * x + TangentialDistortParam2 * rr); + y_d = ScaleFactor * (p1 * y + 2 * p2 * y + TangentialDistortParam1 * rr); + } - void reset() + bool init3(const std::vector& params) { - *this = PerspectiveModel(); + if(params.size() < 7) + { + reset(); + return false; + } + FocalLengthX = params[0]; + FocalLengthY = params[1]; + ImageXCenter = params[2]; + ImageYCenter = params[3]; + RadialDistortParam1 = params[4]; + RadialDistortParam2 = params[5]; + RadialDistortParam3 = params[6]; + ScaleFactor = (params.size() >= 8) ? params[7] : 1.0; + isEmpty = false; + return true; + } + + bool init5(const std::vector& params) + { + if(params.size() < 9) + { + reset(); + return false; + } + init3(params); + TangentialDistortParam1 = params[7]; + TangentialDistortParam2 = params[8]; + ScaleFactor = (params.size() >= 10) ? 
params[9] : 1.0; + isEmpty = false; + return true; } }; +inline std::ostream& operator<<(std::ostream& os, const RectilinearModel& model) +{ + if (model.isEmpty) + { + os << "Empty"; + } + else + { + os << "Focal: (" << model.FocalLengthX << ", " << model.FocalLengthY << ")" << std::endl; + os << "Center: (" << model.ImageXCenter << ", " << model.ImageYCenter << ")" << std::endl; + os << "Radial: (" << model.RadialDistortParam1 << ", " << model.RadialDistortParam2 << ", " + << model.RadialDistortParam3 << ")" << std::endl; + os << "Tangential: (" << model.TangentialDistortParam1 << ", " << model.TangentialDistortParam2 << ")" + << std::endl; + os << "scale: " << model.ScaleFactor; + } + return os; +} + /** * @brief VignetteModel contains parameters of a vignetting model of distortion * Detailed information on this model can be found in the Adobe technical report @@ -181,6 +218,12 @@ class LensParam */ void clear(); + /** + * @brief Indicate that no geometric model is available + * @return true if no geometric model is available + */ + bool isEmpty() const { return perspParams.isEmpty && fisheyeParams.isEmpty; } + /** * @brief Indicate that paramaters apply for a fisheye lens * @return true if the fisheye model is the valid one @@ -299,6 +342,14 @@ class LCPinfo */ void getVignettingParams(const float& focalLength, const float& aperture, LensParam& lparam); + /** + * @brief Get defringing parameters for a given couple focal length, focus distance. Focus distance can set to zero. + * @param[in] focalLength Focal length in mm + * @param[in] focusDistance Focus distance in meters + * @param[out] lparam Lens parameters to be populated with the R, G and B defringing models + */ + void getChromaticParams(const float& focalLength, const float& focusDistance, LensParam& lparam); + /** * @brief Indicate that no lens paramater set is available * @return true if no lens paramater set is available diff --git a/src/aliceVision/sfmData/CMakeLists.txt b/src/aliceVision/sfmData/CMakeLists.txt index 904a537463..e65c5a35ef 100644 --- a/src/aliceVision/sfmData/CMakeLists.txt +++ b/src/aliceVision/sfmData/CMakeLists.txt @@ -26,6 +26,7 @@ alicevision_add_library(aliceVision_sfmData aliceVision_feature aliceVision_geometry aliceVision_camera + aliceVision_sensorDB aliceVision_stl Boost::filesystem PRIVATE_LINKS diff --git a/src/aliceVision/sfmData/View.cpp b/src/aliceVision/sfmData/View.cpp index 5347d4c509..f1586569a0 100644 --- a/src/aliceVision/sfmData/View.cpp +++ b/src/aliceVision/sfmData/View.cpp @@ -13,9 +13,10 @@ #include #include - #include #include +#include +#include namespace aliceVision { namespace sfmData { @@ -194,6 +195,187 @@ Vec3 View::getGpsPositionWGS84FromMetadata() const return {lat, lon, alt}; } +int View::getSensorSize(const std::vector& sensorDatabase, double& sensorWidth, double& sensorHeight, + double& focalLengthmm, camera::EInitMode& intrinsicInitMode, bool verbose) +{ + int errCode = 0; + + enum class ESensorWidthSource + { + FROM_DB, + FROM_METADATA_ESTIMATION, + UNKNOWN + } sensorWidthSource = ESensorWidthSource::UNKNOWN; + + const std::string& make = getMetadataMake(); + const std::string& model = getMetadataModel(); + focalLengthmm = getMetadataFocalLength(); + const bool hasCameraMetadata = (!make.empty() || !model.empty()); + + if (hasCameraMetadata) + { + intrinsicInitMode = camera::EInitMode::UNKNOWN; + sensorDB::Datasheet datasheet; + if (sensorDB::getInfo(make, model, sensorDatabase, datasheet)) + { + if (verbose) + { + // sensor is in the database + 
ALICEVISION_LOG_TRACE("Sensor width found in sensor database: " << std::endl + << "\t- brand: " << make << std::endl + << "\t- model: " << model << std::endl + << "\t- sensor width: " << datasheet._sensorWidth << " mm"); + } + + if ((datasheet._model != model) && (datasheet._model != make + " " + model)) + { + // the camera model in sensor database is slightly different + errCode = 3; + + if (verbose) + { + ALICEVISION_LOG_WARNING("The camera found in the sensor database is slightly different for image " << getImagePath()); + ALICEVISION_LOG_WARNING("\t- image camera brand: " << make << std::endl + << "\t- image camera model: " << model << std::endl + << "\t- sensor database camera brand: " << datasheet._brand << std::endl + << "\t- sensor database camera model: " << datasheet._model << std::endl + << "\t- sensor database camera sensor width: " << datasheet._sensorWidth << " mm"); + ALICEVISION_LOG_WARNING("Please check and correct camera model(s) name in the sensor database." << std::endl); + } + } + + sensorWidth = datasheet._sensorWidth; + sensorWidthSource = ESensorWidthSource::FROM_DB; + + if (focalLengthmm > 0.0) + { + intrinsicInitMode = camera::EInitMode::ESTIMATED; + } + } + } + + // try to find / compute with 'FocalLengthIn35mmFilm' metadata + const bool hasFocalIn35mmMetadata = hasDigitMetadata({"Exif:FocalLengthIn35mmFilm", "FocalLengthIn35mmFilm"}); + if (hasFocalIn35mmMetadata) + { + const double imageRatio = static_cast(getWidth()) / static_cast(getHeight()); + const double diag24x36 = std::sqrt(36.0 * 36.0 + 24.0 * 24.0); + const double focalIn35mm = hasFocalIn35mmMetadata ? getDoubleMetadata({"Exif:FocalLengthIn35mmFilm", "FocalLengthIn35mmFilm"}) : -1.0; + + if (sensorWidth == -1.0) + { + const double invRatio = 1.0 / imageRatio; + + if (focalLengthmm > 0.0) + { + // no sensorWidth but valid focalLength and valid focalLengthIn35mm, so deduce + // sensorWith approximation + const double sensorDiag = (focalLengthmm * diag24x36) / focalIn35mm; // 43.3 is the diagonal of 35mm film + sensorWidth = sensorDiag * std::sqrt(1.0 / (1.0 + invRatio * invRatio)); + sensorWidthSource = ESensorWidthSource::FROM_METADATA_ESTIMATION; + } + else + { + // no sensorWidth and no focalLength but valid focalLengthIn35mm, so consider sensorWith + // as 35mm + sensorWidth = diag24x36 * std::sqrt(1.0 / (1.0 + invRatio * invRatio)); + focalLengthmm = sensorWidth * (focalIn35mm) / 36.0; + sensorWidthSource = ESensorWidthSource::UNKNOWN; + } + errCode = 4; + + if (verbose) + { + std::stringstream ss; + ss << "Intrinsic(s) initialized from 'FocalLengthIn35mmFilm' exif metadata in image " << getImagePath() << "\n"; + ss << "\t- sensor width: " << sensorWidth << "\n"; + ss << "\t- focal length: " << focalLengthmm << "\n"; + ALICEVISION_LOG_DEBUG(ss.str()); + } + + sensorHeight = (imageRatio > 1.0) ? 
sensorWidth / imageRatio : sensorWidth * imageRatio; + + intrinsicInitMode = camera::EInitMode::ESTIMATED; + } + else if (sensorWidth > 0 && focalLengthmm <= 0) + { + // valid sensorWidth and valid focalLengthIn35mm but no focalLength, so convert + // focalLengthIn35mm to the actual width of the sensor + const double sensorDiag = std::sqrt(std::pow(sensorWidth, 2) + std::pow(sensorWidth / imageRatio, 2)); + focalLengthmm = (sensorDiag * focalIn35mm) / diag24x36; + + errCode = 4; + + if (verbose) + { + std::stringstream ss; + ss << "Intrinsic(s) initialized from 'FocalLengthIn35mmFilm' exif metadata in image " << getImagePath() << "\n"; + ss << "\t- sensor width: " << sensorWidth << "\n"; + ss << "\t- focal length: " << focalLengthmm << "\n"; + ALICEVISION_LOG_DEBUG(ss.str()); + } + + intrinsicInitMode = camera::EInitMode::ESTIMATED; + } + } + + // error handling + if (sensorWidth == -1.0) + { + if (hasCameraMetadata) + { + // sensor is not in the database + errCode = 1; + if (verbose) + { + std::stringstream ss; + ss << "Sensor width doesn't exist in the sensor database for image " << getImagePath() << "\n"; + ss << "\t- camera brand: " << make << "\n"; + ss << "\t- camera model: " << model << "\n"; + ss << "Please add camera model and sensor width in the database."; + ALICEVISION_LOG_WARNING(ss.str()); + } + } + else + { + // no metadata 'Make' and 'Model' can't find sensor width + errCode = 2; + if (verbose) + { + std::stringstream ss; + ss << "No metadata in image " << getImagePath() << "\n"; + ALICEVISION_LOG_DEBUG(ss.str()); + } + } + } + else + { + // we have a valid sensorWidth information, so we store it into the metadata (where it would + // have been nice to have it in the first place) + if (sensorWidthSource == ESensorWidthSource::FROM_DB) + { + addMetadata("AliceVision:SensorWidth", std::to_string(sensorWidth)); + } + else if (sensorWidthSource == ESensorWidthSource::FROM_METADATA_ESTIMATION) + { + addMetadata("AliceVision:SensorWidthEstimation", std::to_string(sensorWidth)); + } + } + + if (sensorWidth < 0) + { + if (verbose) + { + ALICEVISION_LOG_WARNING("Sensor size is unknown"); + ALICEVISION_LOG_WARNING("Use default sensor size (24x36 mm)"); + } + sensorWidth = 36.0; + sensorHeight = 24.0; + } + + return errCode; +} + std::string GPSExifTags::latitude() { return "GPS:Latitude"; diff --git a/src/aliceVision/sfmData/View.hpp b/src/aliceVision/sfmData/View.hpp index 2e2fa155e3..2fd76bf4a8 100644 --- a/src/aliceVision/sfmData/View.hpp +++ b/src/aliceVision/sfmData/View.hpp @@ -14,6 +14,9 @@ #include #include #include +#include +#include +#include namespace aliceVision { namespace sfmData { @@ -296,7 +299,6 @@ class View return _intrinsicId; } - /** * @brief Get the pose id * @return pose id @@ -610,23 +612,81 @@ class View bool valid = true; double value; - valid = valid && getDoubleMetadata({ "AliceVision:VignParamFocX" }, value); + valid = valid && getDoubleMetadata({"AliceVision:VignParamFocX"}, value); v_vignParam.push_back(static_cast(value)); - valid = valid && getDoubleMetadata({ "AliceVision:VignParamFocY" }, value); + valid = valid && getDoubleMetadata({"AliceVision:VignParamFocY"}, value); v_vignParam.push_back(static_cast(value)); - valid = valid && getDoubleMetadata({ "AliceVision:VignParamCenterX" }, value); + valid = valid && getDoubleMetadata({"AliceVision:VignParamCenterX"}, value); v_vignParam.push_back(static_cast(value)); - valid = valid && getDoubleMetadata({ "AliceVision:VignParamCenterY" }, value); + valid = valid && 
getDoubleMetadata({"AliceVision:VignParamCenterY"}, value); v_vignParam.push_back(static_cast(value)); - valid = valid && getDoubleMetadata({ "AliceVision:VignParam1" }, value); + valid = valid && getDoubleMetadata({"AliceVision:VignParam1"}, value); v_vignParam.push_back(static_cast(value)); - valid = valid && getDoubleMetadata({ "AliceVision:VignParam2" }, value); + valid = valid && getDoubleMetadata({"AliceVision:VignParam2"}, value); v_vignParam.push_back(static_cast(value)); - valid = valid && getDoubleMetadata({ "AliceVision:VignParam3" }, value); + valid = valid && getDoubleMetadata({"AliceVision:VignParam3"}, value); v_vignParam.push_back(static_cast(value)); return valid; } + const bool getChromaticAberrationParams(std::vector& v_caGParam, std::vector& v_caBGParam, std::vector& v_caRGParam) const + { + v_caGParam.clear(); + v_caBGParam.clear(); + v_caRGParam.clear(); + bool valid = true; + double value; + + valid = valid && getDoubleMetadata({"AliceVision:CAGreenFocX"}, value); + v_caGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CAGreenFocY"}, value); + v_caGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CAGreenCenterX"}, value); + v_caGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CAGreenCenterY"}, value); + v_caGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CAGreenParam1"}, value); + v_caGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CAGreenParam2"}, value); + v_caGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CAGreenParam3"}, value); + v_caGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CABlueGreenFocX"}, value); + v_caBGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CABlueGreenFocY"}, value); + v_caBGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CABlueGreenCenterX"}, value); + v_caBGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CABlueGreenCenterY"}, value); + v_caBGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CABlueGreenParam1"}, value); + v_caBGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CABlueGreenParam2"}, value); + v_caBGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CABlueGreenParam3"}, value); + v_caBGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CABlueGreenScaleFactor"}, value); + v_caBGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CARedGreenFocX"}, value); + v_caRGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CARedGreenFocY"}, value); + v_caRGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CARedGreenCenterX"}, value); + v_caRGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CARedGreenCenterY"}, value); + v_caRGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CARedGreenParam1"}, value); + v_caRGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CARedGreenParam2"}, value); + v_caRGParam.push_back(static_cast(value)); + valid = valid && 
getDoubleMetadata({"AliceVision:CARedGreenParam3"}, value); + v_caRGParam.push_back(static_cast(value)); + valid = valid && getDoubleMetadata({"AliceVision:CARedGreenScaleFactor"}, value); + v_caRGParam.push_back(static_cast(value)); + + return valid; + } + const bool hasMetadataDateTimeOriginal() const { return hasMetadata( @@ -659,6 +719,10 @@ class View return timecode; } + double getSensorWidth() const { return getDoubleMetadata({"AliceVision:SensorWidth"}); } + + double getSensorHeight() const { return getDoubleMetadata({"AliceVision:SensorHeight"}); } + /** * @brief Get the view metadata structure * @return the view metadata @@ -847,6 +911,66 @@ class View } } + + /** + * @brief Add vignetting model parameters in metadata + * @param[in] The lens data extracted from a LCP file + */ + void addVignettingMetadata(LensParam& lensParam) + { + addMetadata("AliceVision:VignParamFocX", std::to_string(lensParam.vignParams.FocalLengthX)); + addMetadata("AliceVision:VignParamFocY", std::to_string(lensParam.vignParams.FocalLengthY)); + addMetadata("AliceVision:VignParamCenterX", std::to_string(lensParam.vignParams.ImageXCenter)); + addMetadata("AliceVision:VignParamCenterY", std::to_string(lensParam.vignParams.ImageYCenter)); + addMetadata("AliceVision:VignParam1", std::to_string(lensParam.vignParams.VignetteModelParam1)); + addMetadata("AliceVision:VignParam2", std::to_string(lensParam.vignParams.VignetteModelParam2)); + addMetadata("AliceVision:VignParam3", std::to_string(lensParam.vignParams.VignetteModelParam3)); + } + + /** + * @brief Add chromatic model parameters in metadata + * @param[in] The lens data extracted from a LCP file + */ + void addChromaticMetadata(LensParam& lensParam) + { + addMetadata("AliceVision:CAGreenFocX", std::to_string(lensParam.ChromaticGreenParams.FocalLengthX)); + addMetadata("AliceVision:CAGreenFocY", std::to_string(lensParam.ChromaticGreenParams.FocalLengthY)); + addMetadata("AliceVision:CAGreenCenterX", std::to_string(lensParam.ChromaticGreenParams.ImageXCenter)); + addMetadata("AliceVision:CAGreenCenterY", std::to_string(lensParam.ChromaticGreenParams.ImageYCenter)); + addMetadata("AliceVision:CAGreenParam1", std::to_string(lensParam.ChromaticGreenParams.RadialDistortParam1)); + addMetadata("AliceVision:CAGreenParam2", std::to_string(lensParam.ChromaticGreenParams.RadialDistortParam2)); + addMetadata("AliceVision:CAGreenParam3", std::to_string(lensParam.ChromaticGreenParams.RadialDistortParam3)); + addMetadata("AliceVision:CABlueGreenFocX", std::to_string(lensParam.ChromaticBlueGreenParams.FocalLengthX)); + addMetadata("AliceVision:CABlueGreenFocY", std::to_string(lensParam.ChromaticBlueGreenParams.FocalLengthY)); + addMetadata("AliceVision:CABlueGreenCenterX", std::to_string(lensParam.ChromaticBlueGreenParams.ImageXCenter)); + addMetadata("AliceVision:CABlueGreenCenterY", std::to_string(lensParam.ChromaticBlueGreenParams.ImageYCenter)); + addMetadata("AliceVision:CABlueGreenParam1", std::to_string(lensParam.ChromaticBlueGreenParams.RadialDistortParam1)); + addMetadata("AliceVision:CABlueGreenParam2", std::to_string(lensParam.ChromaticBlueGreenParams.RadialDistortParam2)); + addMetadata("AliceVision:CABlueGreenParam3", std::to_string(lensParam.ChromaticBlueGreenParams.RadialDistortParam3)); + addMetadata("AliceVision:CABlueGreenScaleFactor", std::to_string(lensParam.ChromaticBlueGreenParams.ScaleFactor)); + addMetadata("AliceVision:CARedGreenFocX", std::to_string(lensParam.ChromaticRedGreenParams.FocalLengthX)); + addMetadata("AliceVision:CARedGreenFocY", 
std::to_string(lensParam.ChromaticRedGreenParams.FocalLengthY)); + addMetadata("AliceVision:CARedGreenCenterX", std::to_string(lensParam.ChromaticRedGreenParams.ImageXCenter)); + addMetadata("AliceVision:CARedGreenCenterY", std::to_string(lensParam.ChromaticRedGreenParams.ImageYCenter)); + addMetadata("AliceVision:CARedGreenParam1", std::to_string(lensParam.ChromaticRedGreenParams.RadialDistortParam1)); + addMetadata("AliceVision:CARedGreenParam2", std::to_string(lensParam.ChromaticRedGreenParams.RadialDistortParam2)); + addMetadata("AliceVision:CARedGreenParam3", std::to_string(lensParam.ChromaticRedGreenParams.RadialDistortParam3)); + addMetadata("AliceVision:CARedGreenScaleFactor", std::to_string(lensParam.ChromaticRedGreenParams.ScaleFactor)); + } + + /** + * @brief Get sensor size by combining info in metadata and in sensor database + * @param[in] sensorDatabase The sensor database + * @param[out] sensorWidth The sensor width + * @param[out] sensorHeight The sensor height + * @param[out] focalLengthmm The focal length + * @param[out] intrinsicInitMode The intrinsic init mode + * @param[in] verbose Enable verbosity + * @return An Error or Warning code: 1 - Unknown sensor, 2 - No metadata, 3 - Unsure sensor, 4 - Computation from 35mm Focal + */ + int getSensorSize(const std::vector& sensorDatabase, double& sensorWidth, double& sensorHeight, double& focalLengthmm, camera::EInitMode& intrinsicInitMode, + bool verbose = false); + private: /// image path on disk diff --git a/src/aliceVision/sfmDataIO/viewIO.cpp b/src/aliceVision/sfmDataIO/viewIO.cpp index dffbf0f55c..0b41d5dd4e 100644 --- a/src/aliceVision/sfmDataIO/viewIO.cpp +++ b/src/aliceVision/sfmDataIO/viewIO.cpp @@ -275,6 +275,186 @@ std::shared_ptr getViewIntrinsic( return intrinsic; } +std::shared_ptr getViewIntrinsic(const sfmData::View& view, double mmFocalLength, double sensorWidth, double defaultFocalLength, double defaultFieldOfView, + double defaultFocalRatio, double defaultOffsetX, double defaultOffsetY, LensParam* lensParam, + camera::EINTRINSIC defaultIntrinsicType, camera::EINTRINSIC allowedEintrinsics) +{ + // can't combine defaultFocalLengthPx and defaultFieldOfView + assert(defaultFocalLength < 0 || defaultFieldOfView < 0); + + // get view informations + const std::string& cameraBrand = view.getMetadataMake(); + const std::string& cameraModel = view.getMetadataModel(); + const std::string& bodySerialNumber = view.getMetadataBodySerialNumber(); + const std::string& lensSerialNumber = view.getMetadataLensSerialNumber(); + + double focalLength{-1.0}; + bool hasFocalLengthInput = false; + + if (sensorWidth < 0) + { + ALICEVISION_LOG_WARNING("Sensor size is unknown"); + ALICEVISION_LOG_WARNING("Use default sensor size (36 mm)"); + sensorWidth = 36.0; + } + + if (defaultFocalLength > 0.0) + { + focalLength = defaultFocalLength; + } + + if (defaultFieldOfView > 0.0) + { + const double focalRatio = 0.5 / std::tan(0.5 * degreeToRadian(defaultFieldOfView)); + focalLength = focalRatio * sensorWidth; + } + + camera::EINTRINSIC intrinsicType = defaultIntrinsicType; + + bool isResized = false; + + if (view.hasMetadata({"Exif:PixelXDimension", "PixelXDimension"}) && view.hasMetadata({"Exif:PixelYDimension", "PixelYDimension"})) // has dimension metadata + { + // check if the image is resized + int exifWidth = std::stoi(view.getMetadata({"Exif:PixelXDimension", "PixelXDimension"})); + int exifHeight = std::stoi(view.getMetadata({"Exif:PixelYDimension", "PixelXDimension"})); + + // if metadata is rotated + if (exifWidth == 
view.getHeight() && exifHeight == view.getWidth()) + std::swap(exifWidth, exifHeight); + + if (exifWidth > 0 && exifHeight > 0 && (exifWidth != view.getWidth() || exifHeight != view.getHeight())) + { + ALICEVISION_LOG_WARNING("Resized image detected: " << fs::path(view.getImagePath()).filename().string() << std::endl + << "\t- real image size: " << view.getWidth() << "x" << view.getHeight() << std::endl + << "\t- image size from exif metadata is: " << exifWidth << "x" << exifHeight << std::endl); + isResized = true; + } + } + + // handle case where focal length (mm) is unset or false + if (mmFocalLength <= 0.0) + { + ALICEVISION_LOG_WARNING("Image '" << fs::path(view.getImagePath()).filename().string() << "' focal length (in mm) metadata is missing." << std::endl + << "Can't compute focal length, use default." << std::endl); + } + else + { + // Retrieve the focal from the metadata in mm and convert to pixel. + focalLength = mmFocalLength; + hasFocalLengthInput = true; + } + + double focalLengthIn35mm = 36.0 * focalLength; + double pxFocalLength = (focalLength / sensorWidth) * std::max(view.getWidth(), view.getHeight()); + + bool hasFisheyeCompatibleParameters = ((focalLengthIn35mm > 0.0 && focalLengthIn35mm < 18.0) || (defaultFieldOfView > 100.0)); + bool checkPossiblePinhole = (allowedEintrinsics & camera::EINTRINSIC::PINHOLE_CAMERA_FISHEYE) && hasFisheyeCompatibleParameters; + + // choose intrinsic type + + camera::EINTRINSIC lcpIntrinsicType = (lensParam == nullptr || lensParam->isEmpty()) ? camera::EINTRINSIC::UNKNOWN : + (lensParam->isFisheye() ? camera::EINTRINSIC::PINHOLE_CAMERA_FISHEYE : camera::EINTRINSIC::PINHOLE_CAMERA_RADIAL3); + + if (cameraBrand == "Custom") + { + intrinsicType = camera::EINTRINSIC_stringToEnum(cameraModel); + } + else if ((lcpIntrinsicType != camera::EINTRINSIC::UNKNOWN) && (allowedEintrinsics & lcpIntrinsicType)) + { + intrinsicType = lcpIntrinsicType; + } + else if (checkPossiblePinhole) + { + // If the focal lens is short, the fisheye model should fit better. + intrinsicType = camera::EINTRINSIC::PINHOLE_CAMERA_FISHEYE; + } + else if (intrinsicType == camera::EINTRINSIC::UNKNOWN) + { + // Choose a default camera model if no default type + static const std::initializer_list intrinsicsPriorities = {camera::EINTRINSIC::PINHOLE_CAMERA_RADIAL3, camera::EINTRINSIC::PINHOLE_CAMERA_BROWN, + camera::EINTRINSIC::PINHOLE_CAMERA_RADIAL1, camera::EINTRINSIC::PINHOLE_CAMERA_FISHEYE, + camera::EINTRINSIC::PINHOLE_CAMERA_FISHEYE1, camera::EINTRINSIC::PINHOLE_CAMERA}; + + for (const auto& e : intrinsicsPriorities) + { + if (allowedEintrinsics & e) + { + intrinsicType = e; + break; + } + } + + // If still unassigned + if (intrinsicType == camera::EINTRINSIC::UNKNOWN) + { + throw std::invalid_argument("No intrinsic type can be attributed."); + } + } + + // create the desired intrinsic + std::shared_ptr intrinsic = camera::createIntrinsic(intrinsicType, view.getWidth(), view.getHeight(), pxFocalLength, pxFocalLength, 0, 0); + if (hasFocalLengthInput) + { + std::shared_ptr intrinsicScaleOffset = std::dynamic_pointer_cast(intrinsic); + + if (intrinsicScaleOffset) + { + intrinsicScaleOffset->setInitialScale({pxFocalLength, (pxFocalLength > 0) ? 
pxFocalLength / defaultFocalRatio : -1}); + intrinsicScaleOffset->setOffset({defaultOffsetX, defaultOffsetY}); + } + } + + // initialize distortion parameters + switch (intrinsicType) + { + case camera::EINTRINSIC::PINHOLE_CAMERA_FISHEYE: + { + if (cameraBrand == "GoPro") + { + intrinsic->updateFromParams({pxFocalLength, pxFocalLength, 0, 0, 0.0524, 0.0094, -0.0037, -0.0004}); + } + else if ((!lensParam->isEmpty()) && (cameraBrand != "Custom")) + { + std::vector p = {pxFocalLength, pxFocalLength, 0, 0}; + p.push_back(lensParam->fisheyeParams.RadialDistortParam1); + p.push_back(lensParam->fisheyeParams.RadialDistortParam2); + p.push_back(0.0); + p.push_back(0.0); + intrinsic->updateFromParams(p); + } + break; + } + case camera::EINTRINSIC::PINHOLE_CAMERA_FISHEYE1: + { + if (cameraBrand == "GoPro") + { + intrinsic->updateFromParams({pxFocalLength, pxFocalLength, 0, 0, 1.04}); + } + break; + } + case camera::EINTRINSIC::PINHOLE_CAMERA_RADIAL3: + { + if ((!lensParam->isEmpty()) && (cameraBrand != "Custom")) + { + std::vector p = {pxFocalLength, pxFocalLength, 0, 0}; + p.push_back(lensParam->perspParams.RadialDistortParam1); + p.push_back(lensParam->perspParams.RadialDistortParam2); + p.push_back(lensParam->perspParams.RadialDistortParam3); + intrinsic->updateFromParams(p); + } + break; + } + default: + break; + } + + // create serial number + intrinsic->setSerialNumber(bodySerialNumber + lensSerialNumber); + + return intrinsic; +} + std::vector viewPathsFromFolders(const sfmData::View& view, const std::vector& folders) { return utils::getFilesPathsFromFolders(folders, [&view](const boost::filesystem::path& path) { diff --git a/src/aliceVision/sfmDataIO/viewIO.hpp b/src/aliceVision/sfmDataIO/viewIO.hpp index f80950d5eb..dbd2839037 100644 --- a/src/aliceVision/sfmDataIO/viewIO.hpp +++ b/src/aliceVision/sfmDataIO/viewIO.hpp @@ -73,17 +73,16 @@ void updateIncompleteView(sfmData::View& view, EViewIdMethod viewIdMethod = EVie * @param[in] defaultFocalRatio * @param[in] defaultOffsetX * @param[in] defaultOffsetY + * @param[in] lensParam Lens data from LCP file * @param[in] defaultIntrinsicType (unknown by default) * @param[in] allowedEintrinsics The intrinsics values that can be attributed * @return shared_ptr IntrinsicBase */ -std::shared_ptr getViewIntrinsic( - const sfmData::View& view, double mmFocalLength = -1.0, double sensorWidth = -1, - double defaultFocalLength = -1, double defaultFieldOfView = -1, - double defaultFocalRatio = 1.0, double defaultOffsetX = 0.0, double defaultOffsetY = 0.0, - camera::EINTRINSIC lcpIntrinsicType = camera::EINTRINSIC::UNKNOWN, - camera::EINTRINSIC defaultIntrinsicType = camera::EINTRINSIC::UNKNOWN, - camera::EINTRINSIC allowedEintrinsics = camera::EINTRINSIC::VALID_CAMERA_MODEL); +std::shared_ptr getViewIntrinsic(const sfmData::View& view, double mmFocalLength = -1.0, double sensorWidth = -1, double defaultFocalLength = -1, + double defaultFieldOfView = -1, double defaultFocalRatio = 1.0, double defaultOffsetX = 0.0, double defaultOffsetY = 0.0, + LensParam *lensParam = nullptr, + camera::EINTRINSIC defaultIntrinsicType = camera::EINTRINSIC::UNKNOWN, + camera::EINTRINSIC allowedEintrinsics = camera::EINTRINSIC::VALID_CAMERA_MODEL); /** * @brief Allows you to retrieve the files paths corresponding to a view by searching through a list of folders. 
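Note: taken together, the changes above replace the old lcpIntrinsicType argument with a LensParam pointer, so the caller now fills one LensParam from the LCP profile (distortion, vignetting and, new here, chromatic aberration) and hands it to getViewIntrinsic(), which derives the intrinsic type itself (fisheye model -> PINHOLE_CAMERA_FISHEYE, otherwise PINHOLE_CAMERA_RADIAL3). A condensed sketch of that call pattern, assuming hypothetical caller-side variables (lcpData, view, focalLengthmm, apertureValue, sensorWidth and the default* settings) and omitting error handling; the actual wiring is in main_cameraInit.cpp below:

    // Populate the lens parameters from the Lens Correction Profile, if any.
    LensParam lensParam;
    const float focusDistance = 0.f;
    if (lcpData != nullptr && !lcpData->isEmpty())
    {
        lcpData->getDistortionParams(focalLengthmm, focusDistance, lensParam);
        lcpData->getVignettingParams(focalLengthmm, apertureValue, lensParam);
        lcpData->getChromaticParams(focalLengthmm, focusDistance, lensParam); // new in this patch
    }

    // Build the intrinsic; the LCP geometry (radial or fisheye coefficients) is
    // used inside getViewIntrinsic() to initialize the distortion parameters.
    std::shared_ptr<camera::IntrinsicBase> intrinsicBase =
        getViewIntrinsic(view, focalLengthmm, sensorWidth,
                         defaultFocalLength, defaultFieldOfView,
                         defaultFocalRatio, defaultOffsetX, defaultOffsetY,
                         &lensParam, defaultCameraModel, allowedCameraModels);
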
diff --git a/src/software/pipeline/main_cameraInit.cpp b/src/software/pipeline/main_cameraInit.cpp index 4286f91c9e..448f06068d 100644 --- a/src/software/pipeline/main_cameraInit.cpp +++ b/src/software/pipeline/main_cameraInit.cpp @@ -359,6 +359,8 @@ int aliceVision_main(int argc, char **argv) std::size_t lcpGeometryViewCount = 0; // number of views with LCP data used to add vignetting params in metadata std::size_t lcpVignettingViewCount = 0; + // number of views with LCP data used to add chromatic aberration params in metadata + std::size_t lcpChromaticViewCount = 0; // load known informations if(imageFolder.empty()) @@ -489,11 +491,6 @@ int aliceVision_main(int argc, char **argv) const std::string& make = view.getMetadataMake(); const std::string& model = view.getMetadataModel(); const bool hasCameraMetadata = (!make.empty() || !model.empty()); - const bool hasFocalIn35mmMetadata = view.hasDigitMetadata({"Exif:FocalLengthIn35mmFilm", "FocalLengthIn35mmFilm"}); - const double focalIn35mm = hasFocalIn35mmMetadata ? view.getDoubleMetadata({"Exif:FocalLengthIn35mmFilm", "FocalLengthIn35mmFilm"}) : -1.0; - const double imageRatio = static_cast(view.getWidth()) / static_cast(view.getHeight()); - const double diag24x36 = std::sqrt(36.0 * 36.0 + 24.0 * 24.0); - camera::EInitMode intrinsicInitMode = camera::EInitMode::UNKNOWN; std::unique_ptr in(oiio::ImageInput::open(view.getImagePath())); @@ -540,7 +537,7 @@ int aliceVision_main(int argc, char **argv) if (imgFormat.compare("raw") == 0) { - // Store the color interpretation mode choosed for raw images in metadata, + // Store the color interpretation mode chosen for raw images in metadata, // so all future loads of this image will be interpreted in the same way. if (!dcpError) { @@ -554,25 +551,6 @@ int aliceVision_main(int argc, char **argv) } } - // try to find an appropriate Lens Correction Profile - LCPinfo* lcpData = nullptr; - if (lcpStore.size() == 1) - { - lcpData = lcpStore.retrieveLCP(); - } - else if (!lcpStore.empty()) - { - // Find an LCP file that matches the camera model and the lens model. 
- const std::string& lensModel = view.getMetadataLensModel(); - const int lensID = view.getMetadataLensID(); - - if (!make.empty() && !lensModel.empty()) - { - #pragma omp critical (lcp) - lcpData = lcpStore.findLCP(make, model, lensModel, lensID, 1); - } - } - // check if the view intrinsic is already defined if(intrinsicId != UndefinedIndexT) { @@ -591,185 +569,82 @@ int aliceVision_main(int argc, char **argv) } } - // try to find in the sensor width in the database - if(hasCameraMetadata) - { - sensorDB::Datasheet datasheet; - if(sensorDB::getInfo(make, model, sensorDatabase, datasheet)) - { - // sensor is in the database - ALICEVISION_LOG_TRACE("Sensor width found in sensor database: " << std::endl - << "\t- brand: " << make << std::endl - << "\t- model: " << model << std::endl - << "\t- sensor width: " << datasheet._sensorWidth << " mm"); - - if(datasheet._model != model) { - // the camera model in sensor database is slightly different - unsureSensors.emplace(std::make_pair(make, model), std::make_pair(view.getImagePath(), datasheet)); // will throw a warning message - } + camera::EInitMode intrinsicInitMode = camera::EInitMode::UNKNOWN; - sensorWidth = datasheet._sensorWidth; - sensorWidthSource = ESensorWidthSource::FROM_DB; + int errCode = view.getSensorSize(sensorDatabase, sensorWidth, sensorHeight, focalLengthmm, intrinsicInitMode, false); - if(focalLengthmm > 0.0) { - intrinsicInitMode = camera::EInitMode::ESTIMATED; - } - } + // Throw a warning at the end + if (errCode == 1) + { + #pragma omp critical(unknownSensors) + unknownSensors.emplace(std::make_pair(make, model), view.getImagePath()); } - - // try to find / compute with 'FocalLengthIn35mmFilm' metadata - if (hasFocalIn35mmMetadata) + else if (errCode == 2) { - if (sensorWidth == -1.0) - { - const double invRatio = 1.0 / imageRatio; - - if (focalLengthmm > 0.0) - { - // no sensorWidth but valid focalLength and valid focalLengthIn35mm, so deduce sensorWith approximation - const double sensorDiag = (focalLengthmm * diag24x36) / focalIn35mm; // 43.3 is the diagonal of 35mm film - sensorWidth = sensorDiag * std::sqrt(1.0 / (1.0 + invRatio * invRatio)); - sensorWidthSource = ESensorWidthSource::FROM_METADATA_ESTIMATION; - } - else - { - // no sensorWidth and no focalLength but valid focalLengthIn35mm, so consider sensorWith as 35mm - sensorWidth = diag24x36 * std::sqrt(1.0 / (1.0 + invRatio * invRatio)); - focalLengthmm = sensorWidth * (focalIn35mm ) / 36.0; - sensorWidthSource = ESensorWidthSource::UNKNOWN; - } - - intrinsicsSetFromFocal35mm.emplace(view.getImagePath(), std::make_pair(sensorWidth, focalLengthmm)); - intrinsicInitMode = camera::EInitMode::ESTIMATED; - } - else if(sensorWidth > 0 && focalLengthmm <= 0) - { - // valid sensorWidth and valid focalLengthIn35mm but no focalLength, so convert focalLengthIn35mm to the actual width of the sensor - const double sensorDiag = std::sqrt(std::pow(sensorWidth, 2) + std::pow(sensorWidth / imageRatio,2)); - focalLengthmm = (sensorDiag * focalIn35mm) / diag24x36; - - intrinsicsSetFromFocal35mm.emplace(view.getImagePath(), std::make_pair(sensorWidth, focalLengthmm)); - intrinsicInitMode = camera::EInitMode::ESTIMATED; - } + #pragma omp critical(noMetadataImagePaths) + noMetadataImagePaths.emplace_back(view.getImagePath()); } - - // error handling - if(sensorWidth == -1.0) + else if (errCode == 3) { - if(hasCameraMetadata) - { - // sensor is not in the database - #pragma omp critical (unknownSensors) - unknownSensors.emplace(std::make_pair(make, model), view.getImagePath()); // 
will throw a warning at the end - } - else - { - // no metadata 'Make' and 'Model' can't find sensor width - #pragma omp critical (noMetadataImagePaths) - noMetadataImagePaths.emplace_back(view.getImagePath()); // will throw a warning message at the end - } + sensorDB::Datasheet datasheet; + sensorDB::getInfo(make, model, sensorDatabase, datasheet); + #pragma omp critical(unsureSensors) + unsureSensors.emplace(std::make_pair(make, model), std::make_pair(view.getImagePath(), datasheet)); } - else + else if (errCode == 4) { - // we have a valid sensorWidth information, so we store it into the metadata (where it would have been nice to have it in the first place) - if(sensorWidthSource == ESensorWidthSource::FROM_DB) { - view.addMetadata("AliceVision:SensorWidth", std::to_string(sensorWidth)); - } - else if(sensorWidthSource == ESensorWidthSource::FROM_METADATA_ESTIMATION) { - view.addMetadata("AliceVision:SensorWidthEstimation", std::to_string(sensorWidth)); - } + #pragma omp critical(intrinsicsSetFromFocal35mm) + intrinsicsSetFromFocal35mm.emplace(view.getImagePath(), std::make_pair(sensorWidth, focalLengthmm)); } - if (sensorWidth < 0) + // try to find an appropriate Lens Correction Profile + LCPinfo* lcpData = nullptr; + if (lcpStore.size() == 1) { - ALICEVISION_LOG_WARNING("Sensor size is unknown"); - ALICEVISION_LOG_WARNING("Use default sensor size (36 mm)"); - sensorWidth = 36.0; + lcpData = lcpStore.retrieveLCP(); } + else if (!lcpStore.empty()) + { + // Find an LCP file that matches the camera model and the lens model. + const std::string& lensModel = view.getMetadataLensModel(); + const int lensID = view.getMetadataLensID(); - float apertureValue = 2.f * std::log(view.getMetadataFNumber()) / std::log(2.0); - float focusDistance = 0.f; + if (!make.empty() && !lensModel.empty()) + { + #pragma omp critical(lcp) + lcpData = lcpStore.findLCP(make, model, lensModel, lensID, 1); + } + } - camera::EINTRINSIC lcpCameraModel = camera::EINTRINSIC::UNKNOWN; + const float apertureValue = 2.f * std::log(view.getMetadataFNumber()) / std::log(2.0); + const float focusDistance = 0.f; LensParam lensParam; if ((lcpData != nullptr) && !(lcpData->isEmpty())) { - lcpData->getDistortionParams(focalLengthmm, focusDistance, lensParam); - lcpData->getVignettingParams(focalLengthmm, focusDistance, lensParam); - - lcpCameraModel = lensParam.isFisheye() ? 
camera::EINTRINSIC::PINHOLE_CAMERA_FISHEYE : camera::EINTRINSIC::PINHOLE_CAMERA_RADIAL3; + lcpData->getDistortionParams(focalLengthmm, focusDistance, lensParam); + lcpData->getVignettingParams(focalLengthmm, apertureValue, lensParam); + lcpData->getChromaticParams(focalLengthmm, focusDistance, lensParam); } - // build intrinsic - std::shared_ptr intrinsicBase = getViewIntrinsic( - view, focalLengthmm, sensorWidth, defaultFocalLength, defaultFieldOfView, - defaultFocalRatio, defaultOffsetX, defaultOffsetY, - lcpCameraModel, defaultCameraModel, allowedCameraModels); - - if (lcpData != nullptr) + if (lensParam.hasVignetteParams() && !lensParam.vignParams.isEmpty) { - std::shared_ptr intrinsicDisto = std::dynamic_pointer_cast(intrinsicBase); - if (intrinsicDisto) - { - std::shared_ptr distortion = intrinsicDisto->getDistortion(); - - if (!lensParam.isFisheye()) - { - std::shared_ptr distoRadialK3 = std::dynamic_pointer_cast(distortion); - if (distoRadialK3) - { - const int Dmax = std::max(lcpData->getImageWidth(), lcpData->getImageLength()); - - const aliceVision::Vec2 offset((lensParam.perspParams.ImageXCenter - 0.5f) * Dmax, (lensParam.perspParams.ImageYCenter - 0.5f) * Dmax); - intrinsicDisto->setOffset(offset); - - std::vector p; - p.push_back(lensParam.perspParams.RadialDistortParam1); - p.push_back(lensParam.perspParams.RadialDistortParam2); - p.push_back(lensParam.perspParams.RadialDistortParam3); - intrinsicDisto->setDistortionParams(p); - - ++lcpGeometryViewCount; - } - } - else - { - std::shared_ptr DistortionFisheye = std::dynamic_pointer_cast(distortion); - if (DistortionFisheye) - { - const int Dmax = std::max(lcpData->getImageWidth(), lcpData->getImageLength()); - - const aliceVision::Vec2 offset((lensParam.perspParams.ImageXCenter - 0.5f) * Dmax, (lensParam.perspParams.ImageYCenter - 0.5f) * Dmax); - intrinsicDisto->setOffset(offset); + view.addVignettingMetadata(lensParam); + ++lcpVignettingViewCount; + } - std::vector p; - p.push_back(lensParam.fisheyeParams.RadialDistortParam1); - p.push_back(lensParam.fisheyeParams.RadialDistortParam2); - p.push_back(0.0); - p.push_back(0.0); - intrinsicDisto->setDistortionParams(p); + if(lensParam.hasChromaticParams() && !lensParam.ChromaticGreenParams.isEmpty) + { + view.addChromaticMetadata(lensParam); + ++lcpChromaticViewCount; + } - ++lcpGeometryViewCount; - } - } - // set disto initialization mode - intrinsicDisto->setDistortionInitializationMode(camera::EInitMode::ESTIMATED); - } + // build intrinsic + std::shared_ptr intrinsicBase = getViewIntrinsic(view, focalLengthmm, sensorWidth, defaultFocalLength, defaultFieldOfView, defaultFocalRatio, + defaultOffsetX, defaultOffsetY, &lensParam, defaultCameraModel, allowedCameraModels); - if (lensParam.hasVignetteParams() && !lensParam.vignParams.isEmpty) - { - view.addMetadata("AliceVision:VignParamFocX", std::to_string(lensParam.vignParams.FocalLengthX)); - view.addMetadata("AliceVision:VignParamFocY", std::to_string(lensParam.vignParams.FocalLengthY)); - view.addMetadata("AliceVision:VignParamCenterX", std::to_string(lensParam.vignParams.ImageXCenter)); - view.addMetadata("AliceVision:VignParamCenterY", std::to_string(lensParam.vignParams.ImageYCenter)); - view.addMetadata("AliceVision:VignParam1", std::to_string(lensParam.vignParams.VignetteModelParam1)); - view.addMetadata("AliceVision:VignParam2", std::to_string(lensParam.vignParams.VignetteModelParam2)); - view.addMetadata("AliceVision:VignParam3", std::to_string(lensParam.vignParams.VignetteModelParam3)); - - ++lcpVignettingViewCount; 
- } - } + if (!lensParam.isEmpty()) + ++lcpGeometryViewCount; std::shared_ptr intrinsic = std::dynamic_pointer_cast(intrinsicBase); @@ -777,22 +652,8 @@ int aliceVision_main(int argc, char **argv) intrinsic->setInitializationMode(intrinsicInitMode); // Set sensor size - if (sensorHeight > 0.0) - { - intrinsicBase->setSensorWidth(sensorWidth); - intrinsicBase->setSensorHeight(sensorHeight); - } - else - { - if (imageRatio > 1.0) { - intrinsicBase->setSensorWidth(sensorWidth); - intrinsicBase->setSensorHeight(sensorWidth / imageRatio); - } - else { - intrinsicBase->setSensorWidth(sensorWidth); - intrinsicBase->setSensorHeight(sensorWidth * imageRatio); - } - } + intrinsicBase->setSensorWidth(sensorWidth); + intrinsicBase->setSensorHeight(sensorHeight); if(intrinsic && intrinsic->isValid()) { @@ -1037,7 +898,6 @@ int aliceVision_main(int argc, char **argv) return EXIT_FAILURE; } - //Check unique frame id per rig element if (!rigHasUniqueFrameIds(sfmData)) { @@ -1059,6 +919,7 @@ int aliceVision_main(int argc, char **argv) << "\n\t - # with DCP color calibration (raw images only): " << viewsWithDCPMetadata << "\n\t - # with LCP lens distortion initialization: " << lcpGeometryViewCount << "\n\t - # with LCP vignetting calibration: " << lcpVignettingViewCount + << "\n\t - # with LCP chromatic aberration correction models: " << lcpChromaticViewCount << "\n\t- # Cameras Intrinsics: " << sfmData.getIntrinsics().size()); return EXIT_SUCCESS; diff --git a/src/software/utils/CMakeLists.txt b/src/software/utils/CMakeLists.txt index 61dd147a26..fea611e5f9 100644 --- a/src/software/utils/CMakeLists.txt +++ b/src/software/utils/CMakeLists.txt @@ -43,11 +43,13 @@ alicevision_add_software(aliceVision_imageProcessing FOLDER ${FOLDER_SOFTWARE_UTILS} LINKS aliceVision_system aliceVision_cmdline + aliceVision_sensorDB aliceVision_image aliceVision_feature aliceVision_sfm aliceVision_sfmData aliceVision_sfmDataIO + aliceVision_lensCorrectionProfile ${Boost_LIBRARIES} ) diff --git a/src/software/utils/main_imageProcessing.cpp b/src/software/utils/main_imageProcessing.cpp index 92ae83c731..b452240b84 100644 --- a/src/software/utils/main_imageProcessing.cpp +++ b/src/software/utils/main_imageProcessing.cpp @@ -11,11 +11,13 @@ #include #include #include +#include +#include #include #include -#include #include #include +#include #include #include @@ -43,7 +45,7 @@ // These constants define the current software version. // They must be updated when the command line is changed. 
 #define ALICEVISION_SOFTWARE_VERSION_MAJOR 3
-#define ALICEVISION_SOFTWARE_VERSION_MINOR 2
+#define ALICEVISION_SOFTWARE_VERSION_MINOR 3
 
 using namespace aliceVision;
 namespace po = boost::program_options;
@@ -61,6 +63,12 @@ struct LensCorrectionParams
     std::vector caGParams;
     std::vector caRGParams;
     std::vector caBGParams;
+
+    RectilinearModel geometryModel = RectilinearModel();
+
+    RectilinearModel caGModel = RectilinearModel();
+    RectilinearModel caBGModel = RectilinearModel();
+    RectilinearModel caRGModel = RectilinearModel();
 };
 
 std::istream& operator>>(std::istream& in, LensCorrectionParams& lcParams)
@@ -404,13 +412,115 @@ void undistortVignetting(aliceVision::image::Image& image, const ProcessingParams& pParams, std::map& imageMetadata, const camera::IntrinsicBase* cam = NULL)
+void undistortRectilinearGeometryLCP(const aliceVision::image::Image& img,
+                                     RectilinearModel& model,
+                                     aliceVision::image::Image& img_ud,
+                                     const image::RGBAfColor fillcolor)
+{
+    if(!model.isEmpty && model.FocalLengthX != 0.0 && model.FocalLengthY != 0.0)
+    {
+        img_ud.resize(img.Width(), img.Height(), true, fillcolor);
+        const image::Sampler2d sampler;
+
+        const float maxWH = std::max(img.Width(), img.Height());
+        const float ppX = model.ImageXCenter * img.Width();
+        const float ppY = model.ImageYCenter * img.Height();
+        const float scaleX = model.FocalLengthX * maxWH;
+        const float scaleY = model.FocalLengthY * maxWH;
+
+        #pragma omp parallel for
+        for(int v = 0; v < img.Height(); ++v)
+            for(int u = 0; u < img.Width(); ++u)
+            {
+                // image to camera
+                const float x = (u - ppX) / scaleX;
+                const float y = (v - ppY) / scaleY;
+
+                // disto
+                float xd, yd;
+                model.distort(x, y, xd, yd);
+
+                // camera to image
+                const Vec2 distoPix(xd * scaleX + ppX, yd * scaleY + ppY);
+
+                // pick pixel if it is in the image domain
+                if(img.Contains(distoPix(1), distoPix(0)))
+                {
+                    img_ud(v, u) = sampler(img, distoPix(1), distoPix(0));
+                }
+            }
+    }
+}
+
+void undistortChromaticAberrations(const aliceVision::image::Image& img,
+                                   RectilinearModel& greenModel, RectilinearModel& blueGreenModel,
+                                   RectilinearModel& redGreenModel,
+                                   aliceVision::image::Image& img_ud,
+                                   const image::RGBAfColor fillcolor, bool undistortGeometry = false)
+{
+    if(!greenModel.isEmpty && greenModel.FocalLengthX != 0.0 && greenModel.FocalLengthY != 0.0)
+    {
+        img_ud.resize(img.Width(), img.Height(), true, fillcolor);
+        const image::Sampler2d sampler;
+
+        const float maxWH = std::max(img.Width(), img.Height());
+        const float ppX = greenModel.ImageXCenter * img.Width();
+        const float ppY = greenModel.ImageYCenter * img.Height();
+        const float scaleX = greenModel.FocalLengthX * maxWH;
+        const float scaleY = greenModel.FocalLengthY * maxWH;
+
+        #pragma omp parallel for
+        for(int v = 0; v < img.Height(); ++v)
+            for(int u = 0; u < img.Width(); ++u)
+            {
+                // image to camera
+                const float x = (u - ppX) / scaleX;
+                const float y = (v - ppY) / scaleY;
+
+                // disto
+                float xdRed,ydRed,xdGreen,ydGreen,xdBlue,ydBlue;
+                if(undistortGeometry)
+                {
+                    greenModel.distort(x, y, xdGreen, ydGreen);
+                }
+                else
+                {
+                    xdGreen = x;
+                    ydGreen = y;
+                }
+                redGreenModel.distort(xdGreen, ydGreen, xdRed, ydRed);
+                blueGreenModel.distort(xdGreen, ydGreen, xdBlue, ydBlue);
+
+                // camera to image
+                const Vec2 distoPixRed(xdRed * scaleX + ppX, ydRed * scaleY + ppY);
+                const Vec2 distoPixGreen(xdGreen * scaleX + ppX, ydGreen * scaleY + ppY);
+                const Vec2 distoPixBlue(xdBlue * scaleX + ppX, ydBlue * scaleY + ppY);
+
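+                // Sample each colour channel at its own distorted position so that red and blue are re-aligned onto the green reference channel.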
+                // pick pixel if it is in the image domain
+                if(img.Contains(distoPixRed(1), distoPixRed(0)))
+                {
+                    img_ud(v, u)[0] = sampler(img, distoPixRed(1), distoPixRed(0))[0];
+                }
+                if(img.Contains(distoPixGreen(1), distoPixGreen(0)))
+                {
+                    img_ud(v, u)[1] = sampler(img, distoPixGreen(1), distoPixGreen(0))[1];
+                }
+                if(img.Contains(distoPixBlue(1), distoPixBlue(0)))
+                {
+                    img_ud(v, u)[2] = sampler(img, distoPixBlue(1), distoPixBlue(0))[2];
+                }
+            }
+    }
+}
+
+void processImage(image::Image& image, ProcessingParams& pParams,
+                  std::map& imageMetadata, std::shared_ptr cam)
 {
     const unsigned int nchannels = 4;
 
     // Fix non-finite pixels
     // Note: fill holes needs to fix non-finite values first
-    if(pParams.fixNonFinite || pParams.fillHoles)
+    if (pParams.fixNonFinite || pParams.fillHoles)
     {
         oiio::ImageBuf inBuf(oiio::ImageSpec(image.Width(), image.Height(), nchannels, oiio::TypeDesc::FLOAT), image.data());
         int pixelsFixed = 0;
@@ -419,27 +529,49 @@ void processImage(image::Image& image, const ProcessingParams
         ALICEVISION_LOG_INFO("Fixed " << pixelsFixed << " non-finite pixels.");
     }
 
-    if (pParams.lensCorrection.enabled && pParams.lensCorrection.vignetting)
+    if (pParams.lensCorrection.enabled)
     {
-        undistortVignetting(image, pParams.lensCorrection.vParams);
-    }
+        if (pParams.lensCorrection.vignetting && !pParams.lensCorrection.vParams.empty())
+        {
+            undistortVignetting(image, pParams.lensCorrection.vParams);
+        }
+        else if (pParams.lensCorrection.vignetting && pParams.lensCorrection.vParams.empty())
+        {
+            ALICEVISION_LOG_WARNING("No distortion model available for lens vignetting correction.");
+        }
 
-    if (pParams.lensCorrection.enabled && pParams.lensCorrection.geometry)
-    {
-        if (cam != NULL && cam->hasDistortion())
+        if (pParams.lensCorrection.chromaticAberration && !pParams.lensCorrection.caGModel.isEmpty)
+        {
+            const image::RGBAfColor FBLACK_A(.0f, .0f, .0f, 1.0f);
+            image::Image image_ud;
+            undistortChromaticAberrations(image, pParams.lensCorrection.caGModel, pParams.lensCorrection.caBGModel,
+                                          pParams.lensCorrection.caRGModel, image_ud, FBLACK_A, false);
+            image = image_ud;
+        }
+        else if(pParams.lensCorrection.chromaticAberration && pParams.lensCorrection.caGModel.isEmpty)
+        {
+            ALICEVISION_LOG_WARNING("No distortion model available for lens chromatic aberration correction.");
+        }
+
+        if (pParams.lensCorrection.geometry && cam != NULL && cam->hasDistortion())
         {
             const image::RGBAfColor FBLACK_A(.0f, .0f, .0f, 1.0f);
             image::Image image_ud;
-            camera::UndistortImage(image, cam, image_ud, FBLACK_A);
+            const image::Sampler2d sampler;
+
+            image_ud.resize(image.Width(), image.Height(), true, FBLACK_A);
+
+            camera::UndistortImage(image, cam.get(), image_ud, FBLACK_A);
+
             image = image_ud;
         }
-        else if (cam != NULL && !cam->hasDistortion())
+        else if (pParams.lensCorrection.geometry && cam != NULL && !cam->hasDistortion())
         {
-            ALICEVISION_LOG_INFO("No distortion model available for lens correction.");
+            ALICEVISION_LOG_WARNING("No distortion model available for lens geometry distortion correction.");
         }
-        else if (cam == NULL)
+        else if (pParams.lensCorrection.geometry && cam == NULL)
         {
-            ALICEVISION_LOG_INFO("No intrinsics data available for lens correction.");
+            ALICEVISION_LOG_WARNING("No intrinsics data available for lens geometry distortion correction.");
         }
     }
 
@@ -489,7 +621,7 @@ void processImage(image::Image& image, const ProcessingParams
         image.swap(filtered);
     }
 
-    if(pParams.sharpen.enabled)
+    if (pParams.sharpen.enabled)
     {
         image::Image filtered(image.Width(), image.Height());
         const oiio::ImageBuf inBuf(oiio::ImageSpec(image.Width(), image.Height(), nchannels, oiio::TypeDesc::FLOAT), image.data());
@@ -517,7 +649,7 @@ void processImage(image::Image& image, const ProcessingParams
     }
 
     // Contrast Limited Adaptive Histogram Equalization
-    if(pParams.claheFilter.enabled)
+    if (pParams.claheFilter.enabled)
     {
 #if ALICEVISION_IS_DEFINED(ALICEVISION_HAVE_OPENCV)
         // Convert alicevision::image to BGR openCV Mat
@@ -561,7 +693,7 @@ void processImage(image::Image& image, const ProcessingParams
         throw std::invalid_argument(
             "Unsupported mode! If you intended to use a Clahe filter, please add OpenCV support.");
 #endif
     }
 
-    if(pParams.fillHoles)
+    if (pParams.fillHoles)
     {
         image::Image filtered(image.Width(), image.Height());
         oiio::ImageBuf inBuf(oiio::ImageSpec(image.Width(), image.Height(), nchannels, oiio::TypeDesc::FLOAT), image.data());
@@ -574,13 +706,13 @@ void processImage(image::Image& image, const ProcessingParams
         image.swap(filtered);
     }
 
-    if(pParams.noise.enabled)
+    if (pParams.noise.enabled)
     {
         oiio::ImageBuf inBuf(oiio::ImageSpec(image.Width(), image.Height(), nchannels, oiio::TypeDesc::FLOAT), image.data());
         oiio::ImageBufAlgo::noise(inBuf, ENoiseMethod_enumToString(pParams.noise.method), pParams.noise.A, pParams.noise.B, pParams.noise.mono);
     }
 
-    if(pParams.nlmFilter.enabled)
+    if (pParams.nlmFilter.enabled)
     {
 #if ALICEVISION_IS_DEFINED(ALICEVISION_HAVE_OPENCV)
         // Create temporary OpenCV Mat (keep only 3 channels) to handle Eigen data of our image
@@ -803,6 +935,9 @@ int aliceVision_main(int argc, char * argv[])
     std::string demosaicingAlgo = "AHD";
     int highlightMode = 0;
     double correlatedColorTemperature = -1;
+    std::string lensCorrectionProfileInfo;
+    bool lensCorrectionProfileSearchIgnoreCameraModel = true;
+    std::string sensorDatabasePath;
 
     ProcessingParams pParams;
@@ -857,6 +992,7 @@ int aliceVision_main(int argc, char * argv[])
         ("contrast", po::value(&pParams.contrast)->default_value(pParams.contrast),
          "Contrast Factor (1.0: no change).")
+
         ("medianFilter", po::value(&pParams.medianFilter)->default_value(pParams.medianFilter),
          "Median Filter (0: no filter).")
@@ -936,11 +1072,20 @@ int aliceVision_main(int argc, char * argv[])
         ("highlightMode", po::value(&highlightMode)->default_value(highlightMode),
          "Highlight management (see libRaw documentation).\n"
          "0 = clip (default), 1 = unclip, 2 = blend, 3+ = rebuild.")
+
+        ("lensCorrectionProfileInfo", po::value(&lensCorrectionProfileInfo)->default_value(""),
+         "Lens Correction Profile filepath or database directory path.")
+
+        ("lensCorrectionProfileSearchIgnoreCameraModel", po::value(&lensCorrectionProfileSearchIgnoreCameraModel)->default_value(lensCorrectionProfileSearchIgnoreCameraModel),
+         "Automatic LCP Search considers only the camera maker and the lens name")
 
         ("correlatedColorTemperature", po::value(&correlatedColorTemperature)->default_value(correlatedColorTemperature),
          "Correlated Color Temperature in Kelvin of scene illuminant.\n"
          "If less than or equal to 0.0, the value extracted from the metadata will be used.")
+
+        ("sensorDatabase,s", po::value(&sensorDatabasePath)->default_value(""),
+         "Camera sensor width database path.")
+
         ("storageDataType", po::value(&storageDataType)->default_value(storageDataType),
          ("Storage data type: " + image::EStorageDataType_informations()).c_str())
@@ -1113,13 +1258,43 @@ int aliceVision_main(int argc, char * argv[])
         if (pParams.lensCorrection.enabled && pParams.lensCorrection.vignetting)
         {
-            pParams.lensCorrection.vParams;
             if (!view.getVignettingParams(pParams.lensCorrection.vParams))
             {
                 pParams.lensCorrection.vParams.clear();
             }
         }
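+        // Build the green, red/green and blue/green chromatic aberration models from the parameters attached to the view.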
+        if (pParams.lensCorrection.enabled && pParams.lensCorrection.chromaticAberration)
+        {
+            std::vector caGParams, caBGParams, caRGParams;
+            view.getChromaticAberrationParams(caGParams,caBGParams,caRGParams);
+
+            pParams.lensCorrection.caGModel.init3(caGParams);
+            pParams.lensCorrection.caBGModel.init3(caBGParams);
+            pParams.lensCorrection.caRGModel.init3(caRGParams);
+
+            if(pParams.lensCorrection.caGModel.FocalLengthX == 0.0)
+            {
+                float sensorWidth = view.getSensorWidth();
+                pParams.lensCorrection.caGModel.FocalLengthX = view.getWidth() * view.getMetadataFocalLength() /
+                                                               sensorWidth / std::max(view.getWidth(), view.getHeight());
+            }
+            if(pParams.lensCorrection.caGModel.FocalLengthY == 0.0)
+            {
+                float sensorHeight = view.getSensorHeight();
+                pParams.lensCorrection.caGModel.FocalLengthY = view.getHeight() * view.getMetadataFocalLength() /
+                                                               sensorHeight / std::max(view.getWidth(), view.getHeight());
+            }
+
+            if((pParams.lensCorrection.caGModel.FocalLengthX <= 0.0) ||
+               (pParams.lensCorrection.caGModel.FocalLengthY <= 0.0))
+            {
+                pParams.lensCorrection.caGModel.reset();
+                pParams.lensCorrection.caBGModel.reset();
+                pParams.lensCorrection.caRGModel.reset();
+            }
+        }
+
         // Read original image
         image::Image image;
         image::readImage(viewPath, image, options);
@@ -1143,7 +1318,7 @@ int aliceVision_main(int argc, char * argv[])
         }
 
         sfmData::Intrinsics::const_iterator iterIntrinsic = sfmData.getIntrinsics().find(view.getIntrinsicId());
-        const camera::IntrinsicBase* cam = iterIntrinsic->second.get();
+        std::shared_ptr cam = iterIntrinsic->second;
 
         std::map viewMetadata = view.getMetadata();
@@ -1249,6 +1424,33 @@ int aliceVision_main(int argc, char * argv[])
     }
 
     image::DCPDatabase dcpDatabase;
+    LCPdatabase lcpStore(lensCorrectionProfileInfo, lensCorrectionProfileSearchIgnoreCameraModel);
+
+    // check sensor database
+    std::vector sensorDatabase;
+    if (pParams.lensCorrection.enabled && (pParams.lensCorrection.geometry || pParams.lensCorrection.chromaticAberration))
+    {
+        if (sensorDatabasePath.empty())
+        {
+            const auto root = image::getAliceVisionRoot();
+            if (root.empty())
+            {
+                ALICEVISION_LOG_WARNING(
+                    "ALICEVISION_ROOT is not defined, default sensor database cannot be accessed.");
+            }
+            else
+            {
+                sensorDatabasePath = root + "/share/aliceVision/cameraSensors.db";
+            }
+        }
+        if (!sensorDatabasePath.empty() && !sensorDB::parseDatabase(sensorDatabasePath, sensorDatabase))
+        {
+            ALICEVISION_LOG_ERROR("Invalid input sensor database '" << sensorDatabasePath
+                                  << "', please specify a valid file.");
+            return EXIT_FAILURE;
+        }
+    }
+
     int i = 0;
     for (const std::string& inputFilePath : filesStrPaths)
     {
@@ -1289,6 +1491,12 @@ int aliceVision_main(int argc, char * argv[])
         int width, height;
         const auto metadata = image::readImageMetadata(inputFilePath, width, height);
         view.setMetadata(image::getMapFromMetadata(metadata));
+        view.setWidth(width);
+        view.setHeight(height);
+        std::shared_ptr intrinsicBase;
+        // Get DSLR maker and model in view metadata.
+        const std::string& make = view.getMetadataMake();
+        const std::string& model = view.getMetadataModel();
 
         if (isRAW && (rawColorInterpretation == image::ERawColorInterpretation::DcpLinearProcessing || rawColorInterpretation == image::ERawColorInterpretation::DcpMetadata))
         {
             // Load DCP color profiles database if not already loaded
             dcpDatabase.load(colorProfileDatabaseDirPath.empty() ? getColorProfileDatabaseFolder() : colorProfileDatabaseDirPath, false);
 
-            // Get DSLR maker and model in view metadata.
-            const std::string& make = view.getMetadataMake();
-            const std::string& model = view.getMetadataModel();
-
             // Get DCP profile
             if (!dcpDatabase.retrieveDcpForCamera(make, model, dcpProf))
             {
@@ -1318,6 +1522,127 @@ int aliceVision_main(int argc, char * argv[])
                 view.addDCPMetadata(dcpProf);
         }
 
+        if(isRAW && pParams.lensCorrection.enabled &&
+           (pParams.lensCorrection.geometry || pParams.lensCorrection.vignetting || pParams.lensCorrection.chromaticAberration))
+        {
+            // try to find an appropriate Lens Correction Profile
+            LCPinfo* lcpData = nullptr;
+            if (lcpStore.size() == 1)
+            {
+                lcpData = lcpStore.retrieveLCP();
+            }
+            else if (!lcpStore.empty())
+            {
+                // Find an LCP file that matches the camera model and the lens model.
+                const std::string& lensModel = view.getMetadataLensModel();
+                const int lensID = view.getMetadataLensID();
+
+                if (!make.empty() && !lensModel.empty())
+                {
+                    #pragma omp critical(lcp)
+                    lcpData = lcpStore.findLCP(make, model, lensModel, lensID, 1);
+                }
+            }
+
+            if ((lcpData != nullptr) && !(lcpData->isEmpty()))
+            {
+                double focalLengthmm = view.getMetadataFocalLength();
+                const float apertureValue = 2.f * std::log(view.getMetadataFNumber()) / std::log(2.0);
+                const float focusDistance = 0.f;
+
+                LensParam lensParam;
+                lcpData->getDistortionParams(focalLengthmm, focusDistance, lensParam);
+                lcpData->getVignettingParams(focalLengthmm, apertureValue, lensParam);
+                lcpData->getChromaticParams(focalLengthmm, focusDistance, lensParam);
+
+                // Get sensor size by combining information from sensor database and view's metadata
+                double sensorWidth = -1.0;
+                double sensorHeight = -1.0;
+                camera::EInitMode intrinsicInitMode = camera::EInitMode::UNKNOWN;
+                view.getSensorSize(sensorDatabase, sensorWidth, sensorHeight, focalLengthmm, intrinsicInitMode, true);
+
+                if (lensParam.hasVignetteParams() && !lensParam.vignParams.isEmpty && pParams.lensCorrection.vignetting)
+                {
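+                    // FocalLengthX/Y are expressed relative to the longest image side; when the LCP does not provide them, fall back to the focal length in pixels divided by std::max(width, height).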
+                    float FocX = lensParam.vignParams.FocalLengthX != 0.0
+                                     ? lensParam.vignParams.FocalLengthX
+                                     : width * focalLengthmm / sensorWidth / std::max(width, height);
+                    float FocY = lensParam.vignParams.FocalLengthY != 0.0
+                                     ? lensParam.vignParams.FocalLengthY
+                                     : height * focalLengthmm / sensorHeight / std::max(width, height);
+
+                    pParams.lensCorrection.vParams.clear();
+
+                    if (FocX == 0.0 || FocY == 0.0)
+                    {
+                        ALICEVISION_LOG_WARNING("Vignetting correction is requested but cannot be applied due to missing info.");
+                    }
+                    else
+                    {
+                        pParams.lensCorrection.vParams.push_back(FocX);
+                        pParams.lensCorrection.vParams.push_back(FocY);
+                        pParams.lensCorrection.vParams.push_back(lensParam.vignParams.ImageXCenter);
+                        pParams.lensCorrection.vParams.push_back(lensParam.vignParams.ImageYCenter);
+                        pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam1);
+                        pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam2);
+                        pParams.lensCorrection.vParams.push_back(lensParam.vignParams.VignetteModelParam3);
+                    }
+                }
+
+                if (pParams.lensCorrection.chromaticAberration && lensParam.hasChromaticParams() && !lensParam.ChromaticGreenParams.isEmpty)
+                {
+                    if (lensParam.ChromaticGreenParams.FocalLengthX == 0.0)
+                    {
+                        lensParam.ChromaticGreenParams.FocalLengthX =
+                            width * focalLengthmm / sensorWidth / std::max(width, height);
+                    }
+                    if (lensParam.ChromaticGreenParams.FocalLengthY == 0.0)
+                    {
+                        lensParam.ChromaticGreenParams.FocalLengthY =
+                            height * focalLengthmm / sensorHeight / std::max(width, height);
+                    }
+
+                    if(lensParam.ChromaticGreenParams.FocalLengthX == 0.0 ||
+                       lensParam.ChromaticGreenParams.FocalLengthY == 0.0)
+                    {
+                        pParams.lensCorrection.caGModel.reset();
+                        pParams.lensCorrection.caBGModel.reset();
+                        pParams.lensCorrection.caRGModel.reset();
+
+                        ALICEVISION_LOG_WARNING(
+                            "Chromatic Aberration correction is requested but cannot be applied due to missing info.");
+                    }
+                    else
+                    {
+                        pParams.lensCorrection.caGModel = lensParam.ChromaticGreenParams;
+                        pParams.lensCorrection.caBGModel = lensParam.ChromaticBlueGreenParams;
+                        pParams.lensCorrection.caRGModel = lensParam.ChromaticRedGreenParams;
+                    }
+                }
+
+                if (pParams.lensCorrection.geometry)
+                {
+                    // build intrinsic
+                    const camera::EINTRINSIC defaultCameraModel = camera::EINTRINSIC::PINHOLE_CAMERA_RADIAL3;
+                    const camera::EINTRINSIC allowedCameraModels = camera::EINTRINSIC_parseStringToBitmask("radial3,fisheye4");
+                    const double defaultFocalLength = -1.0;
+                    const double defaultFieldOfView = -1.0;
+                    const double defaultFocalRatio = 1.0;
+                    const double defaultOffsetX = 0.0;
+                    const double defaultOffsetY = 0.0;
+                    intrinsicBase = sfmDataIO::getViewIntrinsic(
+                        view, focalLengthmm, sensorWidth, defaultFocalLength, defaultFieldOfView, defaultFocalRatio,
+                        defaultOffsetX, defaultOffsetY, &lensParam, defaultCameraModel, allowedCameraModels);
+
+                    pParams.lensCorrection.geometryModel = lensParam.perspParams;
+                }
+            }
+            else
+            {
+                ALICEVISION_LOG_WARNING("No LCP file found for image " << inputFilePath);
+                ALICEVISION_LOG_WARNING("Requested lens correction(s) won't be applied");
+            }
+        }
+
         std::map md = view.getMetadata();
 
         // set readOptions
@@ -1367,7 +1692,7 @@ int aliceVision_main(int argc, char * argv[])
         image::readImage(inputFilePath, image, readOptions);
 
         // Image processing
-        processImage(image, pParams, md);
+        processImage(image, pParams, md, intrinsicBase);
 
         image::ImageWriteOptions writeOptions;