
Commit

Modifications in the HyperReduction class: the convention for the coordinate choice has been changed, and the tutorial 24HyperReduction has been updated accordingly. The tutorial 08DEIM has been modified to use the HyperReduction class.
MatthieuPetitS committed Apr 23, 2024
1 parent 0a0bfe8 commit d3a76d5
Showing 10 changed files with 278 additions and 84 deletions.
77 changes: 77 additions & 0 deletions src/ITHACA_CORE/ITHACAutilities/ITHACAutilities.C
@@ -133,4 +133,81 @@ List<T> combineList(List<List<T>>& doubleList)
template List<label> combineList(List<List<label>>& doubleList);


// Compute the Moore-Penrose pseudo-inverse of a matrix with Eigen's SVD
// decomposition; singular values below the tolerance er (default 0) are discarded
Eigen::MatrixXd pinv_eigen_based(Eigen::MatrixXd &origin, const float er) {
// perform svd decomposition
Eigen::JacobiSVD<Eigen::MatrixXd> svd_holder(origin, Eigen::ComputeThinU | Eigen::ComputeThinV);
// Build SVD decomposition results
Eigen::MatrixXd U = svd_holder.matrixU();
Eigen::MatrixXd V = svd_holder.matrixV();
Eigen::MatrixXd D = svd_holder.singularValues();

// Build the S matrix
Eigen::MatrixXd S(V.cols(), U.cols());
S.setZero();

for (unsigned int i = 0; i < D.size(); ++i) {

if (D(i, 0) > er) {
S(i, i) = 1 / D(i, 0);
} else {
S(i, i) = 0;
}
}
return V * S * U.transpose();
}
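For reference, the same construction can be checked with plain Eigen outside the OpenFOAM/ITHACA build. The following minimal standalone sketch (function name, test matrix and tolerance are illustrative, not part of the commit) verifies the Moore-Penrose identity A * A+ * A ≈ A:

#include <Eigen/Dense>
#include <iostream>

// Standalone re-implementation of the SVD-based pseudo-inverse sketched above.
Eigen::MatrixXd pinvSVD(const Eigen::MatrixXd& A, double tol = 0.0)
{
    Eigen::JacobiSVD<Eigen::MatrixXd> svd(A, Eigen::ComputeThinU | Eigen::ComputeThinV);
    const Eigen::VectorXd& sv = svd.singularValues();
    Eigen::VectorXd svInv = Eigen::VectorXd::Zero(sv.size());
    for (Eigen::Index i = 0; i < sv.size(); ++i)
    {
        if (sv(i) > tol) { svInv(i) = 1.0 / sv(i); }   // discard singular values below tol
    }
    return svd.matrixV() * svInv.asDiagonal() * svd.matrixU().transpose();
}

int main()
{
    Eigen::MatrixXd A = Eigen::MatrixXd::Random(6, 4);   // tall test matrix
    Eigen::MatrixXd Aplus = pinvSVD(A);
    // For the Moore-Penrose pseudo-inverse, A * A+ * A should recover A.
    std::cout << "||A - A*Aplus*A|| = " << (A - A * Aplus * A).norm() << std::endl;
    return 0;
}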


Eigen::MatrixXd invertMatrix(Eigen::MatrixXd& matrixToInvert, const word inversionMethod){

Info << "Inversion method : " << inversionMethod << endl;
if(inversionMethod == "pinv_eigen_based"){
return pinv_eigen_based(matrixToInvert);
}
else if(inversionMethod == "direct"){
return matrixToInvert.inverse();
}
else if(inversionMethod == "fullPivLu"){
return matrixToInvert.fullPivLu().inverse();
}
else if(inversionMethod == "partialPivLu"){
return matrixToInvert.partialPivLu().inverse();
}
else if(inversionMethod == "householderQr"){
return matrixToInvert.householderQr().solve(Eigen::MatrixXd::Identity(matrixToInvert.rows(), matrixToInvert.cols()));
}
else if(inversionMethod == "colPivHouseholderQr"){
return matrixToInvert.colPivHouseholderQr().inverse();
}
else if(inversionMethod == "fullPivHouseholderQr"){
return matrixToInvert.fullPivHouseholderQr().inverse();
}
else if(inversionMethod == "completeOrthogonalDecomposition"){
return matrixToInvert.completeOrthogonalDecomposition().pseudoInverse();
}
else if(inversionMethod == "jacobiSvd")
{
return matrixToInvert.jacobiSvd(Eigen::ComputeThinU | Eigen::ComputeThinV).solve(Eigen::MatrixXd::Identity(matrixToInvert.rows(), matrixToInvert.cols()));
}
else if(inversionMethod == "llt")
{
return matrixToInvert.llt().solve(Eigen::MatrixXd::Identity(matrixToInvert.rows(), matrixToInvert.cols()));
}
else if(inversionMethod == "ldlt")
{
Eigen::LLT<Eigen::MatrixXd> lltOfA(matrixToInvert);
return lltOfA.solve(Eigen::MatrixXd::Identity(matrixToInvert.rows(), matrixToInvert.cols()));
}
else if(inversionMethod == "bdcSvd")
{
return matrixToInvert.bdcSvd(Eigen::ComputeThinU | Eigen::ComputeThinV).solve(Eigen::MatrixXd::Identity(matrixToInvert.rows(), matrixToInvert.cols()));
}
else{
Info << "Unkwown inversion method, solving with : completeOrthogonalDecomposition" << endl;
return matrixToInvert.completeOrthogonalDecomposition().pseudoInverse();
}
}


}
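The decompositions dispatched by invertMatrix should agree up to round-off on a well-conditioned invertible matrix. A minimal standalone sketch (plain Eigen, not part of the commit; the test matrix is chosen symmetric positive definite so that the Cholesky branch is also applicable):

#include <Eigen/Dense>
#include <iostream>

int main()
{
    // Build a well-conditioned symmetric positive definite test matrix.
    Eigen::MatrixXd M = Eigen::MatrixXd::Random(5, 5);
    Eigen::MatrixXd A = M * M.transpose() + 5.0 * Eigen::MatrixXd::Identity(5, 5);
    Eigen::MatrixXd I = Eigen::MatrixXd::Identity(5, 5);

    Eigen::MatrixXd invDirect = A.inverse();
    Eigen::MatrixXd invLu     = A.fullPivLu().inverse();
    Eigen::MatrixXd invQr     = A.householderQr().solve(I);
    Eigen::MatrixXd invCod    = A.completeOrthogonalDecomposition().pseudoInverse();
    Eigen::MatrixXd invLlt    = A.llt().solve(I);   // valid because A is SPD

    std::cout << "max |direct - fullPivLu|: "          << (invDirect - invLu).cwiseAbs().maxCoeff()  << "\n"
              << "max |direct - householderQr|: "      << (invDirect - invQr).cwiseAbs().maxCoeff()  << "\n"
              << "max |direct - COD pseudoInverse|: "  << (invDirect - invCod).cwiseAbs().maxCoeff() << "\n"
              << "max |direct - llt|: "                << (invDirect - invLlt).cwiseAbs().maxCoeff() << std::endl;
    return 0;
}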
21 changes: 21 additions & 0 deletions src/ITHACA_CORE/ITHACAutilities/ITHACAutilities.H
@@ -125,6 +125,27 @@ bool isTurbulent();
template<typename T>
List<T> combineList(List<List<T>>& doubleList);

//------------------------------------------------------------------------------
/// @brief Compute the Moore-Penrose pseudo-inverse of a matrix with Eigen's SVD
/// decomposition; singular values below the tolerance er are discarded
///
/// @param[in] origin Matrix to pseudo-invert
/// @param[in] er Tolerance below which singular values are treated as zero (default 0)
///
/// @return Pseudo-inverse of the matrix
///
Eigen::MatrixXd pinv_eigen_based(Eigen::MatrixXd &origin, const float er = 0);

//------------------------------------------------------------------------------
/// @brief Invert a matrix given the method name in the ITHACAdict
///
/// @param[in] matrixToInvert Matrix to invert
/// @param[in] inversionMethod Name of the inversion method (as given by the ITHACAdict keyword InversionMethod)
///
/// @return Inverse (or pseudo-inverse) of the matrix
///
Eigen::MatrixXd invertMatrix(Eigen::MatrixXd& matrixToInvert, const word inversionMethod);


};
#endif
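The inversion method is selected at run time from the ITHACAdict (see the lookupOrDefault call in hyperReduction.templates.H further down). A hypothetical ITHACAdict fragment — keyword names taken from this commit, values purely illustrative — could look like:

InversionMethod    fullPivLu;    // unknown names fall back to completeOrthogonalDecomposition
GreedyMetric       L2;           // default greedy metric used by offlineGappyDEIM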
40 changes: 36 additions & 4 deletions src/ITHACA_HR/hyperReduction.H
@@ -40,7 +40,7 @@ SourceFiles
#include <set>
#include "redsvd"

Eigen::MatrixXd pinv_eigen_based(Eigen::MatrixXd &origin, const float er = 0);


// TODO: uniform style for separating offline and online stages
template <typename... SnapshotsLists>
@@ -79,6 +79,23 @@ public:
word problemName,
SnapshotsLists &&...snapshotsLists);

//----------------------------------------------------------------------
/// @brief Construct HyperReduction class
///
/// @param[in] n_modes Dimension of the HR basis
/// @param[in] n_nodes Number of nodes
/// @param[in] vectorial_dim Field dimension (1 for volScalarField, 3 for volVectorField)
/// @param[in] n_cells Number of cells
/// @param[in] initialSeeds Initial nodes
/// @param[in] problemName Name of the function to be hyper-reduced
///
HyperReduction(label n_modes,
label n_nodes,
unsigned int vectorial_dim,
label n_cells,
Eigen::VectorXi initialSeeds,
word problemName);

// ITHACA parameters dict
ITHACAparameters* para;

@@ -181,16 +198,31 @@ public:

/// Submeshes
autoPtr<volVectorField> submesh_field;

/// Online Matrix
Eigen::MatrixXd MatrixOnline;

//----------------------------------------------------------------------
/// @brief Methods implemented: 'GappyDEIM', from Chaturantabut, Saifon, and Danny C. Sorensen, "Nonlinear model reduction via discrete empirical interpolation", SIAM Journal on Scientific Computing 32.5 (2010): 2737-2764, and 'GNAT', from Carlberg, Kevin, et al., "The GNAT method for nonlinear model reduction: effective implementation and application to computational fluid dynamics and turbulent flows", Journal of Computational Physics 242 (2013): 623-647. Quasi-optimal point selection algorithm 'SOPT' and optimal point selection 'SOPTE' from Lauzon, Jessica T., Siu Wun Cheung, Yeonjong Shin, Youngsoo Choi, Dylan Matthew Copeland, and Kevin Huynh, "S-OPT: A Points Selection Algorithm for Hyper-Reduction in Reduced Order Models", arXiv preprint arXiv:2203.16494 (2022).
///
void offlineGappyDEIM(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& normalizingWeights);
void offlineGappyDEIM(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& normalizingWeights){
methodName = "GappyDEIM";
word name = "ITHACAoutput/" + problemName + "/" + methodName;
offlineGappyDEIM(snapshotsModes, normalizingWeights, name);
}

void offlineGappyDEIM(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& normalizingWeights, word folderMethodName);

//----------------------------------------------------------------------
/// @brief Methods implemented: 'ECP', from Hernandez, Joaquin Alberto, Manuel Alejandro Caicedo, and Alex Ferrer, "Dimensional hyper-reduction of nonlinear finite element models via empirical cubature", Computer Methods in Applied Mechanics and Engineering 313 (2017): 687-722. TODO: implement non-negative LS.
///
void offlineECP(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& normalizingWeights);
void offlineECP(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& normalizingWeights){
methodName = "ECP";
word name = "ITHACAoutput/" + problemName + "/" + methodName;
offlineECP(snapshotsModes, normalizingWeights, name);
}

void offlineECP(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& normalizingWeights, word folderMethodName);

//----------------------------------------------------------------------
/// @brief TODO
@@ -315,7 +347,7 @@ public:
/// @param[in] layers Number of cell layers around the interpolation nodes to include in the submesh
/// @param[in] mesh Global mesh from which the submesh is extracted
///
void generateSubmesh(label layers, fvMesh &mesh);
void generateSubmesh(label layers, const fvMesh &mesh);

//----------------------------------------------------------------------
/// @brief Get local indices in the submesh from indices in the global ones
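A schematic usage sketch of the new snapshot-free constructor together with the offlineGappyDEIM wrappers declared above. This assumes the ITHACA-FV/OpenFOAM build environment; the template parameter, problem name, modes and weights below are placeholders, not taken from the commit:

// Schematic sketch only: inputs are placeholders.
#include "hyperReduction.H"

void runHyperReductionExample(const fvMesh& mesh)
{
    label nModes = 10;                       // dimension of the HR basis
    label nNodes = 15;                       // number of interpolation nodes
    unsigned int vectorialDim = 3;           // 3 for a volVectorField, 1 for a volScalarField
    Eigen::VectorXi initialSeeds;            // initial node indices (placeholder, left empty here)

    // New constructor overload added in this commit.
    HyperReduction<PtrList<volVectorField>> hr(nModes, nNodes, vectorialDim,
                                               mesh.nCells(), initialSeeds,
                                               "nonLinearTerm");

    Eigen::MatrixXd snapshotsModes;          // POD modes of the term to hyper-reduce (placeholder)
    Eigen::VectorXd normalizingWeights;      // e.g. cell-volume based weights (placeholder)

    // Writes its output to ITHACAoutput/nonLinearTerm/GappyDEIM by default ...
    hr.offlineGappyDEIM(snapshotsModes, normalizingWeights);
    // ... or to an explicitly chosen folder:
    // hr.offlineGappyDEIM(snapshotsModes, normalizingWeights, "ITHACAoutput/nonLinearTerm/myFolder");
}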
102 changes: 57 additions & 45 deletions src/ITHACA_HR/hyperReduction.templates.H
@@ -95,29 +95,29 @@ HyperReduction<SnapshotsLists...>::HyperReduction(label n_modes,
Info << "Number of boundary cells: " << n_boundary_cells << endl;
}

// Compute the Moore-Penrose pseudo-inverse of a matrix with Eigen's SVD
// decomposition; singular values below the tolerance er (default 0) are discarded
Eigen::MatrixXd pinv_eigen_based(Eigen::MatrixXd &origin, const float er) {
// perform svd decomposition
Eigen::JacobiSVD<Eigen::MatrixXd> svd_holder(origin, Eigen::ComputeThinU | Eigen::ComputeThinV);
// Build SVD decomposition results
Eigen::MatrixXd U = svd_holder.matrixU();
Eigen::MatrixXd V = svd_holder.matrixV();
Eigen::MatrixXd D = svd_holder.singularValues();

// Build the S matrix
Eigen::MatrixXd S(V.cols(), U.cols());
S.setZero();

for (unsigned int i = 0; i < D.size(); ++i) {

if (D(i, 0) > er) {
S(i, i) = 1 / D(i, 0);
} else {
S(i, i) = 0;
}
}
return V * S * U.transpose();

// Template function constructor
template <typename... SnapshotsLists>
HyperReduction<SnapshotsLists...>::HyperReduction(label n_modes,
label n_nodes,
unsigned int vectorialDim,
label n_cells,
Eigen::VectorXi initialSeeds,
word problemName)
: n_modes{n_modes},
n_nodes{n_nodes},
vectorial_dim{vectorialDim},
n_cells{n_cells},
initialSeeds{initialSeeds},
problemName{problemName}
{
Info << "Init HyperReduction class with vectorial dim: " << vectorial_dim << endl;
para = ITHACAparameters::getInstance();
folderProblem = problemName;
mkDir(folderProblem);

Info << "Initial seeds length: " << initialSeeds.rows() << endl;

}

template <typename... SnapshotsLists>
Expand Down Expand Up @@ -368,10 +368,10 @@ void HyperReduction<SnapshotsLists...>::saveModes(SnapshotsList sList, Eigen::Ma
}

template <typename... SnapshotsLists>
void HyperReduction<SnapshotsLists...>::offlineGappyDEIM(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& weights)
void HyperReduction<SnapshotsLists...>::offlineGappyDEIM(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& weights, word folderMethodName)
{
methodName = "GappyDEIM";
folderMethod = "ITHACAoutput/" + problemName + "/" + methodName;
folderMethod = folderMethodName;
Info << "FolderMethod : " << folderMethod << endl;
mkDir(folderMethod);

normalizingWeights = weights;
@@ -380,7 +380,7 @@ void HyperReduction<SnapshotsLists...>::offlineGappyDEIM(Eigen::MatrixXd& snapsh
IOobject("nodePoints", para->runTime.time().constant(), "../" + folderMethod,
para->mesh, IOobject::READ_IF_PRESENT, IOobject::NO_WRITE)));

auto greedyMetric = para->ITHACAdict->lookupOrDefault<word>("GreedyMetric", "l2");
auto greedyMetric = para->ITHACAdict->lookupOrDefault<word>("GreedyMetric", "L2");

bool offlineStage = !nodePoints().headerOk();

@@ -584,12 +587,15 @@ void HyperReduction<SnapshotsLists...>::offlineGappyDEIM(Eigen::MatrixXd& snapsh
evaluatePinv(P, basisMatrix, normalizingWeights);
renormalizedBasisMatrix = normalizingWeights.asDiagonal()*basisMatrix;

MatrixOnline = renormalizedBasisMatrix * pinvPU;

P.makeCompressed();
cnpy::save(basisMatrix, folderMethod +"/basisMatrix.npy");
cnpy::save(P, folderMethod +"/projectionMatrix.npz");
cnpy::save(normalizingWeights, folderMethod + "/normalizingWeights.npy");
cnpy::save(nodes, folderMethod + "/mp.npy");
cnpy::save(pinvPU, folderMethod + "/pinvPU.npy");
cnpy::save(MatrixOnline, folderMethod + "/MatrixOnline.npy");
nodePoints().write();

Info << "Projection Matrix shape: " << P.rows() << " " << P.cols() << endl;
@@ -600,6 +603,7 @@ void HyperReduction<SnapshotsLists...>::offlineGappyDEIM(Eigen::MatrixXd& snapsh
{
Info << "Read GappyDEIM\n";
cnpy::load(basisMatrix, folderMethod +"/basisMatrix.npy");
cnpy::load(MatrixOnline, folderMethod +"/MatrixOnline.npy");
cnpy::load(normalizingWeights, folderMethod + "/normalizingWeights.npy");
cnpy::load(P, folderMethod +"/projectionMatrix.npz");
cnpy::load(nodes, folderMethod + "/mp.npy");
@@ -614,9 +618,12 @@ void HyperReduction<SnapshotsLists...>::offlineGappyDEIM(Eigen::MatrixXd& snapsh
}
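Up to the normalizing weights, the MatrixOnline stored above is the classical gappy-POD/DEIM reconstruction operator U * pinv(P^T U), applied to the sampled values P^T f. A minimal standalone Eigen sketch (unweighted, random basis, arbitrarily chosen sample rows; purely illustrative, not part of the commit) showing that the reconstruction is exact for a field lying in the span of the basis:

#include <Eigen/Dense>
#include <iostream>

int main()
{
    const int nDofs = 100, nModes = 8;

    // Orthonormal basis U of a random snapshot set.
    Eigen::MatrixXd snapshots = Eigen::MatrixXd::Random(nDofs, nModes);
    Eigen::HouseholderQR<Eigen::MatrixXd> qr(snapshots);
    Eigen::MatrixXd U = qr.householderQ() * Eigen::MatrixXd::Identity(nDofs, nModes);

    // Selection matrix P picking nModes arbitrary rows (magic points would be chosen greedily).
    Eigen::MatrixXd P = Eigen::MatrixXd::Zero(nDofs, nModes);
    for (int j = 0; j < nModes; ++j) { P(5 * j, j) = 1.0; }

    // Online operator: U * pinv(P^T U), applied to the sampled values P^T f.
    Eigen::MatrixXd PtU = P.transpose() * U;
    Eigen::MatrixXd matrixOnline = U * PtU.completeOrthogonalDecomposition().pseudoInverse();

    // A field lying in span(U) is reconstructed exactly from its sampled entries.
    Eigen::VectorXd f = U * Eigen::VectorXd::Random(nModes);
    Eigen::VectorXd fRec = matrixOnline * (P.transpose() * f);
    std::cout << "reconstruction error: " << (f - fRec).norm() << std::endl;
    return 0;
}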

template <typename... SnapshotsLists>
void HyperReduction<SnapshotsLists...>::offlineECP(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& weights)
void HyperReduction<SnapshotsLists...>::offlineECP(Eigen::MatrixXd& snapshotsModes, Eigen::VectorXd& weights, word folderMethodName)
{
methodName = "ECP";
folderMethod = folderMethodName;
Info << "FolderMethod : " << folderMethod << endl;
mkDir(folderMethod);

n_nodes = n_modes + 1;

normalizingWeights = weights;
@@ -626,9 +633,6 @@ void HyperReduction<SnapshotsLists...>::offlineECP(Eigen::MatrixXd& snapshotsMod
<< ", nodePoints=" << n_nodes << " #######"
<< endl;

folderMethod = "ITHACAoutput/" + problemName + "/" + methodName;
mkDir(folderMethod);

nodePoints = autoPtr<IOList<label>>(new IOList<label>(
IOobject("nodePoints", para->runTime.time().constant(), "../" + folderMethod,
para->mesh, IOobject::READ_IF_PRESENT, IOobject::NO_WRITE)));
@@ -754,28 +758,36 @@ void HyperReduction<SnapshotsLists...>::initSeeds(Eigen::VectorXd mp_not_mask, s
template <typename... SnapshotsLists>
void HyperReduction<SnapshotsLists...>::updateNodes(Eigen::SparseMatrix<double>& P, label& ind, Eigen::VectorXd& mp_not_mask)
{
nodePoints->append(ind);

nodes.conservativeResize(nodes.rows()+1);
nodes(nodes.rows()-1) = ind;

unsigned int last_col{0};

if (P.rows()==0)
{
P.resize(n_cells*vectorial_dim, vectorial_dim);
}
else
{
last_col = P.cols();
P.conservativeResize(P.rows(), P.cols() + vectorial_dim);
P.resize(P.rows(), P.cols() + vectorial_dim);
}
int step = P.cols() / vectorial_dim - 1;

for (int ith_node=0; ith_node <= step; ith_node++)
{
for (unsigned int ith_field = 0; ith_field < vectorial_dim; ith_field++)
{
P.insert(nodes[ith_node]+ith_field*n_cells,ith_node+ith_field*(step+1))=1;
}
}

for (unsigned int ith_field = 0; ith_field < vectorial_dim; ith_field++)
{
P.insert(ind + ith_field * n_cells, last_col + ith_field) = 1;
mp_not_mask(ind + ith_field * n_cells) = 0;
}

nodePoints->append(ind);

nodes.conservativeResize(nodes.rows()+1);
nodes(nodes.rows()-1) = ind;
}
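The projection matrix assembled in updateNodes carries one unit entry per (node, field component): rows are offset by ith_field * n_cells and columns are grouped per component. A small standalone Eigen sketch of the same block layout (sizes and node indices are illustrative, not part of the commit) showing how P^T extracts the sampled degrees of freedom from a stacked vector field:

#include <Eigen/Dense>
#include <Eigen/Sparse>
#include <iostream>
#include <vector>

int main()
{
    const int nCells = 10, vectorialDim = 3;
    std::vector<int> nodes = {2, 7};                      // selected cell indices
    const int nNodes = static_cast<int>(nodes.size());

    // Selection matrix with the same layout as in updateNodes:
    // row = node + field * nCells, column = nodeIndex + field * nNodes.
    Eigen::SparseMatrix<double> P(nCells * vectorialDim, nNodes * vectorialDim);
    for (int ithNode = 0; ithNode < nNodes; ++ithNode)
    {
        for (int ithField = 0; ithField < vectorialDim; ++ithField)
        {
            P.insert(nodes[ithNode] + ithField * nCells, ithNode + ithField * nNodes) = 1.0;
        }
    }
    P.makeCompressed();

    // A stacked vector field: component blocks [x; y; z], each of length nCells.
    Eigen::VectorXd field =
        Eigen::VectorXd::LinSpaced(nCells * vectorialDim, 0, nCells * vectorialDim - 1);

    // P^T * field returns the selected DOFs, grouped per component.
    std::cout << "sampled DOFs:\n" << Eigen::VectorXd(P.transpose() * field) << std::endl;
    return 0;
}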

template<typename... SnapshotsLists>
Expand Down Expand Up @@ -863,7 +875,7 @@ void HyperReduction<SnapshotsLists...>::evaluatePinv(Eigen::SparseMatrix<double>
assert(Projector.cols() > 0);
Eigen::MatrixXd restricted = Projector.transpose() * Modes;
Info << "S-Optimalty: " << s_optimality(restricted) << endl;
pinvPU = pinv_eigen_based(restricted);
pinvPU = ITHACAutilities::invertMatrix(restricted, para->ITHACAdict->lookupOrDefault<word>("InversionMethod", "completeOrthogonalDecomposition"));
pinvPU = pinvPU*(Projector.transpose() * fieldWeights.array().cwiseInverse().matrix()).asDiagonal();
}

@@ -875,9 +887,9 @@ void HyperReduction<SnapshotsLists...>::evaluateWPU(Eigen::SparseMatrix<double>
unsigned int n_weightsPerField = quadratureWeights.rows()/vectorial_dim;
unsigned int reorderIndex{0};

for (unsigned int ith_weight = 0; ith_weight < n_weightsPerField; ith_weight++)
{
for (unsigned int ith_field = 0; ith_field < vectorial_dim; ith_field++)
for (unsigned int ith_field = 0; ith_field < vectorial_dim; ith_field++)
{
for (unsigned int ith_weight = 0; ith_weight < n_weightsPerField; ith_weight++)
{
quadratureWeightsOrderedAsProjector(reorderIndex) = quadratureWeights(ith_weight + ith_field * n_weightsPerField);
reorderIndex++;
Expand All @@ -887,7 +899,7 @@ void HyperReduction<SnapshotsLists...>::evaluateWPU(Eigen::SparseMatrix<double>
}

template<typename... SnapshotsLists>
void HyperReduction<SnapshotsLists...>::generateSubmesh(label layers, fvMesh &mesh) {
void HyperReduction<SnapshotsLists...>::generateSubmesh(label layers, const fvMesh &mesh) {

Info << "####### Extract submesh #######\n";
ITHACAparameters *para(ITHACAparameters::getInstance());

