Issue 637 fix #638

Merged: 3 commits, Jun 30, 2023
Changes from 2 commits
41 changes: 27 additions & 14 deletions src/Optimization/hiopHessianLowRank.cpp
@@ -141,6 +141,9 @@ hiopHessianLowRank::hiopHessianLowRank(hiopNlpDenseConstraints* nlp_, int max_me
_Vmat = V->alloc_clone();
#endif

yk = dynamic_cast<hiopVectorPar*>(nlp->alloc_primal_vec());
sk = dynamic_cast<hiopVectorPar*>(nlp->alloc_primal_vec());

}

hiopHessianLowRank::~hiopHessianLowRank()
@@ -153,6 +156,8 @@ hiopHessianLowRank::~hiopHessianLowRank()
if(L) delete L;
if(D) delete D;
if(V) delete V;
if(yk) delete yk;
if(sk) delete sk;
#ifdef HIOP_DEEPCHECKS
delete _Vmat;
#endif
@@ -181,6 +186,14 @@ hiopHessianLowRank::~hiopHessianLowRank()
if(_2l_vec1) delete _2l_vec1;
if(_V_ipiv_vec) delete[] _V_ipiv_vec;
if(_V_work_vec) delete _V_work_vec;

for(auto* it: a) {
delete it;
}

for(auto* it: b) {
delete it;
}
}


@@ -934,23 +947,30 @@ void hiopHessianLowRank::timesVecCmn(double beta, hiopVector& y, double alpha, c
nlp->log->write("y_in=", y, hovMatrices);
}

hiopVectorPar *yk=dynamic_cast<hiopVectorPar*>(nlp->alloc_primal_vec());
hiopVectorPar *sk=dynamic_cast<hiopVectorPar*>(nlp->alloc_primal_vec());
//allocate and compute a_k and b_k
vector<hiopVector*> a(l_curr), b(l_curr);
a.resize(l_curr, nullptr);
b.resize(l_curr, nullptr);
int n_local = Yt->get_local_size_n();
for(int k=0; k<l_curr; k++) {
//bk=yk/sqrt(yk'*sk)
yk->copyFrom(Yt->local_data() + k*n_local);
sk->copyFrom(St->local_data() + k*n_local);
double skTyk=yk->dotProductWith(*sk);
assert(skTyk>0);
b[k]=dynamic_cast<hiopVectorPar*>(nlp->alloc_primal_vec());

if(skTyk < std::numeric_limits<double>::epsilon()) {
nlp->log->printf(hovLinAlgScalars, "hiopHessianLowRank: ||s_k^T*y_k||=%12.6e too small..."
" set it to machine precision = %12.6e \n", skTyk, std::numeric_limits<double>::epsilon());
skTyk = std::numeric_limits<double>::epsilon();
}

if(a[k] == nullptr && b[k] == nullptr) {
b[k]=dynamic_cast<hiopVectorPar*>(nlp->alloc_primal_vec());
a[k]=dynamic_cast<hiopVectorPar*>(nlp->alloc_primal_vec());
}

b[k]->copyFrom(*yk);
b[k]->scale(1/sqrt(skTyk));

a[k]=dynamic_cast<hiopVectorPar*>(nlp->alloc_primal_vec());

//compute ak by an inner loop
a[k]->copyFrom(*sk);
a[k]->scale(sigma);
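
Note on the curvature safeguard introduced in the hunk above: the limited-memory update assumes s_k^T y_k > 0, and dividing by the square root of a tiny or non-positive product would blow up b_k, so the product is floored at machine epsilon. A minimal standalone sketch of that guard, using plain std::vector<double> rather than hiopVectorPar (the function name is illustrative, not HiOp API):

#include <cmath>
#include <limits>
#include <numeric>
#include <vector>

// b_k = y_k / sqrt(s_k^T y_k), with the dot product floored at machine
// epsilon so the scaling stays finite even for degenerate (s_k, y_k) pairs.
std::vector<double> make_bk(const std::vector<double>& yk, const std::vector<double>& sk)
{
  double skTyk = std::inner_product(sk.begin(), sk.end(), yk.begin(), 0.0);
  if (skTyk < std::numeric_limits<double>::epsilon()) {
    skTyk = std::numeric_limits<double>::epsilon();
  }
  std::vector<double> bk(yk);
  const double scal = 1.0 / std::sqrt(skTyk);
  for (double& v : bk) v *= scal;
  return bk;
}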
@@ -986,13 +1006,6 @@ void hiopHessianLowRank::timesVecCmn(double beta, hiopVector& y, double alpha, c
nlp->log->write("y_out=", y, hovMatrices);
}

for(vector<hiopVector*>::iterator it=a.begin(); it!=a.end(); ++it)
delete *it;
for(vector<hiopVector*>::iterator it=b.begin(); it!=b.end(); ++it)
delete *it;

delete yk;
delete sk;
}

void hiopHessianLowRank::timesVec(double beta, hiopVector& y, double alpha, const hiopVector&x)
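The overall pattern of this change, as visible in the hunks above, is to hoist the per-call scratch allocations (yk, sk and the a/b vectors) out of timesVecCmn and into the object: they are allocated once, reused on every call, and deleted in the destructor. A minimal sketch of the same allocation-hoisting idea using standard containers rather than HiOp's vector classes (class and member names here are illustrative only):

#include <cstddef>
#include <memory>
#include <vector>

class LowRankHessianLike {
public:
  explicit LowRankHessianLike(std::size_t n) : n_(n) {}

  // Called many times per optimization run; the scratch vectors a_ and b_
  // are created on first use and then reused, mirroring the PR's change.
  void timesVec(std::size_t mem_len)
  {
    if (a_.size() < mem_len) {
      a_.resize(mem_len);
      b_.resize(mem_len);
    }
    for (std::size_t k = 0; k < mem_len; ++k) {
      if (!a_[k]) a_[k] = std::make_unique<std::vector<double>>(n_, 0.0);
      if (!b_[k]) b_[k] = std::make_unique<std::vector<double>>(n_, 0.0);
      // ... fill and use a_[k], b_[k] as scratch space ...
    }
  }

private:
  std::size_t n_;
  // unique_ptr releases the scratch automatically; the PR itself keeps raw
  // hiopVector* members and deletes them explicitly in ~hiopHessianLowRank().
  std::vector<std::unique_ptr<std::vector<double>>> a_, b_;
};
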
4 changes: 4 additions & 0 deletions src/Optimization/hiopHessianLowRank.hpp
@@ -131,6 +131,10 @@ class hiopHessianLowRank : public hiopMatrix
int sigma_update_strategy;
double sigma_safe_min, sigma_safe_max; //min and max safety thresholds for sigma
hiopNlpDenseConstraints* nlp;
mutable std::vector<hiopVector*> a;
mutable std::vector<hiopVector*> b;
hiopVectorPar *yk;
hiopVectorPar *sk;
private:
hiopVector* DhInv; //(B0+Dk)^{-1}
// needed in timesVec (for residual checking in solveCompressed.
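On the header side, a and b are declared mutable, presumably because they are grown inside const member functions such as the matrix-vector product. A short illustration of that idiom under this assumption (illustrative class, not HiOp code):

#include <vector>

class CachedProduct {
public:
  double apply(double x) const
  {
    // Growing the cache does not change the object's observable state,
    // so the mutable qualifier allows it inside a const method.
    if (scratch_.empty()) scratch_.assign(8, 0.0);
    scratch_[0] = x;
    return 2.0 * scratch_[0];
  }

private:
  mutable std::vector<double> scratch_;
};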