Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add new function for warmstart #688

Merged
merged 5 commits into from
Jun 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions .github/push_mirror.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
#name: Push Mirror

#on: [push, delete]

#jobs:
# PNNL_GitLab:
# runs-on: ubuntu-22.04
# steps:
# - uses: actions/checkout@v1
# - uses: spyoungtech/mirror-action@master
# with:
# REMOTE: ${{ secrets.GIT_REPO_URL }}
# GIT_USERNAME: ${{ secrets.GIT_USER }}
# GIT_PASSWORD: ${{ secrets.GIT_PASSWORD }}
# GIT_PUSH_ARGS: --push-option=ci.skip --tags --force --prune
# - uses: nelonoel/[email protected]
# - name: Trigger Pipeline
# run: |
# response=$(curl -X POST -F token=${{ secrets.PNNL_PIPELINE_TRIGGER_TOKEN }} -F ref=${BRANCH_NAME} https://gitlab.pnnl.gov/api/v4/projects/769/trigger/pipeline)
# exit_code=$?
# sudo apt install jq
# pipeline_id=$(echo $response | jq '.id' | sed 's/"//g')
# echo "PIPELINE_ID=${pipeline_id}" >> $GITHUB_ENV
# exit $exit_code
24 changes: 0 additions & 24 deletions .github/workflows/push_mirror.yml

This file was deleted.

19 changes: 6 additions & 13 deletions .github/workflows/spack_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,14 @@ on: [push]

jobs:
hiop_spack_builds:
# 20.04 is a version shared by E4S cache and Spack binaries for x86_64
runs-on: ubuntu-20.04
container: spack/ubuntu-focal:latest
runs-on: ubuntu-22.04
container: spack/ubuntu-jammy:latest
strategy:
matrix:
spack_spec:
- hiop@develop+mpi~raja~shared~kron~sparse ^openmpi
- hiop@develop~mpi~raja~shared~kron~sparse
- hiop@develop~mpi+raja~shared~kron~sparse
- hiop@develop+mpi~raja~shared~kron~sparse ^openmpi ^libevent~openssl
- hiop@develop~mpi~raja~shared~kron~sparse ^libevent~openssl
- hiop@develop~mpi+raja~shared~kron~sparse ^libevent~openssl

# We will need coinhsl for this, but what are the rules for using
# a coinhsl tarball?
Expand Down Expand Up @@ -41,13 +40,7 @@ jobs:
spack env activate ./spack-env
spack add $SPACK_SPEC target=x86_64
spack develop --path $(pwd) --no-clone hiop@develop
# Add E4S mirror - likely relying on spack cache but nice backup
# https://oaciss.uoregon.edu/e4s/inventory.html
# Need to add build cache before concretizing to re-use dependencies
# Using custom e4s cache due to known bug
# TODO: Update cache link after bug is resolved
spack mirror add E4S https://cache.e4s.io/23.02
spack buildcache keys --install --trust
spack external find --all --exclude python
spack concretize --reuse
git config --global --add safe.directory $(pwd)
spack --stacktrace install --fail-fast
Expand Down
152 changes: 0 additions & 152 deletions .gitlab/pnnl-ci.yml

This file was deleted.

33 changes: 32 additions & 1 deletion src/Interface/hiopInterface.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -465,7 +465,38 @@ class hiopInterfaceBase
{
return true;
}


/**
 * This method provides the user with the full HiOp iterate (primal, slack,
 * and all dual variables) at each iteration of the optimization procedure.
 * @see solution_callback() for an explanation of the parameters.
 *
 * @param[in] x array of (local) entries of the primal variables (managed by Umpire, see note below)
 * @param[in] z_L array of (local) entries of the dual variables for lower bounds (managed by Umpire, see note below)
 * @param[in] z_U array of (local) entries of the dual variables for upper bounds (managed by Umpire, see note below)
 * @param[in] yc array of (local) entries of the dual variables for equality constraints (managed by Umpire, see note below)
 * @param[in] yd array of (local) entries of the dual variables for inequality constraints (managed by Umpire, see note below)
 * @param[in] s array of the slacks added to transfer inequalities to equalities (managed by Umpire, see note below)
 * @param[in] v_L array of (local) entries of the dual variables for constraint lower bounds (managed by Umpire, see note below)
 * @param[in] v_U array of (local) entries of the dual variables for constraint upper bounds (managed by Umpire, see note below)
 *
 * @return true; the default implementation is a no-op that accepts the iterate.
 *
 * @note HiOp's option `callback_mem_space` can be used to change the memory location of array parameters managed by Umpire.
 * More specifically, when `callback_mem_space` is set to `host` (and `mem_space` is `device`), HiOp transfers the
 * arrays from device to host first, and then passes/returns pointers on host for the arrays managed by Umpire. These pointers
 * can be then used in host memory space (without the need to rely on or use Umpire).
 *
 */
virtual bool iterate_full_callback(const double* x,
const double* z_L,
const double* z_U,
const double* yc,
const double* yd,
const double* s,
const double* v_L,
const double* v_U)
{
return true;
}

/**
* A wildcard function used to change the primal variables.
*
Expand Down
69 changes: 68 additions & 1 deletion src/Optimization/hiopNlpFormulation.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1213,7 +1213,7 @@ bool hiopNlpFormulation::user_callback_iterate(int iter,
hiopVectorPar x_host(n_vars_, vec_distrib_, comm_);
x.copy_to_vectorpar(x_host);

hiopVectorPar s_host(n_vars_, vec_distrib_, comm_);
hiopVectorPar s_host(n_cons_ineq_, vec_distrib_, comm_);
s.copy_to_vectorpar(s_host);

hiopVectorPar zl_host(n_vars_, vec_distrib_, comm_);
Expand Down Expand Up @@ -1271,6 +1271,73 @@ bool hiopNlpFormulation::user_callback_iterate(int iter,
return bret;
}

/**
 * Relays the full current iterate (primal, slack, and all dual variables) to
 * the user via hiopInterfaceBase::iterate_full_callback().
 *
 * When option `callback_mem_space` is `host` while `mem_space` is `device`,
 * each vector is first copied into a host-side hiopVectorPar buffer so the
 * user receives plain host pointers; otherwise the vectors' local data
 * pointers are passed through unchanged.
 *
 * @return the value returned by the user's iterate_full_callback().
 */
bool hiopNlpFormulation::user_callback_full_iterate(hiopVector& x,
                                                    hiopVector& z_L,
                                                    hiopVector& z_U,
                                                    hiopVector& y_c,
                                                    hiopVector& y_d,
                                                    hiopVector& s,
                                                    hiopVector& v_L,
                                                    hiopVector& v_U)
{
  // Sanity-check all incoming vector sizes (the host buffers below are
  // allocated with exactly these sizes).
  assert(x.get_size() == n_vars_);
  assert(z_L.get_size() == n_vars_);
  assert(z_U.get_size() == n_vars_);
  assert(y_c.get_size() == n_cons_eq_);
  assert(y_d.get_size() == n_cons_ineq_);
  assert(s.get_size() == n_cons_ineq_);
  assert(v_L.get_size() == n_cons_ineq_);
  assert(v_U.get_size() == n_cons_ineq_);

  bool bret{false};

  if(options->GetString("callback_mem_space")=="host" && options->GetString("mem_space")=="device") {

#if !defined(HIOP_USE_MPI)
    // Without MPI the member distribution/communicator do not exist; shadow
    // them with local stand-ins so the code below compiles in both builds.
    int* vec_distrib_ = nullptr;
    MPI_Comm comm_ = MPI_COMM_SELF;
#endif
    hiopVectorPar x_host(n_vars_, vec_distrib_, comm_);
    x.copy_to_vectorpar(x_host);

    hiopVectorPar zl_host(n_vars_, vec_distrib_, comm_);
    z_L.copy_to_vectorpar(zl_host);

    hiopVectorPar zu_host(n_vars_, vec_distrib_, comm_);
    z_U.copy_to_vectorpar(zu_host);

    hiopVectorPar yc_host(n_cons_eq_, vec_distrib_, comm_);
    y_c.copy_to_vectorpar(yc_host);

    hiopVectorPar yd_host(n_cons_ineq_, vec_distrib_, comm_);
    y_d.copy_to_vectorpar(yd_host);

    hiopVectorPar s_host(n_cons_ineq_, vec_distrib_, comm_);
    s.copy_to_vectorpar(s_host);

    // Bug fix: v_L/v_U must be copied into their own host buffers. The
    // previous code copied them into zl_host/zu_host, clobbering the z-duals
    // and passing uninitialized memory to the callback as vl/vu.
    hiopVectorPar vl_host(n_cons_ineq_, vec_distrib_, comm_);
    v_L.copy_to_vectorpar(vl_host);

    hiopVectorPar vu_host(n_cons_ineq_, vec_distrib_, comm_);
    v_U.copy_to_vectorpar(vu_host);

    bret = interface_base.iterate_full_callback(x_host.local_data_const(),
                                                zl_host.local_data_const(),
                                                zu_host.local_data_const(),
                                                yc_host.local_data_const(),
                                                yd_host.local_data_const(),
                                                s_host.local_data_const(),
                                                vl_host.local_data_const(),
                                                vu_host.local_data_const());
  } else {
    // Same memory space on both sides: pass the local data pointers directly.
    bret = interface_base.iterate_full_callback(x.local_data_const(),
                                                z_L.local_data_const(),
                                                z_U.local_data_const(),
                                                y_c.local_data_const(),
                                                y_d.local_data_const(),
                                                s.local_data_const(),
                                                v_L.local_data_const(),
                                                v_U.local_data_const());
  }
  return bret;
}


bool hiopNlpFormulation::user_force_update(int iter,
double& obj_value,
hiopVector& x,
Expand Down
10 changes: 10 additions & 0 deletions src/Optimization/hiopNlpFormulation.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,16 @@ class hiopNlpFormulation
double alpha_pr,
int ls_trials);

virtual
bool user_callback_full_iterate(hiopVector& x,
hiopVector& z_L,
hiopVector& z_U,
hiopVector& y_c,
hiopVector& y_d,
hiopVector& s,
hiopVector& v_L,
hiopVector& v_U);

virtual
bool user_force_update(int iter,
double& obj_value,
Expand Down
Loading