Commit 85d53eae authored by Dmitry I. Lyakh's avatar Dmitry I. Lyakh
Browse files

Added optimization algorithm selection suffix in Reconstructor and Optimizer


Signed-off-by: Dmitry I. Lyakh <quant4me@gmail.com>
parent c2209ded
/** ExaTN:: Variational optimizer of a closed symmetric tensor network expansion functional
REVISION: 2021/10/01
REVISION: 2021/10/02
Copyright (C) 2018-2021 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2021 Oak Ridge National Laboratory (UT-Battelle) **/
......@@ -72,6 +72,12 @@ bool TensorNetworkOptimizer::optimize()
bool TensorNetworkOptimizer::optimize(const ProcessGroup & process_group)
{
return optimize_sd(process_group);
}
bool TensorNetworkOptimizer::optimize_sd(const ProcessGroup & process_group)
{
constexpr bool NORMALIZE_WITH_METRICS = true; //whether to normalize tensor network factors with metrics or not
constexpr double MIN_ACCEPTABLE_DENOM = 1e-13; //minimally acceptable denominator in optimal step size determination
......
/** ExaTN:: Variational optimizer of a closed symmetric tensor network expansion functional
REVISION: 2021/09/08
REVISION: 2021/10/02
Copyright (C) 2018-2021 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2021 Oak Ridge National Laboratory (UT-Battelle) **/
......@@ -67,6 +67,11 @@ public:
static void resetDebugLevel(unsigned int level = 0);
protected:
//Implementation based on the steepest descent algorithm:
bool optimize_sd(const ProcessGroup & process_group); //in: executing process group
private:
struct Environment{
......
/** ExaTN:: Reconstructs an approximate tensor network expansion for a given tensor network expansion
REVISION: 2021/09/14
REVISION: 2021/10/02
Copyright (C) 2018-2021 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2021 Oak Ridge National Laboratory (UT-Battelle) **/
......@@ -111,6 +111,17 @@ bool TensorNetworkReconstructor::reconstruct(const ProcessGroup & process_group,
bool rnd_init,
bool nesterov,
double acceptable_fidelity)
{
return reconstruct_sd(process_group, residual_norm, fidelity, rnd_init, nesterov, acceptable_fidelity);
}
bool TensorNetworkReconstructor::reconstruct_sd(const ProcessGroup & process_group,
double * residual_norm,
double * fidelity,
bool rnd_init,
bool nesterov,
double acceptable_fidelity)
{
unsigned int local_rank; //local process rank within the process group
if(!process_group.rankIsIn(exatn::getProcessRank(),&local_rank)) return true; //process is not in the group: Do nothing
......
/** ExaTN:: Reconstructs an approximate tensor network expansion for a given tensor network expansion
REVISION: 2021/09/14
REVISION: 2021/10/02
Copyright (C) 2018-2021 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2021 Oak Ridge National Laboratory (UT-Battelle) **/
......@@ -81,6 +81,16 @@ public:
static void resetDebugLevel(unsigned int level = 0);
protected:
//Implementation based on the steepest descent algorithm:
bool reconstruct_sd(const ProcessGroup & process_group, //in: executing process group
double * residual_norm, //out: 2-norm of the residual tensor (error)
double * fidelity, //out: squared normalized overlap (fidelity)
bool rnd_init, //in: random initialization flag
bool nesterov, //in: Nesterov acceleration
double acceptable_fidelity); //in: acceptable fidelity
private:
void reinitializeApproximant(const ProcessGroup & process_group);
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.