Commit 44bed2a5 authored by Dmitry I. Lyakh

Implemented the reconstruction solver. Added the necessary methods to the TensorExpansion class.

parent 7c219487
/** ExaTN:: Reconstructor of an approximate tensor network expansion from a given tensor network expansion
REVISION: 2020/01/24
REVISION: 2020/01/31
Copyright (C) 2018-2020 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2020 Oak Ridge National Laboratory (UT-Battelle) **/
@@ -15,6 +15,8 @@ TensorNetworkReconstructor::TensorNetworkReconstructor(std::shared_ptr<TensorExp
double tolerance):
expansion_(expansion), approximant_(approximant), tolerance_(tolerance), fidelity_(0.0)
{
assert(expansion_->isKet() && approximant_->isBra());
assert(expansion_->getRank() == approximant_->getRank());
}
@@ -29,6 +31,13 @@ std::shared_ptr<TensorExpansion> TensorNetworkReconstructor::getSolution(double
bool TensorNetworkReconstructor::reconstruct(double * fidelity)
{
assert(fidelity != nullptr);
//Construct the Lagrangian optimization functional (scalar):
TensorExpansion lagrangian(*approximant_,*expansion_); //overlap term: <approximant|expansion>
TensorExpansion approximant_conjugate = approximant_->clone(); //deep copy of the bra approximant
approximant_conjugate.conjugate(); //the bra approximant becomes a ket
TensorExpansion normalization(approximant_conjugate,*approximant_); //norm term: <approximant|approximant>
lagrangian.appendExpansion(normalization,{1.0,0.0}); //append the norm term with a unit coefficient
//Alternating least squares optimization:
//`Finish
*fidelity = fidelity_;
return true;
......
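For orientation, the scalar functional assembled in TensorNetworkReconstructor::reconstruct() above reads, in bra-ket form,

L(a) = <a|x> + 1.0 * <a|a>

where <a| is the bra approximant, |x> is the ket expansion being reconstructed, and the norm term <a|a> is attached via appendExpansion with the fixed coefficient {1.0,0.0}. This is only a reading of the code as committed, not a statement of the final functional; the alternating least squares loop that will optimize it is still marked with `Finish.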
/** ExaTN:: Reconstructor of an approximate tensor network expansion from a given tensor network expansion
REVISION: 2020/01/24
REVISION: 2020/01/31
Copyright (C) 2018-2020 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2020 Oak Ridge National Laboratory (UT-Battelle) **/
@@ -11,6 +11,8 @@ Copyright (C) 2018-2020 Oak Ridge National Laboratory (UT-Battelle) **/
The reconstruction fidelity is the overlap between the two tensor network expansions.
The reconstruction tolerance is a numerical tolerance used for checking convergence
of the underlying linear algebra procedures.
(B) The reconstructed tensor network expansion must be a Ket (primary space) and
the reconstructing tensor network expansion must be a Bra (dual space).
**/
#ifndef EXATN_RECONSTRUCTOR_HPP_
......
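A minimal calling sketch under the assumptions visible in this diff (a ket expansion to be reconstructed, a bra approximant, a numerical tolerance). The function name reconstruct_with, the variable names, and the omitted headers are illustrative only and are not part of this commit:

//Hypothetical driver; only members visible in this diff are used, everything else is assumed:
double reconstruct_with(std::shared_ptr<TensorExpansion> target,      //assumed: target->isKet() == true
                        std::shared_ptr<TensorExpansion> approximant, //assumed: approximant->isBra() == true
                        double tolerance)
{
 TensorNetworkReconstructor reconstructor(target,approximant,tolerance); //asserts the ket/bra roles and equal ranks
 double fidelity = 0.0;
 bool success = reconstructor.reconstruct(&fidelity); //runs the (still to be finished) optimization
 //reconstructor.getSolution(...): retrieves the optimized approximant (signature truncated in this diff)
 return success ? fidelity : 0.0;
}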
/** ExaTN::Numerics: Tensor network expansion
REVISION: 2019/12/15
REVISION: 2020/01/31
Copyright (C) 2018-2019 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2019 Oak Ridge National Laboratory (UT-Battelle) **/
Copyright (C) 2018-2020 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2020 Oak Ridge National Laboratory (UT-Battelle) **/
#include "tensor_expansion.hpp"
@@ -78,8 +78,19 @@ TensorExpansion::TensorExpansion(const TensorExpansion & expansion, const std::s
}
bool TensorExpansion::appendComponent(std::shared_ptr<TensorNetwork> network, //in: tensor network
const std::complex<double> coefficient) //in: expansion coefficient
TensorExpansion TensorExpansion::clone()
{
TensorExpansion clon;
for(auto iter = this->cbegin(); iter != this->cend(); ++iter){
clon.appendComponent(std::make_shared<TensorNetwork>(*(iter->network_)),iter->coefficient_);
}
//`Finish: Ket, Name
return clon;
}
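A note on clone(): unlike plain copy construction (presumably defaulted), it re-allocates every component's TensorNetwork via std::make_shared, so the clone can be mutated independently of the original; this is why reconstruct() clones the approximant before conjugating it. A small illustrative sketch with hypothetical variables:

TensorExpansion shallow(original);       //copy construction (assumed defaulted): components still reference the same TensorNetwork objects
TensorExpansion deep = original.clone(); //deep copy: each component network is duplicated
deep.conjugate();                        //safe: the tensors referenced by `original` remain untouched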
bool TensorExpansion::appendComponent(std::shared_ptr<TensorNetwork> network,
const std::complex<double> coefficient)
{
auto output_tensor = network->getTensor(0);
const auto output_tensor_rank = output_tensor->getRank();
@@ -111,6 +122,20 @@ bool TensorExpansion::appendComponent(std::shared_ptr<TensorNetwork> network, //
}
bool TensorExpansion::appendExpansion(const TensorExpansion & another,
const std::complex<double> coefficient)
{
if(this->isKet() != another.isKet()) return false;
if(this->getRank() != another.getRank()) return false;
bool appended = true;
for(auto iter = another.cbegin(); iter != another.cend(); ++iter){
appended = this->appendComponent(iter->network_,(iter->coefficient_)*coefficient);
if(!appended) break;
}
return appended;
}
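The semantics of appendExpansion as implemented above: each component of `another` is appended with its coefficient multiplied by the scaling coefficient, after checking that both expansions agree on bra/ket character and rank. A brief sketch with hypothetical expansions `base` and `other`:

TensorExpansion sum = base.clone();             //start from a deep copy of `base`
bool ok = sum.appendExpansion(other,{0.5,0.0}); //each component of `other` enters with its coefficient scaled by 0.5
if(!ok){/* bra/ket or rank mismatch: nothing was appended */}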
void TensorExpansion::conjugate()
{
for(auto & component: components_){
......
/** ExaTN::Numerics: Tensor network expansion
REVISION: 2019/12/15
REVISION: 2020/01/31
Copyright (C) 2018-2019 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2019 Oak Ridge National Laboratory (UT-Battelle) **/
Copyright (C) 2018-2020 Dmitry I. Lyakh (Liakh)
Copyright (C) 2018-2020 Oak Ridge National Laboratory (UT-Battelle) **/
/** Rationale:
(a) A tensor network expansion is an ordered linear expansion
@@ -108,6 +108,8 @@ public:
TensorExpansion & operator=(TensorExpansion &&) noexcept = default;
virtual ~TensorExpansion() = default;
virtual TensorExpansion clone(); //deep copy
inline Iterator begin() {return components_.begin();}
inline Iterator end() {return components_.end();}
inline ConstIterator cbegin() const {return components_.cbegin();}
@@ -118,10 +120,12 @@
return ket_;
}
/** Returns whether the tensor network expansion is a bra or not. **/
inline bool isBra() const{
return !ket_;
}
/** Returns the tensor network expansion name (may be empty). **/
inline const std::string & getName() const{
return name_;
}
@@ -148,6 +152,10 @@
bool appendComponent(std::shared_ptr<TensorNetwork> network, //in: tensor network
const std::complex<double> coefficient); //in: expansion coefficient
/** Appends another tensor network expansion to the current one. **/
bool appendExpansion(const TensorExpansion & another, //in: tensor network expansion
const std::complex<double> coefficient); //in: scaling coefficient (applies to the appended tensor network expansion)
/** Conjugates the tensor network expansion: All constituent tensors are complex conjugated,
all tensor legs reverse their direction, and the complex linear expansion coefficients are conjugated;
a ket tensor network expansion becomes a bra, and vice versa. **/
......
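Per the conjugate() description above, conjugation flips the bra/ket character of the whole expansion, which is what lets the reconstructor turn its bra approximant into a ket for the norm term. A minimal sketch with a hypothetical ket expansion `e`:

e.conjugate();     //all constituent tensors are complex conjugated, tensor legs flip direction, coefficients c -> conj(c)
assert(e.isBra()); //the expansion, previously a ket, is now a bra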