Commit f73bdd14 authored by Alvarez, Gonzalo's avatar Alvarez, Gonzalo
Browse files

Tensor.h: no longer needs non-const data() member

parent 505c25fb
Loading
Loading
Loading
Loading
+4 −4
Original line number Diff line number Diff line
@@ -45,10 +45,10 @@ public:
		throw PsimagLite::RuntimeError("Not implemented yet\n");
	}

//	HandleType operator()()
//	{
//		throw PsimagLite::RuntimeError("Not implemented yet\n");
//	}
	// Handle-returning call operator; placeholder that always throws
	// until an implementation is provided.
	HandleType operator()()
	{
		throw PsimagLite::RuntimeError("Not implemented yet\n");
	}

	void printResult(std::ostream& os) const
	{
+25 −14
Original line number Diff line number Diff line
@@ -36,13 +36,12 @@ public:
	typedef PsimagLite::Vector<SizeType>::Type VectorSizeType;
	typedef typename PsimagLite::Vector<ComplexOrRealType>::Type VectorComplexOrRealType;
	typedef std::pair<PsimagLite::String, SizeType> PairStringSizeType;
	typedef std::shared_ptr<exatn::numerics::Tensor> TensorBlobType;
	typedef ComplexOrRealType* TensorBlobType;

	// Tensor with only one dimension
	Tensor(PsimagLite::String name, SizeType dim0, SizeType ins)
	    : name_(name),
	      dimensions_(1, dim0),
	      //data_(std::make_shared<exatn::numerics::Tensor>(name, exatn::numerics::TensorShape(dimensions_))),
	      ins_(ins)
	{
		exatn::createTensor(name_, exatn::TensorElementType::REAL64, exatn::numerics::TensorShape(dimensions_));
@@ -51,7 +50,6 @@ public:
	Tensor(PsimagLite::String name, const VectorSizeType& d, SizeType ins)
	    : name_(name),
	      dimensions_(d),
	      //data_(std::make_shared<exatn::numerics::Tensor>(name, exatn::numerics::TensorShape(dimensions_))),
	      ins_(ins)
	{
		exatn::createTensor(name_, exatn::TensorElementType::REAL64, exatn::numerics::TensorShape(dimensions_));
@@ -263,19 +261,27 @@ public:
//		return index;
//	}

	const TensorBlobType& data() const
	const ComplexOrRealType* data() const
	{

		std::shared_ptr<talsh::Tensor> ptr = exatn::getLocalTensor(name_);
		const ComplexOrRealType* ptr2 = 0;
		bool ret = ptr->getDataAccessHostConst(&ptr2);
		//if (!ret)
			// check error
		const ComplexOrRealType** ptr2;
		bool ret = ptr->getDataAccessHostConst(ptr2);
		checkTalshErrorCode(ret, "getLocalTensor");
		return *ptr2;
	}

	void setData(const TensorBlobType& data)
	void setData(const ComplexOrRealType* data)
	{
		std::shared_ptr<talsh::Tensor> ptr = exatn::getLocalTensor(name_);
		ComplexOrRealType** ptr2;
		bool ret = ptr->getDataAccessHost(ptr2);
		checkTalshErrorCode(ret, "getLocalTensor");

		const SizeType n = ptr->getVolume();
		for (SizeType i = 0; i < n; ++i)
			(*ptr2)[i] = data[i];

		// needs to be done as an add
		// data_ = data;
	}
@@ -284,19 +290,24 @@ public:

private:

	//static exatn::numerics::TensorOpFactory* opFactory_;
	// Non-copyable: each Tensor wraps a uniquely named exatn tensor.
	Tensor(const Tensor&) = delete;

	Tensor& operator=(const Tensor&) = delete;

	// Throw a descriptive RuntimeError when a TALSH call reports failure.
	// `code` is the boolean result of the TALSH call; `what` names the
	// call for the error message.
	void checkTalshErrorCode(bool code, PsimagLite::String what) const
	{
		if (!code)
			throw PsimagLite::RuntimeError("MERA++: TALSH returned false from " + what + "\n");
	}

	static PsimagLite::RandomForTests<ComplexOrRealType> rng_;
	PsimagLite::String name_;
	VectorSizeType dimensions_;
	//TensorBlobType data_;
	SizeType ins_;
};

// Definition of the static per-instantiation RNG, seeded with the fixed
// value 1234 so runs are reproducible.
template<typename ComplexOrRealType>
PsimagLite::RandomForTests<ComplexOrRealType> Tensor<ComplexOrRealType>::rng_(1234);

//template<typename ComplexOrRealType>
//exatn::numerics::TensorOpFactory* Tensor<ComplexOrRealType>::opFactory_ = exatn::numerics::TensorOpFactory::get();

}
#endif // TENSOR_EXATN_H
+5 −6
Original line number Diff line number Diff line
@@ -192,14 +192,13 @@ public:

	// (name, id) pair identifying the tensor being optimized.
	const PairStringSizeType& nameId() const { return tensorToOptimize_; }

	// Index (into tensors_) of the tensor being optimized.
	SizeType indexToOptimize() const { return indToOptimize_; }

	// Layer this optimizer belongs to -- presumably a MERA layer; confirm.
	SizeType layer() const { return layer_; }

	// Restore the optimized tensor's data from the last saveTensor() call.
	void restoreTensor()
	{
		// BUG FIX: the old stack_-based lines were left in alongside the
		// new savedTensor_ path even though the stack_ member was removed
		// in this change; keep only the savedTensor_ mechanism and guard
		// against restoring before any save happened.
		if (savedTensor_ == nullptr)
			throw PsimagLite::RuntimeError("restoreTensor: no saved tensor\n");
		tensors_[indToOptimize_]->setData(savedTensor_);
		// NOTE(review): savedTensor_ is the raw pointer returned by
		// Tensor::data(), which points into the tensor's live host
		// buffer -- restoring from it may be a no-op unless a deep copy
		// is taken somewhere. TODO confirm.
	}

	const SizeType& firstOfLayer() const { return firstOfLayer_; }
@@ -214,7 +213,7 @@ private:

	// Remember the current data pointer of the tensor being optimized so
	// restoreTensor() can put it back later.
	void saveTensor()
	{
		// BUG FIX: the old stack_.push(...) line was left in the diff but
		// the stack_ member was removed in this change; only the
		// savedTensor_ assignment remains valid.
		savedTensor_ = tensors_[indToOptimize_]->data();
		// NOTE(review): this stores a non-owning pointer into the
		// tensor's host buffer, not a snapshot of its contents --
		// confirm a deep copy is not required here.
	}

	PsimagLite::String conditionToSrep(PairStringSizeType nameId,
@@ -456,7 +455,7 @@ private:
	SymmetryLocalType* symmLocal_;
	bool verbose_;
	PsimagLite::Random48<double> rng_;
	StackVectorType stack_;
	const ComplexOrRealType* savedTensor_;
}; // class TensorOptimizer
} // namespace Mera
#endif // TENSOROPTIMIZER_H