From 42c03ef9dee973857da2a9aa672e1b89134d1774 Mon Sep 17 00:00:00 2001 From: Jiri Pittner Date: Sat, 6 Apr 2024 06:37:17 +0200 Subject: [PATCH] working on tensor --- tensor.cc | 2 ++ tensor.h | 28 +++++++++++++++++++--------- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/tensor.cc b/tensor.cc index e0db0dc..2e2d375 100644 --- a/tensor.cc +++ b/tensor.cc @@ -193,6 +193,7 @@ template void Tensor::put(int fd) const { shape.put(fd,true); +groupsizes.put(fd,true); cumsizes.put(fd,true); data.put(fd,true); } @@ -202,6 +203,7 @@ void Tensor::get(int fd) { shape.get(fd,true); myrank=calcrank(); //is not stored but recomputed +groupsizes.get(fd,true); cumsizes.get(fd,true); data.get(fd,true); } diff --git a/tensor.h b/tensor.h index 11941a1..a099ac0 100644 --- a/tensor.h +++ b/tensor.h @@ -89,6 +89,7 @@ template class Tensor { int myrank; NRVec shape; + NRVec groupsizes; //group sizes of symmetry index groups (a function of shape but precomputed for efficiency) NRVec cumsizes; //cumulative sizes of symmetry index groups (a function of shape but precomputed for efficiency) NRVec data; @@ -101,12 +102,14 @@ private: public: //constructors Tensor() : myrank(0) {}; - Tensor(const NRVec &s) : shape(s), data((int)getsize()), myrank(calcrank()) {data.clear();}; //general tensor - Tensor(const indexgroup &g) {shape.resize(1); shape[0]=g; data.resize(getsize()); myrank=calcrank(); data.clear();}; //tensor with a single index group + Tensor(const NRVec &s) : shape(s), data((int)calcsize()), myrank(calcrank()) {}; //general tensor + Tensor(const indexgroup &g) {shape.resize(1); shape[0]=g; data.resize(calcsize()); myrank=calcrank();}; //tensor with a single index group + Tensor(const Tensor &rhs): myrank(rhs.myrank), shape(rhs.shape), groupsizes(rhs.groupsizes), cumsizes(rhs.cumsizes), data(rhs.data) {}; + void clear() {data.clear();}; int rank() const {return myrank;}; int calcrank(); //is computed from shape - LA_largeindex getsize(); //set redundant data and return 
total size + LA_largeindex calcsize(); //set redundant data and return total size LA_largeindex size() const {return data.size();}; void copyonwrite() {shape.copyonwrite(); data.copyonwrite();}; inline Signedpointer lhs(const SUPERINDEX &I) {int sign; LA_largeindex i=index(&sign,I); return Signedpointer(&data[i],sign);}; @@ -116,18 +119,24 @@ public: inline Signedpointer lhs(LA_index i1...) {va_list args; int sign; LA_largeindex i; va_start(args,i1); i= vindex(&sign, i1,args); return Signedpointer(&data[i],sign); }; inline T operator()(LA_index i1...) {va_list args; ; int sign; LA_largeindex i; va_start(args,i1); i= vindex(&sign, i1,args); if(sign==0) return 0; return sign>0 ?data[i] : -data[i];}; + inline Tensor& operator=(const Tensor &rhs) {myrank=rhs.myrank; shape=rhs.shape; groupsizes=rhs.groupsizes; cumsizes=rhs.cumsizes; data=rhs.data; return *this;}; + inline Tensor& operator*=(const T &a) {data*=a; return *this;}; + inline Tensor operator*(const T &a) const {Tensor r(*this); r *=a; return r;}; inline Tensor& operator/=(const T &a) {data/=a; return *this;}; + inline Tensor operator/(const T &a) const {Tensor r(*this); r /=a; return r;}; void put(int fd) const; void get(int fd); //@@@TODO - unwinding to full size in a specified index - //@@@TODO - contractions - basic and efficient - //@@@dodelat indexy + //@@@TODO - contractions - basic and efficient? first contraction in a single index; between a given group+index in group at each tensor //@@@ dvojite rekurzivni loopover s callbackem - nebo iterator s funkci next??? //@@@nebo inverse index function? //@@@ stream i/o na zaklade tohoto + //@@@permuteindexgroups + //@@@symmetreize a group, antisymmetrize a group, expand a (anti)symmetric grtoup + //@@@@@@+= -= + - on same shape }; @@ -148,8 +157,9 @@ return r; template -LA_largeindex Tensor::getsize() +LA_largeindex Tensor::calcsize() { +groupsizes.resize(shape.size()); cumsizes.resize(shape.size()); LA_largeindex s=1; for(int i=0; i