// Xerus - A General Purpose Tensor Library
// Copyright (C) 2014-2017 Benjamin Huber and Sebastian Wolf.
// 
// Xerus is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published
// by the Free Software Foundation, either version 3 of the License,
// or (at your option) any later version.
// 
// Xerus is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// 
// You should have received a copy of the GNU Affero General Public License
// along with Xerus. If not, see <http://www.gnu.org/licenses/>.
//
// For further information on Xerus visit https://libXerus.org 
// or contact us at contact@libXerus.org.

/**
 * @file
 * @brief Implementation of the Tensor class.
 */

#include <xerus/tensor.h>
#include <xerus/misc/check.h>

#include <xerus/misc/containerSupport.h>
#include <xerus/misc/basicArraySupport.h>
#include <xerus/misc/math.h>
#include <xerus/misc/internal.h>

#include <xerus/blasLapackWrapper.h>
#include <xerus/cholmod_wrapper.h>
#include <xerus/sparseTimesFullContraction.h>
#include <xerus/index.h>
#include <xerus/tensorNetwork.h>

#include <fstream>
#include <iomanip>

namespace xerus {
	// Factor determining when a sparse tensor should be converted to a dense one,
	// see use_dense_representation_if_desirable().
	size_t Tensor::sparsityFactor = 4;
	
	/*- - - - - - - - - - - - - - - - - - - - - - - - - - Constructors - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*/
	
	Tensor::Tensor(const Representation _representation) : Tensor(DimensionTuple({}), _representation) { }
	
	
	Tensor::Tensor(DimensionTuple _dimensions, const Representation _representation, const Initialisation _init) 
		: dimensions(std::move(_dimensions)), size(misc::product(dimensions)), representation(_representation)
	{
		REQUIRE(size != 0, "May not create tensors with a dimension == 0.");
		
		if(representation == Representation::Dense) {
			denseData.reset(new value_t[size], internal::array_deleter_vt);
			if(_init == Initialisation::Zero) {
				misc::set_zero(denseData.get(), size);
			}
		} else {
			sparseData.reset(new std::map<size_t, value_t>());
		}
	}
	
	
	Tensor::Tensor(DimensionTuple _dimensions, std::unique_ptr<value_t[]>&& _data)
	: dimensions(std::move(_dimensions)), size(misc::product(dimensions)), representation(Representation::Dense), denseData(_data.release()) {
		REQUIRE(size != 0, "May not create tensors with a dimension == 0.");
	}
	
	
	Tensor::Tensor(DimensionTuple _dimensions, const std::function<value_t()>& _f) : Tensor(std::move(_dimensions), Representation::Dense, Initialisation::None) {
		value_t* const realData = denseData.get();
		for (size_t i=0; i < size; ++i) {
			realData[i] = _f();
		}
	}
	
	
	Tensor::Tensor(DimensionTuple _dimensions, const std::function<value_t(const size_t)>& _f) : Tensor(std::move(_dimensions), Representation::Dense, Initialisation::None) {
		value_t* const realData = denseData.get();
		for (size_t i=0; i < size; ++i) {
			realData[i] = _f(i);
		}
	}
	
	
	Tensor::Tensor(DimensionTuple _dimensions, const std::function<value_t(const MultiIndex&)>& _f) : Tensor(std::move(_dimensions), Representation::Dense, Initialisation::None) {
		value_t* const realData = denseData.get();
		MultiIndex multIdx(degree(), 0);
		size_t idx = 0;
		while (true) {
			realData[idx] = _f(multIdx);
			// Increase the multi-index like an odometer: bump the last mode and carry over.
			idx++;
			size_t changingIndex = degree()-1;
			multIdx[changingIndex]++;
			while(multIdx[changingIndex] == dimensions[changingIndex]) {
				multIdx[changingIndex] = 0;
				changingIndex--;
				// changingIndex underflows past zero once every mode has overflown, i.e. all entries are written.
				if(changingIndex >= degree()) { return; }
				multIdx[changingIndex]++;
			}
		}
	}
	
	
	Tensor::Tensor(DimensionTuple _dimensions, const size_t _N, const std::function<std::pair<size_t, value_t>(size_t, size_t)>& _f) : Tensor(std::move(_dimensions), Representation::Sparse, Initialisation::Zero) {
		REQUIRE(_N <= size, "Cannot create more non-zero entries than the size of the Tensor.");
		for (size_t i = 0; i < _N; ++i) {
			std::pair<size_t, value_t> entry = _f(i, size);
			REQUIRE(entry.first < size, "Position is out of bounds " << entry.first);
			REQUIRE(!misc::contains(*sparseData, entry.first), "Already contained " << entry.first);
			sparseData->insert(std::move(entry));
		}
	}
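	// Illustrative usage of the generator constructors (a sketch, not part of the library):
	//
	//   // Dense 3x3 tensor with entries A[i,j] = i + j, via the MultiIndex generator.
	//   xerus::Tensor A({3, 3}, [](const xerus::Tensor::MultiIndex& _idx) {
	//       return double(_idx[0] + _idx[1]);
	//   });
	//
	//   // Sparse 100x100 tensor with 5 entries; the callback receives the entry
	//   // counter and the total size and must return pairwise distinct positions.
	//   xerus::Tensor S({100, 100}, 5, [](size_t _i, size_t _size) {
	//       return std::pair<size_t, double>(2001*_i, 1.0);  // distinct positions < _size
	//   });
	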
	
	
	Tensor Tensor::ones(DimensionTuple _dimensions) {
		Tensor ret(std::move(_dimensions), Representation::Dense, Initialisation::None);
		value_t* const data = ret.get_dense_data();
		for(size_t i = 0; i < ret.size; ++i) {
			data[i] = 1.0;
		}
		return ret;
	}
	
	Tensor Tensor::identity(DimensionTuple _dimensions) {
		REQUIRE(_dimensions.size()%2 == 0, "Identity tensor must have even degree, here: " << _dimensions.size());
		const size_t d = _dimensions.size();
		
		Tensor ret(std::move(_dimensions), Representation::Sparse);
		MultiIndex position(d, 0);
		
		if(d == 0) {
			ret[position] = 1.0;
		} else {
			// Set every entry whose first d/2 indices equal its last d/2 indices to one,
			// iterating the paired indices like a multi-level counter.
			bool notMultiLevelBreak = true;
			while(notMultiLevelBreak) {
				ret[position] = 1.0;
				
				position[0]++; position[d/2]++;
				size_t node = 0;
				while(position[node] == std::min(ret.dimensions[node], ret.dimensions[d/2+node])) {
					position[node] = position[d/2+node] = 0;
					node++;
					if(node == d/2) {notMultiLevelBreak = false; break;}
					position[node]++; position[d/2+node]++;
				}
			}
		}
		
		return ret;
	}
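	// Illustrative (sketch): Tensor::identity({2, 3, 2, 3}) yields the order-4 tensor
	// I[i,j,k,l] = (i==k)*(j==l), i.e. the identity operator on a 2x3 index pair.
	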
	
	
	Tensor Tensor::kronecker(DimensionTuple _dimensions) {
		Tensor ret(std::move(_dimensions), Representation::Sparse);
		if(ret.degree() == 0) {
			ret[{}] = 1.0;
		} else {
			for(size_t i = 0; i < misc::min(ret.dimensions); ++i) {
				ret[MultiIndex(ret.degree(), i)] = 1.0;
			}
		}
		return ret;
	}
	
	
	Tensor Tensor::dirac(DimensionTuple _dimensions, const MultiIndex& _position) {
		Tensor ret(std::move(_dimensions), Representation::Sparse);
		ret[_position] = 1.0;
		return ret;
	}
	
	
	Tensor Tensor::dirac(DimensionTuple _dimensions, const size_t _position) {
		Tensor ret(std::move(_dimensions), Representation::Sparse);
		ret[_position] = 1.0;
		return ret;
	}
	
	
	Tensor Tensor::dense_copy() const {
		Tensor ret(*this);
		ret.use_dense_representation();
		return ret;
	}
	
	
	Tensor Tensor::sparse_copy() const {
		Tensor ret(*this);
		ret.use_sparse_representation();
		return ret;
	}
	
	
	Tensor& Tensor::operator=(const TensorNetwork& _network) {
		return operator=(Tensor(_network));
	}
	
	
	/*- - - - - - - - - - - - - - - - - - - - - - - - - - Information - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*/
	size_t Tensor::degree() const {
		return dimensions.size();
	}
	
	
	bool Tensor::has_factor() const {
		// Exact comparison is intended here: the factor is only ever reset to exactly 1.0,
		// so no epsilon comparison is needed.
		#pragma GCC diagnostic push
			#pragma GCC diagnostic ignored "-Wfloat-equal"
			return (factor != 1.0);
		#pragma GCC diagnostic pop
	}
	
	
	bool Tensor::is_dense() const {
		INTERNAL_CHECK((representation == Representation::Dense && denseData && !sparseData) || (representation == Representation::Sparse && sparseData && !denseData), "Internal Error: " << bool(representation) << bool(denseData) << bool(sparseData));
		return representation == Representation::Dense;
	}
	
	
	bool Tensor::is_sparse() const {
		INTERNAL_CHECK((representation == Representation::Dense && denseData && !sparseData) || (representation == Representation::Sparse && sparseData && !denseData), "Internal Error: " << bool(representation) << bool(denseData) << bool(sparseData));
		return representation == Representation::Sparse;
	}
	
	
	size_t Tensor::sparsity() const {
		if(is_sparse()) {
			return sparseData->size();
		}
		return size;
	}
	
	
	size_t Tensor::count_non_zero_entries(const value_t _eps) const {
		if(is_dense()) {
			size_t count = 0;
			for(size_t i = 0; i < size; ++i) {
				if(std::abs(denseData.get()[i]) > _eps ) { count++; }
			}
			return count;
		}
		size_t count = 0;
		for(const auto& entry : *sparseData) {
			if(std::abs(entry.second) > _eps) { count++; }
		}
		return count;
	}
	
	
	bool Tensor::all_entries_valid() const {
		if(is_dense()) {
			for(size_t i = 0; i < size; ++i) {
				if(!std::isfinite(denseData.get()[i])) { return false; }
			}
		} else {
			for(const auto& entry : *sparseData) {
				if(!std::isfinite(entry.second)) { return false; }
			}
		}
		return true;
	}
	
	
	size_t Tensor::reorder_cost() const {
		// Heuristic cost model: handling a sparse entry is assumed to be about ten
		// times as expensive as handling a dense one.
		return is_sparse() ? 10*sparsity() : size;
	}
	
	
	value_t Tensor::one_norm() const {
		if(is_dense()) {
			return std::abs(factor)*blasWrapper::one_norm(denseData.get(), size);
		}
		value_t norm = 0;
		for(const auto& entry : *sparseData) {
			norm += std::abs(entry.second);
		}
		return std::abs(factor)*norm;
	}
	
	
	value_t Tensor::frob_norm() const {
		if(is_dense()) {
			return std::abs(factor)*blasWrapper::two_norm(denseData.get(), size);
		}
		value_t norm = 0;
		for(const auto& entry : *sparseData) {
			norm += misc::sqr(entry.second);
		}
		return std::abs(factor)*sqrt(norm);
	}
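	// Illustrative (sketch): both norms include the lazily stored scalar factor,
	// e.g. after T *= 3.0 the reported frob_norm() is three times the previous one
	// without any pass over the data.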
	
	
	/*- - - - - - - - - - - - - - - - - - - - - - - - - - Basic arithmetics - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*/
	Tensor& Tensor::operator+=(const Tensor& _other) {
		plus_minus_equal<1>(*this, _other);
		return *this;
	}
	
	
	Tensor& Tensor::operator-=(const Tensor& _other) {
		plus_minus_equal<-1>(*this, _other);
		return *this;
	}
	
	
	// Scaling only touches the lazily applied global factor and is therefore O(1).
	Tensor& Tensor::operator*=(const value_t _factor) {
		factor *= _factor;
		return *this;
	}
	
	
	Tensor& Tensor::operator/=(const value_t _divisor) {
		factor /= _divisor;
		return *this;
	}
	
	
	/*- - - - - - - - - - - - - - - - - - - - - - - - - - Access - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*/
	value_t& Tensor::operator[](const size_t _position) {
		REQUIRE(_position < size, "Position " << _position << " does not exist in Tensor of dimensions " << dimensions);
		
		ensure_own_data_and_apply_factor();
		
		if(is_dense()) {
			return denseData.get()[_position];
		}
		// Creating the entry may push the sparse map over the sparsity threshold,
		// in which case the tensor is converted to dense representation.
		value_t& result = (*sparseData)[_position];
		use_dense_representation_if_desirable();
		if (is_dense()) {
			return denseData.get()[_position];
		}
		return result;
	}
	
	
	value_t Tensor::operator[](const size_t _position) const {
		REQUIRE(_position < size, "Position " << _position << " does not exist in Tensor of dimensions " << dimensions);
		
		if(is_dense()) {
			return factor*denseData.get()[_position];
		}
		const std::map<size_t, value_t>::const_iterator entry = sparseData->find(_position);
		if(entry == sparseData->end()) {
			return 0.0;
		}
		return factor*entry->second;
	}
	
	
	value_t& Tensor::operator[](const MultiIndex& _positions) {
		return operator[](multiIndex_to_position(_positions, dimensions));
	}
	
	
	value_t Tensor::operator[](const MultiIndex& _positions) const {
		return operator[](multiIndex_to_position(_positions, dimensions));
	}
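	// Illustrative (sketch): multi-index access uses the row-major linearisation,
	// so for a tensor A with dimensions {3, 4} one has A[{i, j}] == A[i*4 + j].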
	
	
	value_t& Tensor::at(const size_t _position) {
		REQUIRE(_position < size, "Position " << _position << " does not exist in Tensor of dimensions " << dimensions);
		REQUIRE(!has_factor(), "at() must not be called if there is a factor.");
		REQUIRE((is_dense() && denseData.unique()) || (is_sparse() && sparseData.unique()) , "Data must be unique to call at().");
		
		if(is_dense()) {
			return denseData.get()[_position];
		}
		value_t& result = (*sparseData)[_position];
		use_dense_representation_if_desirable();
		if (is_dense()) {
			return denseData.get()[_position];
		}
		return result;
	}
	
	
	value_t Tensor::cat(const size_t _position) const {
		REQUIRE(_position < size, "Position " << _position << " does not exist in Tensor of dimensions " << dimensions);
		REQUIRE(!has_factor(), "cat() must not be called if there is a factor.");
		
		if(is_dense()) {
			return denseData.get()[_position];
		}
		const std::map<size_t, value_t>::const_iterator entry = sparseData->find(_position);
		if(entry == sparseData->end()) {
			return 0.0;
		}
		return entry->second;
	}
	
	
	value_t* Tensor::get_dense_data() {
		use_dense_representation();
		ensure_own_data_and_apply_factor();
		return denseData.get();
	}
	
	
	value_t* Tensor::get_unsanitized_dense_data() {
		REQUIRE(is_dense(), "Unsanitized dense data requested, but representation is not dense!");
		return denseData.get();
	}
	
	
	const value_t* Tensor::get_unsanitized_dense_data() const  {
		REQUIRE(is_dense(), "Unsanitized dense data requested, but representation is not dense!");
		return denseData.get();
	}
	
	
	value_t* Tensor::override_dense_data()  {
		factor = 1.0;
		if(!denseData.unique()) {
			sparseData.reset();
			denseData.reset(new value_t[size], internal::array_deleter_vt);
			representation = Representation::Dense;
		}
		return denseData.get();
	}
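	// Illustrative contrast (sketch): get_dense_data() converts to dense representation
	// and applies the lazy factor before exposing the raw buffer, whereas
	// get_unsanitized_dense_data() hands out the buffer as-is, so callers must
	// account for the factor themselves:
	//
	//   xerus::Tensor T = xerus::Tensor::random({2, 2});
	//   T *= 2.0;                          // only the lazy factor changes
	//   double* raw = T.get_dense_data();  // factor now folded into the data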
	
	
	const std::shared_ptr<value_t>& Tensor::get_internal_dense_data() {
		REQUIRE(is_dense(), "Internal dense data requested, but representation is not dense!");
		return denseData;
	}
	
	
	std::map<size_t, value_t>& Tensor::get_sparse_data() {
		CHECK(is_sparse(), warning, "Request for sparse data although the Tensor is not sparse.");
		use_sparse_representation();
		ensure_own_data_and_apply_factor();
		return *sparseData.get();
	}
	
	
	std::map<size_t, value_t>& Tensor::get_unsanitized_sparse_data() {
		REQUIRE(is_sparse(), "Unsanitized sparse data requested, but representation is not sparse!");
		return *sparseData.get();
	}
	
	
	const std::map<size_t, value_t>& Tensor::get_unsanitized_sparse_data() const  {
		REQUIRE(is_sparse(), "Unsanitized sparse data requested, but representation is not sparse!");
		return *sparseData.get();
	}
	
	
	std::map<size_t, value_t>& Tensor::override_sparse_data() {
		factor = 1.0;
		if(sparseData.unique()) {
			INTERNAL_CHECK(is_sparse(), "Internal Error");
			sparseData->clear();
		} else {
			denseData.reset();
			sparseData.reset(new std::map<size_t, value_t>());
			representation = Representation::Sparse;
		}
		
		return *sparseData.get();
	}
	
	
	const std::shared_ptr<std::map<size_t, value_t>>& Tensor::get_internal_sparse_data() {
		REQUIRE(is_sparse(), "Internal sparse data requested, but representation is not sparse!");
		return sparseData;
	}
	
	
	/*- - - - - - - - - - - - - - - - - - - - - - - - - - Indexing - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*/
	internal::IndexedTensor<Tensor> Tensor::operator()(const std::vector<Index>&  _indices) {
		return internal::IndexedTensor<Tensor>(this, _indices, false);
	}
	
	
	internal::IndexedTensor<Tensor> Tensor::operator()(      std::vector<Index>&& _indices) {
		return internal::IndexedTensor<Tensor>(this, std::move(_indices), false);
	}
	
	
	internal::IndexedTensorReadOnly<Tensor> Tensor::operator()(const std::vector<Index>&  _indices) const {
		return internal::IndexedTensorReadOnly<Tensor>(this, _indices);
	}
	
	
	internal::IndexedTensorReadOnly<Tensor> Tensor::operator()(      std::vector<Index>&& _indices) const {
		return internal::IndexedTensorReadOnly<Tensor>(this, std::move(_indices));
	}
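	// Illustrative usage of indexed expressions (sketch):
	//
	//   xerus::Index i, j, k;
	//   xerus::Tensor A = xerus::Tensor::random({5, 7});
	//   xerus::Tensor B = xerus::Tensor::random({7, 3});
	//   xerus::Tensor C;
	//   C(i, k) = A(i, j) * B(j, k);   // contraction over the shared index j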
	
	
	/*- - - - - - - - - - - - - - - - - - - - - - - - - - Modifiers - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -*/
	void Tensor::reset(DimensionTuple _newDim, const Representation _representation, const Initialisation _init) {
		const size_t oldDataSize = size;
		dimensions = std::move(_newDim);
		size = misc::product(dimensions);
		factor = 1.0;
		
		if(_representation == Representation::Dense) {
			if(representation == Representation::Dense) {
				if(oldDataSize != size || !denseData.unique()) {
					denseData.reset(new value_t[size], internal::array_deleter_vt);
				}
			} else {
				sparseData.reset();
				denseData.reset(new value_t[size], internal::array_deleter_vt);
				representation = _representation;
			}
			
			if(_init == Initialisation::Zero) {
				memset(denseData.get(), 0, size*sizeof(value_t));
			}
		} else {
			if(representation == Representation::Sparse) {
				if(sparseData.unique()) {
					sparseData->clear();
				} else {
					sparseData.reset(new std::map<size_t, value_t>());
				}
			} else {
				denseData.reset();
				sparseData.reset(new std::map<size_t, value_t>());
				representation = _representation;
			}
		}
	}
	
	
	void Tensor::reset(DimensionTuple _newDim, const Initialisation _init) {
		const size_t oldDataSize = size;
		dimensions = std::move(_newDim);
		size = misc::product(dimensions);
		factor = 1.0;
		
		if(representation == Representation::Dense) {
			if(oldDataSize != size || !denseData.unique()) {
				denseData.reset(new value_t[size], internal::array_deleter_vt);
			}
			
			if(_init == Initialisation::Zero) {
				memset(denseData.get(), 0, size*sizeof(value_t));
			}
		} else {
			if(sparseData.unique()) {
				sparseData->clear();
			} else {
				sparseData.reset(new std::map<size_t, value_t>());
			}
		}
	}
	
	void Tensor::reset() {
		dimensions.clear();
		factor = 1.0;
		
		if(representation == Representation::Dense) {
			if(size != 1 || !denseData.unique()) {
				denseData.reset(new value_t[1], internal::array_deleter_vt);
			}
			*denseData.get() = 0.0;
		} else {
			if(sparseData.unique()) {
				sparseData->clear();
			} else {
				sparseData.reset(new std::map<size_t, value_t>());
			}
		}
		size = 1;
	}
	
	
	void Tensor::reset(DimensionTuple _newDim, const std::shared_ptr<value_t>& _newData) {
		dimensions = std::move(_newDim);
		size = misc::product(dimensions);
		factor = 1.0;
		
		if(representation == Representation::Sparse) {
			sparseData.reset();
			representation = Representation::Dense;
		}
		
		denseData = _newData;
	}
	
	
	void Tensor::reset(DimensionTuple _newDim, std::unique_ptr<value_t[]>&& _newData) {
		dimensions = std::move(_newDim);
		size = misc::product(dimensions);
		factor = 1.0;
		
		if(representation == Representation::Sparse) {
			sparseData.reset();
			representation = Representation::Dense;
		}
		
		denseData.reset(_newData.release());
	}
	
	void Tensor::reset(DimensionTuple _newDim, std::map<size_t, value_t>&& _newData) {
		dimensions = std::move(_newDim);
		size = misc::product(dimensions);
		factor = 1.0;
		
		if(representation == Representation::Dense) {
			denseData.reset();
			representation = Representation::Sparse;
		}
		
		std::shared_ptr<std::map<size_t, value_t>> newD(new std::map<size_t, value_t>(std::move(_newData)));
		sparseData = std::move(newD);
	}
	
	void Tensor::reinterpret_dimensions(DimensionTuple _newDimensions) {
		REQUIRE(misc::product(_newDimensions) == size, "New dimensions must not change the size of the tensor in reinterpretation: " << misc::product(_newDimensions) << " != " << size);
		dimensions = std::move(_newDimensions); // NOTE pass-by-value
	}
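	// Illustrative (sketch): reinterpretation keeps the data, only the shape changes.
	//
	//   xerus::Tensor A = xerus::Tensor::random({4, 6});
	//   A.reinterpret_dimensions({2, 2, 6});   // still 24 entries, same row-major data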
	
	
	void Tensor::resize_mode(const size_t _mode, const size_t _newDim, size_t _cutPos) {
		REQUIRE(_mode < degree(), "Can't resize mode " << _mode << " as the tensor is only of order " << degree());

		if (dimensions[_mode] == _newDim) { return; }  // Trivial case: nothing to do

		const size_t oldDim = dimensions[_mode];
		_cutPos = std::min(_cutPos, oldDim);
		
		REQUIRE(_newDim > 0, "Dimension must be larger than 0! Is " << _newDim);
		REQUIRE(_newDim > oldDim || _cutPos >= oldDim -_newDim, "Cannot remove " << oldDim -_newDim << " slates starting (exclusively) at position " << _cutPos);
		
		const size_t dimStepSize = misc::product(dimensions, _mode+1, degree());
		const size_t oldStepSize = oldDim*dimStepSize;
		const size_t newStepSize = _newDim*dimStepSize;
		const size_t blockCount = size / oldStepSize; // == misc::product(dimensions, 0, _mode)
		const size_t newsize = blockCount*newStepSize;
		
		if(is_dense()) {
			std::unique_ptr<value_t[]> tmpData(new value_t[newsize]);
			
			if (_newDim > oldDim) { // Add new slates
				const size_t insertBlockSize = (_newDim-oldDim)*dimStepSize;
				if(_cutPos == 0) {
					for ( size_t i = 0; i < blockCount; ++i ) {
						value_t* const currData = tmpData.get()+i*newStepSize;
						misc::set_zero(currData, insertBlockSize);
						misc::copy(currData+insertBlockSize, denseData.get()+i*oldStepSize, oldStepSize);
					}
				} else if(_cutPos == oldDim) {
					for ( size_t i = 0; i < blockCount; ++i ) {
						value_t* const currData = tmpData.get()+i*newStepSize;
						misc::copy(currData, denseData.get()+i*oldStepSize, oldStepSize);
						misc::set_zero(currData+oldStepSize, insertBlockSize);
					}
				} else {
					const size_t preBlockSize = _cutPos*dimStepSize;
					const size_t postBlockSize = (oldDim-_cutPos)*dimStepSize;
					for (size_t i = 0; i < blockCount; ++i) {
						value_t* const currData = tmpData.get()+i*newStepSize;
						misc::copy(currData, denseData.get()+i*oldStepSize, preBlockSize);
						misc::set_zero(currData+preBlockSize, insertBlockSize);
						misc::copy(currData+preBlockSize+insertBlockSize, denseData.get()+i*oldStepSize+preBlockSize, postBlockSize);
					}
				}
			} else { // Remove slates
				if (_cutPos < oldDim) {
					const size_t removedSlates = (oldDim-_newDim);
					const size_t removedBlockSize = removedSlates*dimStepSize;
					const size_t preBlockSize = (_cutPos-removedSlates)*dimStepSize;
					const size_t postBlockSize = (oldDim-_cutPos)*dimStepSize;
					
					INTERNAL_CHECK(removedBlockSize+preBlockSize+postBlockSize == oldStepSize && preBlockSize+postBlockSize == newStepSize, "IE");
					
					for (size_t i = 0; i < blockCount; ++i) {
						value_t* const currData = tmpData.get()+i*newStepSize;
						misc::copy(currData, denseData.get()+i*oldStepSize, preBlockSize);
						misc::copy(currData+preBlockSize, denseData.get()+i*oldStepSize+preBlockSize+removedBlockSize, postBlockSize);
					}
				} else {
					for (size_t i = 0; i < blockCount; ++i) {
						misc::copy(tmpData.get()+i*newStepSize, denseData.get()+i*oldStepSize, newStepSize);
					}
				}
			}
			denseData.reset(tmpData.release(), internal::array_deleter_vt);
		
		} else {
			std::unique_ptr<std::map<size_t, value_t>> tmpData(new std::map<size_t, value_t>());
			
			if (_newDim > oldDim) { // Add new slates
				const size_t slatesAdded = _newDim-oldDim;
				for(const auto& entry : *sparseData.get()) {
					// Decode the position as i*oldStepSize + j*dimStepSize + k
					const size_t k = entry.first%dimStepSize;
					const size_t j = (entry.first%oldStepSize)/dimStepSize;
					const size_t i = entry.first/oldStepSize;
					
					if(j < _cutPos) { // Entry remains at current position j
						tmpData->emplace(i*newStepSize+j*dimStepSize+k, entry.second);
					} else { // Entry moves to position j+slatesAdded
						tmpData->emplace(i*newStepSize+(j+slatesAdded)*dimStepSize+k, entry.second);
					}
				}
			} else { // Remove slates
				const size_t slatesRemoved = oldDim-_newDim;
				for(const auto& entry : *sparseData.get()) {
					// Decode the position as i*oldStepSize + j*dimStepSize + k
					const size_t k = entry.first%dimStepSize;
					const size_t j = (entry.first%oldStepSize)/dimStepSize;
					const size_t i = entry.first/oldStepSize;
					
					// Entries with _cutPos-slatesRemoved <= j < _cutPos are dropped.
					if(j < _cutPos-slatesRemoved) { // Entry remains at current position j
						tmpData->emplace(i*newStepSize+j*dimStepSize+k, entry.second);
					} else if(j >= _cutPos) { // Entry moves to position j-slatesRemoved
						tmpData->emplace(i*newStepSize+(j-slatesRemoved)*dimStepSize+k, entry.second);
					}
				}
			}
			sparseData.reset(tmpData.release());
		}
		
		dimensions[_mode] = _newDim;
		size = newsize;
	}
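	// Illustrative (sketch): for A with dimensions {3, 4, 5},
	//   A.resize_mode(1, 6, 4);  // grows mode 1 from 4 to 6, appending two zero slates
	//                            // (the new slates are inserted in front of position 4)
	//   A.resize_mode(1, 4, 6);  // shrinks it back, removing the two slates directly
	//                            // in front of position 6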
	
	
	void Tensor::fix_mode(const size_t _mode, const size_t _slatePosition) {
		REQUIRE(_slatePosition < dimensions[_mode], "The given slatePosition must be smaller than the corresponding dimension. Here " << _slatePosition << " >= " << dimensions[_mode] << ", dim = " << dimensions << "=" << size << " mode " << _mode);
		
		const size_t stepCount = misc::product(dimensions, 0, _mode);
		const size_t blockSize = misc::product(dimensions, _mode+1, degree());
		const size_t stepSize = dimensions[_mode]*blockSize;
		const size_t slateOffset = _slatePosition*blockSize;
		
		if(is_dense()) {
			std::unique_ptr<value_t[]> tmpData(new value_t[stepCount*blockSize]);
			
			// Copy data
			for(size_t i = 0; i < stepCount; ++i) {
				misc::copy(tmpData.get()+i*blockSize, denseData.get()+i*stepSize+slateOffset, blockSize);
			}
			
			denseData.reset(tmpData.release(), &internal::array_deleter_vt);
		} else {
			std::unique_ptr<std::map<size_t, value_t>> tmpData(new std::map<size_t, value_t>());
			
			for(const auto& entry : *sparseData.get()) {
				// Decode the position as i*stepSize + j*blockSize + k
				const size_t k = entry.first%blockSize;
				const size_t j = (entry.first%stepSize)/blockSize;
				const size_t i = entry.first/stepSize;
				
				if(j == _slatePosition) {
					tmpData->emplace(i*blockSize+k, entry.second);
				}
			}
			
			sparseData.reset(tmpData.release());
		}
		
		// Adjust dimensions
		dimensions.erase(dimensions.begin()+long(_mode));
		size = stepCount*blockSize;
	}
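	// Illustrative (sketch): fixing a mode reduces the order by one.
	//
	//   xerus::Tensor A = xerus::Tensor::random({3, 4, 5});
	//   A.fix_mode(1, 2);   // A now has dimensions {3, 5}: the slice with j == 2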
	
	
	void Tensor::remove_slate(const size_t _mode, const size_t _pos) {
		REQUIRE(_mode < degree(), "Mode " << _mode << " does not exist in a tensor of order " << degree());
		REQUIRE(_pos < dimensions[_mode], _pos << " " << dimensions[_mode]);
		REQUIRE(dimensions[_mode] > 1, "Cannot remove the last slate of a mode.");
		
		resize_mode(_mode, dimensions[_mode]-1, _pos+1);
	}
	
	
	void Tensor::perform_trace(size_t _firstMode, size_t _secondMode) {
		REQUIRE(_firstMode != _secondMode, "Given indices must not coincide");
		REQUIRE(dimensions[_firstMode] == dimensions[_secondMode], "Dimensions of trace indices must coincide.");
		
		if(_firstMode > _secondMode) { std::swap(_firstMode, _secondMode); }
		
		const size_t front = misc::product(dimensions, 0, _firstMode);
		const size_t mid = misc::product(dimensions, _firstMode+1, _secondMode);
		const size_t back = misc::product(dimensions, _secondMode+1, degree());
		const size_t traceDim = dimensions[_firstMode];
		const size_t frontStepSize = traceDim*mid*traceDim*back;
		const size_t traceStepSize = mid*traceDim*back+back;
		const size_t midStepSize = traceDim*back;
		
		size = front*mid*back;
		
		if(is_dense()) {
			std::unique_ptr<value_t[]> newData(new value_t[size]);
			misc::set_zero(newData.get(), size);
			
			for(size_t f = 0; f < front; ++f) {
				for(size_t t = 0; t < traceDim; ++t) {
					for(size_t m = 0; m < mid; ++m) {
						misc::add_scaled(newData.get()+(f*mid+m)*back, factor, denseData.get()+f*frontStepSize+t*traceStepSize+m*midStepSize, back);
					}
				}
			}
			
			denseData.reset(newData.release(), internal::array_deleter_vt);
		} else {
			std::unique_ptr<std::map<size_t, value_t>> newData( new std::map<size_t, value_t>());
			
			for(const auto& entry : *sparseData) {
				size_t pos = entry.first;
				const size_t backIdx = pos%back;
				pos /= back;
				const size_t traceBackIdx = pos%traceDim;
				pos /= traceDim;
				const size_t midIdx = pos%mid;
				pos /= mid;
				const size_t traceFrontIdx = pos%traceDim;
				pos /= traceDim;
				const size_t frontIdx = pos;
				
				if(traceFrontIdx == traceBackIdx) {
					(*newData)[(frontIdx*mid + midIdx)*back + backIdx] += factor*entry.second;
				}
			}
			
			sparseData.reset(newData.release());
		}
		
		dimensions.erase(dimensions.begin()+_secondMode);
		dimensions.erase(dimensions.begin()+_firstMode);
		factor = 1.0;
	}
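	// Illustrative (sketch): tracing out a matching mode pair reduces the order by two.
	//
	//   xerus::Tensor A = xerus::Tensor::random({3, 4, 3});
	//   A.perform_trace(0, 2);   // A now has dimensions {4}: A'[m] = sum_t A[t, m, t]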
	
	
	void Tensor::modify_diagonal_entries(const std::function<void(value_t&)>& _f) {
		ensure_own_data_and_apply_factor();
		
		if(degree() == 0) {
			_f(at(0)); 
		} else {
			// The linear offset between consecutive diagonal entries (i, ..., i) and
			// (i+1, ..., i+1) is the sum of all mode strides, accumulated here via
			// stepSize = stepSize*dim[i] + 1.
			size_t stepSize = 1;
			for(size_t i = 1; i < degree(); ++i) {
				stepSize *= dimensions[i];
				stepSize += 1;
			}
			
			const size_t numDiags = misc::min(dimensions);
			
			for(size_t i = 0; i < numDiags; ++i){
				_f(at(i*stepSize));
			}
		}
	}
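	// Illustrative (sketch): scale the diagonal of a square matrix in place.
	//
	//   xerus::Tensor A = xerus::Tensor::random({4, 4});
	//   A.modify_diagonal_entries([](double& _d) { _d *= 2.0; });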
	
	
	void Tensor::modify_diagonal_entries(const std::function<void(value_t&, const size_t)>& _f) {
		ensure_own_data_and_apply_factor();
		
		if(degree() == 0) {
			_f(at(0), 0); 
		} else {
			// Same diagonal step size as in the variant above.
			size_t stepSize = 1;
			for(size_t i = 1; i < degree(); ++i) {
				stepSize *= dimensions[i];
				stepSize += 1;
			}
			
			const size_t numDiags = misc::min(dimensions);
			
			for(size_t i = 0; i < numDiags; ++i){
				_f(at(i*stepSize), i);
			}
		}
	}
	
	
	void Tensor::modify_entries(const std::function<void(value_t&)>& _f) {
		ensure_own_data_and_apply_factor();
		if(is_dense()) {
			for(size_t i = 0; i < size; ++i) { _f(at(i)); }
		} else {
			for(size_t i = 0; i < size; ++i) {
				value_t val = cat(i);
				const value_t oldVal = val;
				_f(val);
				if(misc::hard_not_equal(val, 0.0)) {
					at(i) = val;
				} else if( misc::hard_not_equal(oldVal, 0.0)) {
					// The entry was set to exactly zero: remove it from the sparse map.
					IF_CHECK( size_t succ =) sparseData->erase(i);
					INTERNAL_CHECK(succ == 1, "Internal Error");
				}
			}
		}
	}
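	// Illustrative (sketch): clamp all entries to be non-negative; for sparse
	// tensors, entries that become exactly zero are dropped from the map.
	//
	//   T.modify_entries([](double& _v) { _v = std::max(_v, 0.0); });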
	
	
	void Tensor::modify_entries(const std::function<void(value_t&, const size_t)>& _f) {
		ensure_own_data_and_apply_factor();
		if(is_dense()) {
			for(size_t i = 0; i < size; ++i) { _f(at(i), i); }
		} else {
			for(size_t i = 0; i < size; ++i) {
				value_t val = cat(i);
				const value_t oldVal = val;
				_f(val, i);
				if(misc::hard_not_equal(val, 0.0)) {
					at(i) = val;
				} else if( misc::hard_not_equal(oldVal, 0.0)) {
					IF_CHECK( size_t succ =) sparseData->erase(i);
					INTERNAL_CHECK(succ == 1, "Internal Error");
				}
			}
		}
	}
	
	
	void Tensor::modify_entries(const std::function<void(value_t&, const MultiIndex&)>& _f) {
		ensure_own_data_and_apply_factor();
		
		MultiIndex multIdx(degree(), 0);
		size_t idx = 0;
		while (true) {
			if(is_dense()) {
				_f(at(idx), multIdx);
			} else {
				value_t val = cat(idx);
				const value_t oldVal = val;
				_f(val, multIdx);