#include "RowAggregator.h"

#include <algorithm>
#include <cassert>
#include <cstdint>

#include "AggregationData.h"
#include "KeyValue.h"
#include "reordering.h"

using std::min;

// Starts with no owned buffers; they are allocated later by init() /
// loadInputData(), and the destructor tolerates them staying null.
RowAggregator::RowAggregator() {
	_keys = 0;
	_hashes = 0;
	_defaultValue = 0;
}

// Releases every heap buffer this aggregator owns. delete[] on a null
// pointer is a no-op, so this is safe even if init() never ran.
RowAggregator::~RowAggregator() {
	delete[] _defaultValue;
	delete[] _hashes;
	delete[] _keys;
}

// Configures the aggregator for a query: which columns form the group key,
// which columns are summed (split by element type), whether a COUNT column
// is produced, and the output column order. Must run before loadInputData(),
// which relies on the sizes computed here.
void RowAggregator::init(SharedArray<Column> in, SharedArray<int> groupedColumns, SharedArray<AggregationData> aggregations) {
	// At least one grouping column is required; at most one COUNT column
	// is supported.
	assert(groupedColumns.size() != 0);
	assert(countColumnsCount(aggregations) <= 1);

	_groupedColumns = groupedColumns;
	_summedIntColumns = summedColumns(in, aggregations, INT);
	_summedDoubleColumns = summedColumns(in, aggregations, DOUBLE);
	_isCounting = areThereCountColumns(aggregations);
	_outValueColumns = outValueColumns(in, aggregations);

	// Derived key/value record sizes and the default value record depend on
	// the member assignments above.
	initKeyValueSizes(in);
	initDefaultValue();
}

// Replaces the current input chunk: discards the key/hash buffers derived
// from the previous chunk, then rebuilds both for the new columns.
void RowAggregator::loadInputData(SharedArray<Column> in) {
	delete[] _hashes;
	_hashes = 0;
	delete[] _keys;
	_keys = 0;

	_in = in;

	computeKeys(_in);
	computeHashes(_in);
}

// Splits the aggregated key/value rows into column chunks of at most
// chunkSize rows each. Chunks are stored in reverse order so that
// takeAggregatedDataChunk() can pop them off the back in original order.
void RowAggregator::loadAggregatedData(SharedArray<KeyValue> keysAndValues, int chunkSize) {
	assert(chunkSize > 0); // guard the ceiling division below

	// Ceiling division: the last chunk may hold fewer than chunkSize rows.
	const int size = (keysAndValues.size() + chunkSize - 1) / chunkSize;
	_aggregatedChunks = SharedArray<SharedArray<Column> >(size);

	for (int i = 0; i < size; ++i) {
		_aggregatedChunks[size - i - 1] = reorderedColumns(extractToColumns(keysAndValues, i * chunkSize, chunkSize));
	}
}

// Hands out the next aggregated chunk (in original order) and removes it
// from the internal store; returns an empty column set once exhausted.
SharedArray<Column> RowAggregator::takeAggregatedDataChunk() {
	const int remaining = _aggregatedChunks.size();
	if (remaining == 0)
		return extractToColumns(SharedArray<KeyValue>(), 0, 0);

	// Chunks were stored reversed, so the back element is next in order.
	SharedArray<Column> chunk = _aggregatedChunks[remaining - 1];
	_aggregatedChunks.shrink(remaining - 1);
	if (remaining == 1)
		_aggregatedChunks = SharedArray<SharedArray<Column> >();

	return chunk;
}

// Accumulates every input row's summed values into its group's value record.
// valueRows[i] points at the value record for input row i; the record layout
// is [doubles for _summedDoubleColumns][int32s for _summedIntColumns]
// [int32 count if _isCounting], matching initDefaultValue().
void RowAggregator::aggregate(SharedArray<char*> valueRows) {
	int offset = 0;

	for (int col = 0; col < _summedDoubleColumns.size(); ++col) {
		const Column& column = _in[_summedDoubleColumns[col]];
		for (int i = 0; i < valueRows.size(); ++i) {
			*reinterpret_cast<double*>(valueRows[i] + offset) += column.at<double>(i);
		}
		offset += sizeof(double);
	}

	for (int col = 0; col < _summedIntColumns.size(); ++col) {
		const Column& column = _in[_summedIntColumns[col]];
		for (int i = 0; i < valueRows.size(); ++i) {
			*reinterpret_cast<int32_t*>(valueRows[i] + offset) += column.at<int32_t>(i);
		}
		offset += sizeof(int32_t);
	}

	if (_isCounting) {
		assert(offset == _valueSize - (int) sizeof(int32_t));

		// The trailing int32 of each value record is the group's row count.
		for (int i = 0; i < valueRows.size(); ++i) {
			++*reinterpret_cast<int32_t*>(valueRows[i] + offset);
		}
		offset += sizeof(int32_t);
	}

	// Invariant: the entire value record was touched, whether or not a count
	// column exists (previously only checked in the counting path).
	assert(offset == _valueSize);
}
void RowAggregator::initKeyValueSizes(const SharedArray<Column>& in) {
	_keySize = 0;
	for (int col = 0; col < _groupedColumns.size(); ++col)
		_keySize += typeSizeOf(in[_groupedColumns[col]].type());

	_valueSize = _summedDoubleColumns.size() * sizeof(double) + _summedIntColumns.size() * sizeof(int32_t);
	if (_isCounting)
		_valueSize += sizeof(int32_t);
}

// Writes every element of `column` into a strided destination: element i
// lands at writeDestination + i * keySize, i.e. into row i's packed key.
template <typename T>
void writeKeys(char* writeDestination, const Column& column, int keySize) {
	char* dest = writeDestination;
	const int rows = column.size();

	for (int row = 0; row < rows; ++row, dest += keySize) {
		*reinterpret_cast<T*>(dest) = column.at<T>(row);
	}
}

// Keys are concatenated bytes of row elements in order as in _groupedColumns:
// one packed _keySize-byte record per input row, stored in _keys.
void RowAggregator::computeKeys(const SharedArray<Column>& in) {
	assert(in.size() != 0);
	const int rowCount = in[0].size();

	_keys = new char[rowCount * _keySize];

	int fieldOffset = 0;
	for (int c = 0; c < _groupedColumns.size(); ++c) {
		const Column& column = in[_groupedColumns[c]];
		// Each column fills its own field, at fieldOffset within every record.
		char* destination = _keys + fieldOffset;

		switch (column.type()) {
			case INT:
				writeKeys<int32_t>(destination, column, _keySize);
				break;

			case DOUBLE:
				writeKeys<double>(destination, column, _keySize);
				break;

			case BOOL:
				writeKeys<bool>(destination, column, _keySize);
				break;

			default:
				assert(false); // unsupported grouping column type
		}

		fieldOffset += typeSizeOf(column.type());
	}
}

// Folds each element of `column` into the corresponding row's running hash:
// hashes[i] becomes hash(column[i], previous hashes[i]).
template <typename T>
void computeIncrementalHashes(hash_t* hashes, const Column& column) {
	const int rows = column.size();

	for (int row = 0; row < rows; ++row) {
		hashes[row] = hash(column.at<T>(row), hashes[row]);
	}
}

// Computes one hash per input row by folding every grouped column's element
// into that row's running hash, starting from a seed of 0.
void RowAggregator::computeHashes(const SharedArray<Column>& in) {
	assert(in.size() != 0);
	int colSize = in[0].size();

	// One hash slot per row, seeded to 0 before any column is folded in.
	_hashes = new hash_t[colSize];
	for (int i = 0; i < colSize; ++i)
		_hashes[i] = 0;

	for (int col = 0; col < _groupedColumns.size(); ++col) {
		const Column& column = in[_groupedColumns[col]];
		
		switch (column.type()) {
			case INT:
				computeIncrementalHashes<int32_t>(_hashes, column);
				break;

			case DOUBLE:
				// NOTE(review): grouping by DOUBLE is deliberately rejected
				// here even though computeKeys() accepts it — presumably
				// because no hash() is defined for doubles; confirm before
				// adding support.
				assert(false);
				break;

			case BOOL:
				computeIncrementalHashes<bool>(_hashes, column);
				break;

			default:
				assert(false);
		}
	}
}

// Values are concatenated bytes of row elements in order like input data
// columns. Builds the zero value record that new groups start from:
// [0.0 per summed double column][0 per summed int column][0 count if counting].
void RowAggregator::initDefaultValue() {
	delete[] _defaultValue; // init() may be called more than once

	_defaultValue = new char[_valueSize];
	char* ptr = _defaultValue;

	for (int i = 0; i < _summedDoubleColumns.size(); ++i) {
		*reinterpret_cast<double*>(ptr) = 0.0;
		ptr += sizeof(double);
	}

	for (int i = 0; i < _summedIntColumns.size(); ++i) {
		*reinterpret_cast<int32_t*>(ptr) = 0;
		ptr += sizeof(int32_t);
	}

	if (_isCounting) {
		*reinterpret_cast<int32_t*>(ptr) = 0;
		ptr += sizeof(int32_t);
	}

	// Invariant: the writes above must fill the record exactly; keeps this
	// layout in sync with initKeyValueSizes().
	assert(ptr == _defaultValue + _valueSize);
}

// Reads the key field at byte `offset` out of up to `amount` KeyValue rows
// beginning at `startIndex`, returning the values as a typed Column.
// The final chunk of a sequence may yield fewer than `amount` elements.
template <typename T>
Column extractKeysToColumn(const SharedArray<KeyValue>& kv, int offset, int startIndex, int amount) {
	const int count = min(kv.size() - startIndex, amount);
	SharedArray<T> values(count);

	for (int row = 0; row < count; ++row) {
		const char* source = kv[startIndex + row].key + offset;
		values[row] = *reinterpret_cast<const T*>(source);
	}

	return Column(values);
}

// Reads the value field at byte `offset` out of up to `amount` KeyValue rows
// beginning at `startIndex`, returning the values as a typed Column.
// Mirror of extractKeysToColumn, but reads from .value instead of .key.
template <typename T>
Column extractValuesToColumn(const SharedArray<KeyValue>& kv, int offset, int startIndex, int amount) {
	const int count = min(kv.size() - startIndex, amount);
	SharedArray<T> values(count);

	for (int row = 0; row < count; ++row) {
		const char* source = kv[startIndex + row].value + offset;
		values[row] = *reinterpret_cast<const T*>(source);
	}

	return Column(values);
}

// Unpacks a slice of aggregated KeyValue rows back into typed columns:
// first one column per grouped key field (typed from the matching input
// column), then the summed double columns, summed int columns, and the
// count column if present. Column order matches the packed key/value
// layouts built by computeKeys() and initDefaultValue().
SharedArray<Column> RowAggregator::extractToColumns(const SharedArray<KeyValue>& keysAndValues, int startIndex, int amount) const {
	const int size = _groupedColumns.size() + _summedDoubleColumns.size() +
			_summedIntColumns.size() + (int) _isCounting;
	SharedArray<Column> columns(size);

	int columnIndex = 0;
	int offset = 0;

	// Key fields: walk the packed key record, one field per grouped column.
	for (int col = 0; col < _groupedColumns.size(); ++col) {
		switch (_in[_groupedColumns[col]].type()) {
			case INT:
				columns[columnIndex] = extractKeysToColumn<int32_t>(keysAndValues,
						offset, startIndex, amount);
				break;

			case DOUBLE:
				columns[columnIndex] = extractKeysToColumn<double>(keysAndValues,
						offset, startIndex, amount);
				break;

			case BOOL:
				columns[columnIndex] = extractKeysToColumn<bool>(keysAndValues,
						offset, startIndex, amount);
				break;

			default:
				assert(false);
		}

		offset += typeSizeOf(_in[_groupedColumns[col]].type());
		++columnIndex;
	}

	// The key fields must cover the packed key record exactly.
	assert(offset == _keySize);

	// Value fields start at offset 0 of each KeyValue's value buffer.
	offset = 0;

	for (int col = 0; col < _summedDoubleColumns.size(); ++col) {
		columns[columnIndex] = extractValuesToColumn<double>(keysAndValues,
				offset, startIndex, amount);
		offset += sizeof(double);
		++columnIndex;
	}

	for (int col = 0; col < _summedIntColumns.size(); ++col) {
		columns[columnIndex] = extractValuesToColumn<int32_t>(keysAndValues,
				offset, startIndex, amount);
		offset += sizeof(int32_t);
		++columnIndex;
	}

	// Optional trailing count column (an int32 at the end of the record).
	if (_isCounting) {
		columns[columnIndex] = extractValuesToColumn<int32_t>(keysAndValues,
				offset, startIndex, amount);
		offset += sizeof(int32_t);
	}

	// The value fields must cover the packed value record exactly.
	assert(offset == _valueSize);

	return columns;
}

// Rearranges the extracted columns into output order: the grouped key
// columns come first, followed by value columns selected and ordered by
// _outValueColumns (which indexes into the value section after the keys).
SharedArray<Column> RowAggregator::reorderedColumns(const SharedArray<Column>& columns) const {
	const int keyCount = _groupedColumns.size();
	SharedArray<Column> result(keyCount + _outValueColumns.size());

	int next = 0;
	for (int i = 0; i < keyCount; ++i) {
		result[next++] = columns[i];
	}

	for (int i = 0; i < _outValueColumns.size(); ++i) {
		result[next++] = columns[keyCount + _outValueColumns[i]];
	}

	return result;
}
