#pragma once
#include <cstddef>
#include <string>
#include <utility>
#include <vector>


namespace ly
{
	// Singly-linked node of one bucket chain, holding one stored value.
	template<class V>
	struct hashNode
	{
		hashNode* _next; // next node in the same bucket (nullptr at the tail)
		V _data;         // stored value

		// NOTE(fix): take `data` by const reference (was by value) to
		// avoid one needless copy of V per insertion.
		hashNode(const V& data)
			:_next(nullptr)
			,_data(data)
		{}
	};

	template<class K, class V, class KeyOfVal, class HF>
	class hashBucket;

	// Forward iterator over a hashBucket: walks the current bucket's
	// chain, then scans forward for the next non-empty bucket. A null
	// _node represents the end() position.
	template<class K, class V, class KeyOfVal, class HF>
	struct HBIterator
	{
		typedef hashBucket<K, V, KeyOfVal, HF> hashBucket;
		typedef hashNode<V> node;
		typedef HBIterator<K, V, KeyOfVal, HF> self;

		HBIterator(node* node, hashBucket* pht)
			:_node(node)
			,_pht(pht)
		{}

		// Pre-increment: advance to the next element, hopping to the
		// next non-empty bucket when the current chain is exhausted.
		// Precondition: *this is not end() (_node != nullptr).
		self& operator++()
		{
			if (_node->_next != nullptr)
			{
				// More nodes in the current chain.
				_node = _node->_next;
			}
			else
			{
				// Locate the current bucket from the node's own key,
				// then scan the following buckets for a head node.
				int hashi = hf(kv(_node->_data)) % _pht->capacity() + 1;
				_node = nullptr; // stays null if no later bucket is occupied
				while (hashi < _pht->capacity())
				{
					if (_pht->_tables[hashi] != nullptr)
					{
						_node = _pht->_tables[hashi];
						break;
					}
					++hashi;
				}
			}

			return *this;
		}

		// Post-increment: returns the pre-advance position.
		// NOTE(fix): the original returned a raw node* from a function
		// returning `self` -- there is no single-argument conversion, so
		// it did not compile. Delegating to pre-increment also removes
		// the duplicated bucket-scan logic.
		self operator++(int)
		{
			self tmp(*this);
			++(*this);
			return tmp;
		}

		V& operator*()
		{
			return _node->_data;
		}

		V* operator->()
		{
			return &(_node->_data);
		}

		bool operator!=(const self& s) const
		{
			return _node != s._node;
		}

		bool operator==(const self& s) const
		{
			return _node == s._node;
		}

		node* _node;      // current element; nullptr == end()
		hashBucket* _pht; // owning table, used to hop between buckets
		HF hf;            // hash functor
		KeyOfVal kv;      // extracts the key from a stored V
	};

	template<class K, class V, class KeyOfVal, class HF>
	//template<class K, class V>
	class hashBucket
	{
		template<class K, class V, class KeyOfVal, class HF>
		friend struct HBIterator;

		typedef hashNode<V> node;
		typedef HBIterator<K, V, KeyOfVal, HF> iterator;
	public:
		iterator begin()
		{
			for (size_t i = 0; i < _tables.capacity(); i++)
			{
				if (_tables[i] != nullptr)
				{
					return iterator(_tables[i], this);
				}
			}

			return iterator(nullptr, this);
		}

		iterator end()
		{
			return iterator(nullptr, this);
		}

		hashBucket()
			:_size(0)
		{
			_tables.resize(10);
		}

		~hashBucket()
		{
			size_t size = _tables.size();
			for (size_t i = 0; i < size; ++i)
			{
				node* cur = _tables[i];
				while (cur != nullptr)
				{
					node* next = cur->_next;
					delete cur;
					cur = next;
				}
				_tables[i] = nullptr;
			}
		}

		pair<iterator, bool> insert(const V& data)
		{
			if (find(kv(data))._node != nullptr)
			{
				return make_pair(iterator( nullptr, this ), false);
			}

			if (_size / _tables.size() == 1)
			{
				vector<node*> newTables;
				newTables.resize(_tables.capacity() * 2);

				for (size_t i = 0; i < _tables.capacity(); ++i)
				{
					node* cur = _tables[i];
					while (cur != nullptr)
					{
						node* next = cur->_next;

						//int hashi = cur->_data.first % newTables.capacity();
						int hashi = hf(kv(data)) % _tables.capacity();
						cur->_next = newTables[hashi];
						newTables[hashi] = cur;
						
						cur = next;
					}
					_tables[i] = nullptr;
				}

				swap(_tables, newTables);
			}

			//int hashi = hf(kv(data));
			int hashi = hf(kv(data)) % _tables.capacity();
			node* newnode = new node(data);
			newnode->_next = _tables[hashi];
			_tables[hashi] = newnode;

			++_size;
			return make_pair(iterator(newnode, this), true);
		}

		iterator find(const K& key)
		{
			int hashi = hf(key) % _tables.capacity();
			node* cur = _tables[hashi];
			while (cur != nullptr)
			{
				if (cur->_data.first == key)
				{
					return iterator(cur, this);
				}
				cur = cur->_next;
			}

			return iterator(nullptr, this);
		}

		bool erase(const K& key)
		{
			int hashi = hf(key) % _tables.capacity();
			node* cur = _tables[hashi];
			node* prev = nullptr;

			while (cur != nullptr)
			{
				if (cur->_data.first == key)
				{
					if (prev == nullptr)
					{
						_tables[hashi] = cur->_next;
					}
					else
					{
						prev->_next = cur->_next;
					}

					delete cur;
					--_size;
					return true;
				}
				prev = cur;
				cur = cur->_next;
			}

			return false;
		}

		int capacity()
		{
			return _tables.capacity();
		}

		size_t size() const
		{
			return _size;
		}

		bool empty() const
		{
			return _tables.empty();
		}

		size_t count(const K& key)
		{
			if (find(key) != end())
			{
				return 1;
			}

			return 0;
		}

	private:
		vector<node*> _tables;
		size_t _size; 
		HF hf;
		KeyOfVal kv;
	};
	
	// Default hash functor: integral keys hash to themselves.
	// NOTE(fix): returns size_t (was int) so that negative keys cannot
	// produce a negative bucket index after the table's modulo.
	template<class K>
	struct DefHashF
	{
		size_t operator()(const K& key) const
		{
			return static_cast<size_t>(key);
		}
	};

	// String specialization: BKDR-style polynomial hash (factor 31).
	// NOTE(fix): accumulate in size_t -- the original int accumulator
	// overflowed on longer strings, and signed overflow is UB.
	template<>
	struct DefHashF<std::string>
	{
		size_t operator()(const std::string& key) const
		{
			size_t sum = 0;
			for (auto ch : key)
			{
				sum = sum * 31 + static_cast<unsigned char>(ch);
			}
			return sum;
		}
	};

	template<class K, class V, class HF = DefHashF<K>>
	class unordered_map
	{
		template<class K, class V>
		struct KetOfVal
		{
			const K operator()(const pair<K, V>& data)
			{
				return data.first;
			}
		};
		typedef typename hashBucket<K, pair<K, V>, KetOfVal<K, V>, HF> HT;
	public:
		typedef typename HBIterator<K, pair<K, V>, KetOfVal<K, V>, HF> iterator;

		pair<iterator, bool> insert(const pair<K, V>& data)
		{
			return _ht.insert(data);
		}

		iterator begin()
		{
			return _ht.begin();
		}

		iterator end()
		{
			return _ht.end();
		}

		// capacity
		size_t size() const
		{
			return _ht.size();
		}

		bool empty() const
		{
			return _ht.empty();
		}

		//lookup
		iterator find(const K& key)
		{
			return _ht.find(key);
		}

		size_t count(const K& key)
		{
			return _ht.count(key);
		}

		//access
		V& operator[](const K& key)
		{
			pair<iterator, bool> ret = _ht.insert(pair<K, V>(key, V()));
			//     ret.first == iterator._node   ret.first.operator()-> == &iterator._data   ret.first->second == _data.second
			iterator it = ret.first;
			return ret.first->second;
		}
	private:
		HT _ht;
	};

}
