#include "unicode.h"

void Unicode::utf8decode(unsigned char* chr, unsigned int len, std::vector<u32>* uni)
{
	// Decode a UTF-8 byte buffer chr[0..len-1] into Unicode code points,
	// appending one u32 per decoded character to *uni.
	// Multibyte sequences that would run past `len` (truncated input) are
	// not followed; their lead byte falls through to the 1-byte branch and
	// is passed through raw, so we never read beyond the buffer.
	u32 unicode = 0;
	
	for (unsigned int i=0; i<len; ++i) {
		// 2-byte sequence: lead byte 110xxxxx carries 5 payload bits,
		// followed by 1 continuation byte
		if (chr[i] >> 5 == 0x06 && i + 1 < len) {
			unicode = (u32)(chr[i] & 0x1F) << 6;
			utf8long(chr, &i, &unicode, 1);
		}
		// 3-byte sequence: lead byte 1110xxxx carries 4 payload bits,
		// followed by 2 continuation bytes (mask 0x0F, not 0x1F: only the
		// low 4 bits are payload — bit 4 of 1110xxxx is always 0, so 0x1F
		// happened to work, but 0x0F states the intent)
		else if (chr[i] >> 4 == 0x0E && i + 2 < len) {
			unicode = (u32)(chr[i] & 0x0F) << 12;
			utf8long(chr, &i, &unicode, 2);
		}
		// 4-byte sequence: lead byte 11110xxx carries 3 payload bits,
		// followed by 3 continuation bytes.
		// BUG FIX: was `& 0x1F`, which keeps bit 4 of the lead byte
		// (0xF0 & 0x1F == 0x10) and, after << 18, injected a spurious
		// 0x400000 into every code point above U+FFFF.  11110xxx has only
		// 3 payload bits -> mask with 0x07.
		else if (chr[i] >> 3 == 0x1E && i + 3 < len) {
			unicode = (u32)(chr[i] & 0x07) << 18;
			utf8long(chr, &i, &unicode, 3);
		}
		// 1-byte sequence (0xxxxxxx), or a truncated/invalid lead byte:
		// emit the raw byte value unchanged
		else {
			unicode = chr[i];
		}
		uni->push_back( unicode );
	}
}

void Unicode::utf8long(unsigned char* chr, unsigned int* i, u32* uni, unsigned char c)
{
	// Fold the payloads of `c` UTF-8 continuation bytes (10xxxxxx) into
	// *uni, most-significant group first, advancing *i past each byte
	// consumed.  The caller pre-seeds *uni with the lead byte's bits.
	while (c > 0) {
		++(*i);
		// each continuation byte contributes 6 bits: xx111111 & 0x3F
		*uni |= (u32)(chr[*i] & 0x3F) << (6 * (c - 1));
		--c;
	}
}

void Unicode::utf8tou64(unsigned char* chr, u64* uni, u8 c)
{
	// Fold `c` UTF-8 continuation bytes (10xxxxxx) from chr[0..c-1] into
	// *uni, most-significant group first.  *uni is accumulated into, so
	// the caller is expected to pre-seed it (typically with the lead
	// byte's payload bits, or zero).
	u8 i=0;
	for (; c>0; --c) {
		// BUG FIX: cast to u64 BEFORE shifting.  The original shifted the
		// masked byte as a (32-bit) int and cast the result afterwards, so
		// for c >= 6 the shift amount reaches 30+ bits — signed-integer
		// overflow (undefined behavior) — and any bits destined for
		// positions >= 32 were silently lost, defeating the 64-bit
		// accumulator this function exists to fill.
		*uni = *uni + ((u64)(chr[i] & 0x3F) << (6*(c-1)));
		i++;
	}
}

