#ifndef utf16_h
#define utf16_h

// this header depends on the following project headers
#include "wchar.h"
#include "ascii.h"
#include "unicode.h"

// UTF16 can encode characters in blocks of 2 bytes
// these building blocks of 2 bytes can be affected by endian
// https://stackoverflow.com/questions/6240055/manually-converting-unicode-codepoints-into-utf-8-and-utf-16
// https://en.wikipedia.org/wiki/UTF-16
// https://linjan2.github.io/utf16-utf16.html

// UTF16 has this thing, there are valid/invalid
// code points that can be represented by it

// BIG ENDIAN
// code point range     bin code point representation   bin write representation              bits encoded
// 0x0000 - 0xD7FF      XXXXXXXX YYYYYYYY               XXXXXXXX YYYYYYYY                     16    
// 0xD800 - 0xDFFF      (invalid code points)           ...                                   ...
// 0xE000 - 0xFFFF      XXXXXXXX YYYYYYYY               XXXXXXXX YYYYYYYY                     16
// 0x010000 - 0x10FFFF  XXYY YYYYYYWW ZZZZZZZZ          110110XX YYYYYYYY 110111WW ZZZZZZZZ   20

// LITTLE ENDIAN (byte-swapped within each 2-byte code unit)
// code point range     bin code point representation   bin write representation              bits encoded
// 0x0000 - 0xD7FF      XXXXXXXX YYYYYYYY               YYYYYYYY XXXXXXXX                     16
// 0xD800 - 0xDFFF      (invalid code points)           ...                                   ...
// 0xE000 - 0xFFFF      XXXXXXXX YYYYYYYY               YYYYYYYY XXXXXXXX                     16
// 0x010000 - 0x10FFFF  XXYY YYYYYYWW ZZZZZZZZ          YYYYYYYY 110110XX ZZZZZZZZ 110111WW   20

// ranges of the valid integers
// (every Unicode code point except the surrogate range 0xD800 - 0xDFFF,
//  which is reserved by the UTF-16 encoding itself)
#define OWL_UTF16_RANGE1_MIN 0x000000
#define OWL_UTF16_RANGE1_MAX 0x00D7FF
#define OWL_UTF16_RANGE2_MIN 0x00E000
#define OWL_UTF16_RANGE2_MAX 0x00FFFF
#define OWL_UTF16_RANGE3_MIN 0x010000
#define OWL_UTF16_RANGE3_MAX 0x10FFFF
// ranges of the invalid integers (the surrogate halves: not characters,
// only usable as the two 2-byte units of a surrogate pair)
#define OWL_INV_UTF16_RANGE1_MIN 0xD800
#define OWL_INV_UTF16_RANGE1_MAX 0xDFFF
// for when the character is in the 0x010000 - 0x10FFFF range the following mask
// will be used; it keeps the top 6 bits of a byte so the byte can be compared
// against the two surrogate markers below
#define OWL_UTF16_MASK_1 0xFC // 11111100
// the result of the mask (applied to the needed byte)
// will output one of the following outputs if the character is UTF16:
// BLOCK_1 = high (leading) surrogate marker 110110xx,
// BLOCK_2 = low (trailing) surrogate marker 110111xx
#define OWL_UTF16_BLOCK_1 0xD8 // 11011000
#define OWL_UTF16_BLOCK_2 0xDC // 11011100
// also, for the same range, this is the integer to be subtracted
// from the actual code point to be able to be encoded in 20 bits
#define OWL_UTF16_LAST_RANGE_BIAS 0x10000
// replacement strings for byte sequences that are not valid UTF-16
// NOTE(review): how these are used lives in utf16.c (not visible here) --
// presumably emitted by the print/get functions on invalid input; confirm
#define OWL_INV_ENC_UTF16BE_STR "[INV_ENC_UTF16BE]"
#define OWL_INV_ENC_UTF16LE_STR "[INV_ENC_UTF16LE]"

// check, get, write and print a UTF16 character
// NOTE: the "be" and "le" functions are for big endian and
//       little endian byte ordered UTF16 encoded characters

// basic functions
// NOTE(review): the implementations are in utf16.c (included below) and are
// not visible from this header; the per-function notes are inferred from the
// names, the parameter shapes and the tables above -- confirm against utf16.c.
//
// check:  presumably return nonzero iff the input is valid
//         (a code point UTF-16 can encode, or a well-formed encoded sequence
//          of at most `size` bytes starting at `src`)
// get:    presumably decode the encoded bytes at `src` into a code point
// write:  presumably encode `ch` into `dest` (capacity `size` bytes) and
//         return a pointer into/past `dest` -- exact return contract in utf16.c
// print:  presumably print the character, returning a status byte
owl_byte owl_check_wchar_utf16(owl_wchar ch);
owl_byte owl_check_wchar_enc_utf16be(owl_byte * src, owl_umax size);
owl_byte owl_check_wchar_enc_utf16le(owl_byte * src, owl_umax size);
owl_wchar owl_get_wchar_utf16be(owl_byte * src, owl_umax size);
owl_wchar owl_get_wchar_utf16le(owl_byte * src, owl_umax size);
owl_byte * owl_write_wchar_enc_utf16be(owl_wchar ch, owl_byte * dest, owl_umax size);
owl_byte * owl_write_wchar_enc_utf16le(owl_wchar ch, owl_byte * dest, owl_umax size);
owl_byte owl_print_wchar_utf16(owl_wchar ch);
owl_byte owl_print_wchar_enc_utf16be(owl_byte * src, owl_umax size);
owl_byte owl_print_wchar_enc_utf16le(owl_byte * src, owl_umax size);

// conversion to unicode
// NOTE(review): since every UTF-16-encodable value IS its Unicode code point,
// these are presumably validation/identity-style mappings -- verify in utf16.c
owl_wchar owl_wchar_utf16_as_unicode(owl_wchar utf16);
owl_wchar owl_wchar_unicode_as_utf16(owl_wchar unicode);

#include "utf16.c"

#endif // utf16_h
