#!/bin/bash

# Lexical analyzer (tokenizer) test driver.
#
# Runs build/tests/test-inout-token over the tokenizer fixture files and
# collects all output into test/tmp/test-token.out for later comparison.
# NOTE(review): exit codes of the test binary are not checked here (the
# original script ignored them too); only unset-variable use is fatal.

set -u

readonly BIN=build/tests/test-inout-token
readonly OUT=test/tmp/test-token.out
readonly SRC=test/source/token

# The mask arguments passed to every invocation: treat @BUILDING_NUMBER_MASK
# tokens as base-10 numbers.
readonly -a BASE_MASK=(-n @BUILDING_NUMBER_MASK 10)
# Extended mask set for fixtures 03/04: "#nnnnnn" hex colors, 0b/0x prefixes.
readonly -a EXT_MASK=(-n "#nnnnnn" 16 -n 0bN 2 -n 0xN 16 -n @BUILDING_NUMBER_MASK 10)

# Make sure the output directory exists so the first redirection cannot fail.
mkdir -p -- "$(dirname -- "$OUT")"

"$BIN" -h > "$OUT"
"$BIN" "${BASE_MASK[@]}" "$SRC/Tokenizer-01.txt" >> "$OUT"
"$BIN" "${BASE_MASK[@]}" --use-char16-buffer "$SRC/Tokenizer-01.txt" >> "$OUT"
# BUGFIX: path was 'test/source/test/Tokenizer-02.txt' (typo'd directory);
# all other fixtures live under test/source/token.
"$BIN" "${BASE_MASK[@]}" "$SRC/Tokenizer-02.txt" >> "$OUT"
"$BIN" "${BASE_MASK[@]}" --use-char16-buffer "$SRC/Tokenizer-02.txt" >> "$OUT"
"$BIN" "${EXT_MASK[@]}" "$SRC/Tokenizer-03.txt" >> "$OUT"
"$BIN" "${EXT_MASK[@]}" "$SRC/Tokenizer-04.txt" >> "$OUT"
"$BIN" "${BASE_MASK[@]}" "$SRC/Tokenizer-05.txt" >> "$OUT"

