from tokenizer import tokenize
from parser import parse


def plot(points, reset=False):
    """Scatter-plot a sequence of 2-D points with matplotlib.

    Args:
        points: iterable of two-element sequences ``[x, y]``.
        reset: when True, shift each point into screen coordinates
            (``x + 100``, ``300 - y``) before plotting.

    Fix over the original: the transformed coordinates are computed
    into locals instead of being written back into ``p[0]``/``p[1]``,
    so the caller's point lists are no longer mutated as a side effect.
    """
    # Imported lazily so importing this module does not require matplotlib.
    import matplotlib.pyplot as plt
    xs = []
    ys = []
    for p in points:
        px, py = p[0], p[1]
        if reset:
            px = px + 100
            py = -py + 300
        xs.append(px)
        ys.append(py)
    plt.scatter(xs, ys, c='black')


def plot2(points, reset=False):
    """Draw a sequence of 2-D points using the turtle module.

    Each point is visited pen-up, then a tiny pen-down stroke of one
    unit marks it. Coordinates are scaled by 5; with ``reset`` they are
    first shifted by (-60, +60) with the y-axis flipped.

    Args:
        points: iterable of two-element sequences ``[x, y]``.
        reset: when True, apply the shift/flip before scaling.

    Fixes over the original:
    - ``turtle.fill(True)`` was removed in Python 3; the modern
      ``begin_fill()``/``end_fill()`` pair is used instead.
    - Coordinates are transformed into locals, so the caller's point
      lists are no longer mutated in place.
    """
    import turtle as t
    t.speed(10)
    t.begin_fill()  # was t.fill(True), removed in Python 3
    for p in points:
        px, py = p[0], p[1]
        if reset:
            px = px - 60
            py = -py + 60
        px *= 5
        py *= 5
        t.up()
        t.goto(px, py)
        t.down()
        # Short stroke so the point leaves a visible mark.
        t.goto(px + 1, py + 1)
    t.end_fill()
    t.up()
    # Park the turtle far off-canvas when done.
    t.goto(700, 700)


def compile_from_str(src):
    """Compile source text into a point list: tokenize, then parse."""
    return parse(tokenize(src))


def compile_from_file(filename):
    """Read a source file and compile its contents.

    Args:
        filename: path of the source file to read.

    Returns:
        Whatever ``compile_from_str`` produces for the file's text.

    Fix over the original: the file is opened with a context manager,
    so the handle is always closed (the original leaked it).
    """
    with open(filename, 'r') as fr:
        src = fr.read()
    return compile_from_str(src)


def test():
    """Smoke test: compile a small sample program and scatter-plot it."""
    sample = 'for t from 0 to 100 step 10 draw(t,t);'
    plot(compile_from_str(sample), reset=True)

if __name__ == '__main__':
    test()
    # Wait for Enter so the script (and any plot window) stays alive.
    input()
