import io
import tokenize

# Build a token stream over this script's sibling file for tokenize_demo().
# Read the whole text up front so the file handle is closed immediately
# instead of leaking for the life of the process (the original left the
# handle from open() unclosed).
with open('iter_demo.py', encoding='utf-8') as _src:
    _source = _src.read()
reader = io.StringIO(_source).readline
tokens = tokenize.generate_tokens(reader)


def tokenize_demo(count=5, stream=None):
    """Print the next *count* tokens pulled from *stream*.

    Args:
        count: how many tokens to consume and print (default 5, matching
            the original hard-coded loop).
        stream: a token iterator as produced by
            ``tokenize.generate_tokens``; when None, falls back to the
            module-level ``tokens`` generator, preserving the original
            behaviour.

    Raises:
        StopIteration: if the stream is exhausted before *count* tokens
            were produced (same as the original).
    """
    source = tokens if stream is None else stream
    # ``_`` replaces the original unused ``index`` loop variable.
    for _ in range(count):
        print(next(source))


def power(values):
    """Yield every item of *values* unchanged, announcing each pull.

    Despite the name, no exponentiation happens — the trace print exists
    to show exactly when a chained generator pulls the next value.
    """
    for item in iter(values):
        print('powering %s' % item)
        yield item


def adder(values):
    """Yield ``value + 3`` for even inputs and ``value + 2`` for odd ones.

    Prints a trace line before each yield so lazy pipeline evaluation is
    visible.  The asymmetric +3/+2 offsets are kept exactly as in the
    original demo.

    Fix: removed the dead trailing ``pass`` statement.
    """
    for value in values:
        print('adding to %s' % value)
        yield value + 3 if value % 2 == 0 else value + 2


def subtractor(values):
    """Drain the ``adder(power(values))`` pipeline, printing each result.

    Fix: the original ``while res := next(g)`` loop stopped as soon as
    the pipeline yielded any falsy value (0), not only at exhaustion,
    and needed an explicit ``StopIteration`` guard.  A plain ``for``
    loop drains the generator correctly and handles exhaustion itself.
    """
    for res in adder(power(values)):
        print(res)


def psychologist():
    """Coroutine that 'listens': ``send()`` it a phrase, it prints a reply.

    Prime it with ``next()`` (runs up to the first bare ``yield``); every
    subsequent ``send(answer)`` resumes with *answer* bound and prints a
    canned response chosen by simple substring checks.  Sending ``'q'``
    or ``'quit'`` ends the session.
    """
    print('please tell me your problems')
    while True:
        # Suspend here; a bare yield produces None to the caller and
        # receives whatever the caller send()s (None when advanced
        # via next()).
        answer = (yield)
        print('answer: %s' % answer)
        if answer is not None:
            # Branch order matters: a question mark or 'good'/'bad'
            # substring wins over the quit keywords, so e.g. 'bad quit'
            # would NOT terminate the session.
            if answer.endswith('?'):
                print("Don't ask yourself too much questions")
            elif 'good' in answer:
                print("Ahh that's good, go on")
            elif 'bad' in answer:
                print("Don't be so negative")
            elif answer in ('q', 'quit'):
                print('Goodbye')
                # Extra yield lets the caller's final send() complete;
                # the return then raises StopIteration on the *next*
                # resume, which cleanly ends a driving for-loop.
                yield
                return
            else:
                print("I don't understand")


if __name__ == '__main__':
    print('Starting psychologist session, type "q" or "quit" to quit')
    freud = psychologist()
    # Drive the coroutine with a for-loop: each iteration first resumes
    # it via __next__() (the loop itself, which also primes it on the
    # first pass) and then again via send(problem) — so the coroutine is
    # advanced twice per user input, and every other resume sees None.
    # NOTE(review): phrase is always None here (a bare yield produces
    # None) and is intentionally unused.
    for phrase in freud:
        problem = input("> ")
        freud.send(problem)




if __name__ == '__main__':
    # subtractor([1, 4, 7, 9, 12, 19])
    # Prime a fresh psychologist coroutine: run it up to its first yield
    # (which prints the greeting) and leave it suspended.  send(None) on
    # a just-started generator is equivalent to next().
    session = psychologist()
    session.send(None)


