"""
 compiling dataflow pipelines

 Idea: use python generators that accept another generator argument
       to model the dataflow consumers/producers
       
 First, a translation of examples from the paper
"""


def forever5():
	"""Source stage: endlessly yield the constant integer 5."""
	value = 5
	while True:
		yield value

def double(numbers):
	"""Transform stage: yield each value from *numbers* multiplied by two."""
	for value in numbers:
		yield value * 2

def accumulate(numbers):
	"""Transform stage: yield the running total of *numbers*.

	Emits one partial sum per input item (like itertools.accumulate),
	written as a plain generator so it composes with the other stages.
	"""
	# Named 'total' (not 'sum') so the builtin sum() is not shadowed.
	total = 0
	for n in numbers:
		total += n
		yield total

def output(numbers):
	"""Sink stage: consume *numbers* and print each item on its own line.

	This terminal stage drives the upstream generators by iterating them.
	"""
	for n in numbers:
		# print(n) behaves identically in Python 2 (parenthesized
		# expression statement) and Python 3 (function call), unlike
		# the original Python-2-only bare 'print n' statement.
		print(n)

import transducers as ts

# Wire the stages into pipelines. ts.compose(consumer, producer) presumably
# feeds the producer's output into the consumer's input -- TODO confirm the
# argument order against the transducers module, which is not shown here.
forever10 = ts.compose(double,forever5)  # 10, 10, 10, ...
stepBy10 = ts.compose(accumulate, forever10)  # 10, 20, 30, ...
showStepBy10 = ts.compose(output, stepBy10)  # prints the running totals

# NOTE(review): "optomize" looks like a typo for "optimize" -- verify this is
# really the name the transducers module exports before changing it.
ts.optomize()

# Intentionally never returns: forever5 is an infinite source, so this
# prints 10, 20, 30, ... until interrupted.
showStepBy10() # you'll have to CTRL-C this

"""
class TheBasicIdea:
    def forever10():
        for n in forever5():
            yield n*2

    def forever10():
        while True:
            n = 5
            yield n*2

    def forever10():
        while True:
            yield 5*2

    def forever10():
        while True:
            yield 10

class MoreRealisticSteps:
    def forever10():
        numbers = iter(forever5())
        while True:
            n = numbers.next()
            yield n*2
    def forever10():
        next = iter(forever5()).next
        while True:
            n = next()
            yield n*2
    def forever10():
        next = lambda: 5
        while True:
            n = next()
            yield n*2
    def forever10():
        while True:
            n = 5
            yield n*2
    # and so on with the first version
    
"""
