from pyspark import SparkContext
import itertools

def combines(data, *sizes):
    """Yield an ``itertools.combinations`` iterator over *data* for each size.

    Each yielded value is itself lazy — callers iterate it to obtain the
    individual combination tuples of that size.
    """
    yield from (itertools.combinations(data, size) for size in sizes)

# Reuse an already-running SparkContext if one exists, otherwise create one.
sc = SparkContext.getOrCreate()
# Sanity check only; getOrCreate always returns a SparkContext.
assert isinstance(sc, SparkContext)

# Input: one comma-separated list of products per line.
# NOTE(review): hard-coded absolute local path — only works on this one
# machine; consider making it a parameter or config value.
rdd1 = sc.textFile("file:///Users/sonto/Workspace/Rimi/P1902/spark_example/products.txt")

def map_func1(line):
    """Split a comma-separated line and return every sorted combination
    of 1, 2, and 3 of its items, each as a sorted list.
    """
    items = line.split(",")
    return [
        sorted(combo)
        for size in (1, 2, 3)
        for combo in itertools.combinations(items, size)
    ]

def do_sub_pairs(kv):
    """Expand one counted item set into lookup records.

    Always emits ``(key, (None, value))`` for the set itself; for keys with
    two or more items, also emits ``(subset, (key, value))`` for every
    subset one item smaller, so each superset can be found from its subsets.
    """
    key, value = kv
    records = [(key, (None, value))]
    size = len(key)
    if size > 1:
        records.extend(
            (subset, (key, value))
            for subset in itertools.combinations(key, size - 1)
        )
    return records

# Count occurrences of each item set.  reduceByKey combines the 1s map-side
# before shuffling, unlike the original groupByKey().mapValues(sum), which
# shipped every individual 1 across the network first — same result, far
# less shuffle traffic.
rdd1 = rdd1.flatMap(map_func1).map(lambda v: (tuple(v), 1)).reduceByKey(lambda a, b: a + b)

# Index each counted set by its (size-1)-subsets and print the grouped
# records: (subset_key, [(superset_or_None, count), ...]).
for x in rdd1.flatMap(do_sub_pairs).groupByKey().mapValues(list).collect():
    print(x)


