from pyspark import SparkContext
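
# Demonstrates how RDD.fold and RDD.aggregate combine a zeroValue with
# per-partition results, and why the partition count can change the answer.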

sc = SparkContext('local')
rdd1 = sc.parallelize([1, 2, 3, 4, 5], 1)  # single partition
rdd2 = sc.parallelize([1, 2, 3, 4, 5], 4)  # same data split across four partitions
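
# A quick sanity check (a sketch, assuming Spark's default way of slicing a
# Python list in parallelize): glom() gathers each partition into a list, so
# the split that drives the differing aggregate results below becomes visible.
print(rdd2.glom().collect())  # e.g. [[1], [2], [3], [4, 5]]
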
# seqOp folds one element into a partition-local accumulator of (sum, product).
seqOp = lambda acc, element: (acc[0] + element, acc[1] * element)
# combOp merges two partition accumulators, so both arguments are tuples.
combOp = lambda acc1, acc2: (acc1[0] + acc2[0], acc1[1] * acc2[1])

zeroData1 = 0
foldResult = rdd1.fold(zeroData1, lambda accumulate, element: accumulate + element)
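# fold applies zeroData1 inside every partition and once more when merging,
# so the zeroValue should be the identity of the op (0 for +); here the
# result is simply 1 + 2 + 3 + 4 + 5 = 15.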

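# (1, 2) is not the identity for (+, *): the zeroValue seeds every partition's
# seqOp and the final combOp, so aggregate's result depends on the number of
# partitions.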
zeroData = (1, 2)

aggregate1 = rdd1.aggregate(zeroData, seqOp, combOp)  # one partition
aggregate2 = rdd2.aggregate(zeroData, seqOp, combOp)  # four partitions

print(foldResult)  # 15
print(aggregate1)  # (17, 480): zeroData folded in twice (1 partition + final merge)
print(aggregate2)  # (20, 3840) with the slicing above: zeroData folded in five times
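
# Release the SparkContext's resources once the demo is done.
sc.stop()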