from pyspark import SparkContext

# Demonstrate the difference between RDD.reduceByKey (distributed; returns an
# RDD that must be collect()-ed) and RDD.reduceByKeyLocally (merges results on
# the driver; returns a plain dict immediately).
sc = SparkContext('local')
try:
    pairs = sc.parallelize(
        [('key1', 1), ('key2', 3), ('key1', 5), ('key2', 7), ('key3', 2)]
    )

    # Distributed sum per key; stays lazy until collect() is called.
    reduce_by_key_result = pairs.reduceByKey(lambda accumulate, ele: accumulate + ele)

    # Same reduction, but executed as an action: the merged {key: value}
    # dict is returned directly to the driver.
    reduce_locally_result = pairs.reduceByKeyLocally(lambda accumulate, ele: accumulate + ele)

    print(reduce_by_key_result.collect())
    print(reduce_locally_result)
finally:
    # Always release the context; otherwise a second run in the same
    # interpreter fails with "Cannot run multiple SparkContexts at once".
    sc.stop()