from pyspark import SparkConf, SparkContext

import os

# Build the Spark configuration: local mode using all available cores.
spark_conf = SparkConf().setMaster('local[*]').setAppName('test_spark_app')

# Entry point for all RDD operations in this script.
spark_ctx = SparkContext(conf=spark_conf)

# Distribute a small list of (key, value) pairs as an RDD.
pairs = spark_ctx.parallelize([('abc', 45), ('def', 23), ('ghj', 12), ('jkl', 125)])

# sortBy: order the pairs by their numeric value, ascending, in a single partition.
# Expected output: [('ghj', 12), ('def', 23), ('abc', 45), ('jkl', 125)]
sorted_pairs = pairs.sortBy(lambda kv: kv[1], ascending=True, numPartitions=1).collect()
print(sorted_pairs)

# Shut down the Spark context to release resources.
spark_ctx.stop()
