"""Minimal PySpark demo: flatten an RDD of nested lists with flatMap."""
from pyspark import SparkConf, SparkContext
import os

# Tell Spark workers which Python interpreter to use.
# NOTE(review): hard-coded Windows path — machine-specific; prefer setting
# PYSPARK_PYTHON in the environment outside the script.
os.environ['PYSPARK_PYTHON'] = "C:/Python310/python.exe"


def main():
    """Build a local SparkContext, flatten the nested lists, print the result.

    Prints: the nine strings from the three inner lists, in order, as one
    flat Python list.
    """
    conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
    sc = SparkContext(conf=conf)
    try:
        rdd = sc.parallelize([["java", "python", "go"], ["html", "javascript", "vue"], ["mysql", "db2", "oracle"]])

        # flatMap with the identity function removes exactly one level of
        # nesting: each inner list's elements become individual RDD records.
        flat = rdd.flatMap(lambda x: x)

        print(flat.collect())
    finally:
        # Original script never stopped the context; without this the JVM
        # gateway and executor resources are leaked until process exit.
        sc.stop()


if __name__ == "__main__":
    main()