#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""

@author ：chenzf
@date ：2024/4/23 下午2:32 
"""
from pyspark import *
import os

# Point PySpark at the local toolchain (Windows paths; adjust per machine).
os.environ['PYSPARK_PYTHON'] = 'C:/coder/python/Python3.10.9/python.exe'
os.environ['JAVA_HOME'] = 'C:/coder/Java/jdk1.8.0_112'
os.environ['HADOOP_HOME'] = 'C:/coder/hadoop-3.0.0'

# Local master using all cores; default parallelism forced to 1 so the
# small demo RDDs stay in a single partition and print deterministically.
conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
conf.set("spark.default.parallelism", "1")  # global default parallelism
spark_context = SparkContext(conf=conf)

try:
    # numSlices pins the partition count of the demo data to 1.
    content = spark_context.parallelize([1, 2, 3, 4, 5], numSlices=1)
    rdd = content.map(lambda x: x * 2)
    print(f'自身翻倍：{rdd.collect()}')
    print(f'偶数:{content.filter(lambda x: x % 2 == 0).collect()}')

    # map keeps one token-list per element; flatMap flattens the tokens.
    content = spark_context.parallelize(['it i222', 'it 25s', 'ai 152df'], numSlices=1)
    print(f'{content.map(lambda x: x.split(" ")).collect()}')
    print(f'{content.flatMap(lambda x: x.split(" ")).collect()}')

    print(f'去重：{spark_context.parallelize([1, 1, 3, 4, 4]).distinct().collect()}')
    print(f'两两相加：{spark_context.parallelize([1, 1, 3, 4, 4]).reduce(lambda x, y: x + y)}')
    print(f'元素汇总：{spark_context.parallelize([1, 1, 3, 4, 4]).count()}')
    print(f'元素汇总：{spark_context.parallelize([1, 1, 3, 4, 4]).take(3)}')

    # Sort key/value pairs by the value component (x[1]), ascending.
    content = spark_context.parallelize([('a', 5), ('b', 4), ('c', 3), ('d', 3)], numSlices=1)
    print(f'排序:{content.sortBy(lambda x: x[1], ascending=True, numPartitions=1).collect()}')

    # reduceByKey aggregates the values that share the same key.
    content = spark_context.parallelize([('a', 1), ('a', 1), ('a', 1), ('b', 1), ('b', 1)])
    print(f'相同的key聚合计算value值:{content.reduceByKey(lambda x, y: x + y).collect()}')

    # Write the RDD as gzip-compressed text files. compressionCodecClass is
    # the fully-qualified class name of a Hadoop compression codec.
    # NOTE: saveAsTextFile raises if D:/output already exists.
    content = spark_context.parallelize([1, 2, 3, 4, 5], numSlices=1)
    content.saveAsTextFile('D:/output', compressionCodecClass='org.apache.hadoop.io.compress.GzipCodec')
finally:
    # Fix: stop() previously ran only when every job above succeeded; any
    # failure (e.g. an existing output directory) leaked the JVM-backed
    # SparkContext. finally guarantees the context is always released.
    spark_context.stop()
