from pyspark import SparkContext, SparkConf
from pyspark.sql import SparkSession
import os

# Pin the remote execution environment to one known toolchain, so runs do
# not pick up a different Spark/Python/JDK version that happens to be installed.
_ENV_SETTINGS = {
    'SPARK_HOME': '/export/server/spark',
    'PYSPARK_PYTHON': '/root/anaconda3/bin/python',
    'PYSPARK_DRIVER_PYTHON': '/root/anaconda3/bin/python',
    'JAVA_HOME': '/export/server/jdk',
}
os.environ.update(_ENV_SETTINGS)

# IDE shortcut: typing "main" then Enter expands to this guard.
if __name__ == '__main__':
    # Sanity check: show which JDK the environment now points at.
    print(os.environ.get('JAVA_HOME'))