#!/usr/bin/env python3

import sys

# check the OS type.
# if sys.platform == "darwin":
#     print("You are running Mac OS")
# elif sys.platform == "win32":
#     print("You are running Win32")
# elif sys.platform == "linux":
#     print("You are running Linux")

# get the encoding type.
# print(sys.getdefaultencoding())

# print out the arguments passed from command line.
# print(sys.argv)
from getopt import getopt

# Parse command-line arguments (example 1).
# try:
#     opts, args = getopt(sys.argv[1:], "h:p:t:", ["host="])
#     for opt_key, opt_val in opts:
#         print("option: {} = {}".format(opt_key, opt_val))
#     print("The arguments are {}".format(args))
# except Exception as e:
#     print(e)

# Parse command-line arguments (example 2).
# import argparse
#
# ap = argparse.ArgumentParser()
# ap.add_argument("-H", "--host", type=str, help="this option is for host")
# ap.add_argument("-p", type=int, help="this option is for port", action="append", required=True)
# arg = ap.parse_args(sys.argv[1:])
# print(arg)

import os

# Open a file in append mode.
# fd = os.open("./README.txt", os.O_RDWR | os.O_APPEND)
# data = os.read(fd, 4)
# os.write(fd, "this is a test".encode("utf-8"))
# os.close(fd)

# Read an environment variable (with a default fallback).
# ip = os.getenv("DJANGO_ADDR", "127.0.0.1")
# print(ip)

# Run an external program, method 1 (os.spawnl).
# pid = os.spawnl(os.P_WAIT, "/opt/local/libexec/gnubin/ls", "ls")
# print(pid)

# Run an external program, method 2 (os.system).
# os.system("ls -l")

# Run an external program, method 3 (os.execl, replaces this process).
# os.execl("/opt/local/libexec/gnubin/ls", "ls", "-l")

# read_fd, write_fd = os.pipe()
# read_fd1, write_fd1 = os.pipe()
#
# pid = os.fork()
# if pid == 0:
#     os.close(write_fd)
#     os.close(read_fd1)
#     while True:
#         data = os.read(read_fd, 4)
#         os.write(write_fd1, data)
# else:
#     os.close(read_fd)
#     os.close(write_fd1)
#     while True:
#         d = input("Enter: ")
#         os.write(write_fd, d.encode("utf-8")[:4])
#         data = os.read(read_fd1, 4)
#         print(data.decode("utf-8"))

# import multiprocessing
# x = multiprocessing.Process(target=)
# x.start()

import numpy as np

# NumPy demo: build a 3x4 matrix and show cumulative sums and flooring.
b = np.arange(12).reshape(3, 4)
# Cumulative sum down each column (axis=0 accumulates over rows).
print(b.cumsum(axis=0))
# np.floor is a ufunc and requires an array argument; calling it with no
# arguments (as before) raised a TypeError. Demonstrate it on halved values.
print(np.floor(b / 2.0))

# PySpark demo: distribute a local collection as an RDD and collect it back.
from pyspark import SparkContext

sc = SparkContext()
try:
    # parallelize() requires the collection to distribute; calling it with no
    # arguments (as before) raised a TypeError.
    rdd = sc.parallelize([1, 2, 3, 4])
    print(rdd.collect())
finally:
    # Release the context so the JVM backend shuts down cleanly.
    sc.stop()