# Functional programming practice

# Process a large data stream lazily with a generator
def read_large_file(file_path, chunk_size=1024 * 1024):
    """Yield successive chunks of the text file at *file_path*.

    Streams the file instead of loading it all into memory, so it is
    safe for arbitrarily large inputs.

    Args:
        file_path: Path of the file to read.
        chunk_size: Characters to read per chunk (default 1 MiB).

    Yields:
        str: The next chunk of file content; iteration stops at EOF.
    """
    with open(file_path, 'r') as f:
        # Walrus operator reads and tests in one step: '' at EOF is falsy.
        while chunk := f.read(chunk_size):
            yield chunk

# Function composition: sum of the squares of the even numbers in 0..9.
from functools import reduce

# Idiomatic form of reduce(add, map(square, filter(even, range(10)))):
# a generator expression feeding the C-implemented built-in sum().
result = sum(x ** 2 for x in range(10) if x % 2 == 0)