import os

# Point the Hugging Face Hub client at a mirror endpoint.
# NOTE: must be set before `transformers` is imported, or it has no effect.
os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"

# Local cache directory for downloaded models/weights.
# Builds "D:\ModelSpace\Cache" on Windows ('D:' alone is drive-relative,
# so the explicit os.path.sep makes the path absolute on that drive).
# NOTE(review): on POSIX this yields "/ModelSpace/Cache" — script looks
# Windows-only; confirm before running elsewhere.
cache_dir = os.path.join('D:', os.path.sep, 'ModelSpace', 'Cache')
os.environ['HF_HOME'] = cache_dir

from transformers import pipeline

# Create the token-classification (NER) pipeline.
# NOTE: this runs at module import time and will download the model into
# HF_HOME on first run — a deliberate module-level side effect here, since
# the script's sole purpose is to drive this pipeline.
nlp = pipeline("token-classification", model="dbmdz/bert-large-cased-finetuned-conll03-english")

# Run the token-classification example when executed as a script.
if __name__ == "__main__":
    sample = "My name is Wolfgang and I live in Berlin."
    for ent in nlp(sample):
        print(f"Word: {ent['word']}, Entity: {ent['entity']}")
    # Expected output:
    #   Word: Wolfgang, Entity: I-PER
    #   Word: Berlin, Entity: I-LOC