import sys

# This script expects exactly one positional argument: the prompt/keyword
# to feed into the text-generation pipeline below.
try:
    _, keyword = sys.argv
except ValueError:
    print("Usage: python example.py keyword")
    sys.exit(1)

# Main logic: initialize the Baichuan2-13B model and run text-generation inference.
import mindspore as ms
from mindspore import Model
from mindspore import Tensor
from mindspore.common import initializer as init

from mindformers.pipeline import pipeline
from mindformers.models import LlamaConfig
from mindformers import MindFormerConfig
from mindformers.trainer.utils import transform_and_load_checkpoint
from mindformers.core.context import build_context

from baichuan2_13b import Baichuan13BV2ForCausalLM
from baichuan2_tokenizer import Baichuan2Tokenizer


# init model
# Load the run configuration for Baichuan2-13B and initialize the MindSpore
# execution context (device, parallel mode, etc.) from it.
# NOTE(review): path is relative to the current working directory — the script
# must be launched from the repository root for this to resolve.
baichuan2_config_path = "research/baichuan2/run_baichuan2_13b.yaml"
baichuan2_config = MindFormerConfig(baichuan2_config_path)
build_context(baichuan2_config)

# Build the model config from the YAML's model_config section. The checkpoint
# path is cleared so the network is constructed with random weights; the real
# weights are loaded below via transform_and_load_checkpoint.
baichuan2_model_config = LlamaConfig(**baichuan2_config.model.model_config)
baichuan2_model_config.checkpoint_name_or_path = None
baichuan2_network = Baichuan13BV2ForCausalLM(
    config=baichuan2_model_config
)

# Wrap the network in a MindSpore Model for checkpoint loading/compilation.
baichuan2_model = Model(baichuan2_network)


# Dummy all-ones input of shape (1, seq_length) — presumably used by the
# framework to trace/compile the graph before loading weights; TODO confirm.
seq_length = baichuan2_config.model.model_config.seq_length
infer_data = Tensor(shape=(1, seq_length), dtype=ms.int32, init=init.One())
transform_and_load_checkpoint(baichuan2_config, baichuan2_model, baichuan2_network, infer_data, do_predict=True)

# init tokenizer
# Vocab file location comes from the processor section of the run config.
tokenizer = Baichuan2Tokenizer(
    vocab_file=baichuan2_config.processor.tokenizer.vocab_file
)

# Build a text-generation pipeline and run it on the CLI keyword.
# Sampling is disabled (greedy decoding): top_k=1 with neutral top_p,
# repetition penalty, and temperature.
pipeline_task = pipeline(task="text_generation", model=baichuan2_model, tokenizer=tokenizer)
generation_kwargs = {
    "do_sample": False,
    "top_k": 1,
    "top_p": 1.0,
    "repetition_penalty": 1.0,
    "temperature": 1.0,
    "max_length": 64,
}
pipeline_result = pipeline_task(keyword, **generation_kwargs)

print(pipeline_result)