import sys

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

# Describe an image with a Qwen-VL-style vision-language chat model.
#
# BUG FIX: the original script pointed model_dir at
# 'BelleGroup/BELLE-7B-0.2M', a text-only instruction model. The API used
# below (tokenizer.from_list_format, model.chat with <img>...</img> markup
# in the query) is custom code shipped only with the Qwen-VL family of
# checkpoints, so the original crashed with AttributeError on
# tokenizer.from_list_format before any inference ran.
model_dir = 'Qwen/Qwen-VL-Chat'

# Image to describe: first CLI argument if given, otherwise the original
# hard-coded sample path (kept as the default for backward compatibility).
img_path = sys.argv[1] if len(sys.argv) > 1 else (
    '/Users/sunshicheng/myspace/code/my_code/use-lang-chain/src/1.jpg'
)

# trust_remote_code is required: Qwen-VL ships its multimodal tokenizer,
# model class, and chat() implementation as custom code in the repo.
tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_dir, trust_remote_code=True).eval()
model.generation_config = GenerationConfig.from_pretrained(model_dir, trust_remote_code=True)

question = '详细描述一下这张图'  # "Describe this image in detail."

# Variant 1: let the tokenizer's helper assemble the multimodal query;
# it wraps each image entry in <img>...</img> markup internally.
query = tokenizer.from_list_format([
    {'image': img_path},  # either a local path or a URL
    {'text': question},
])
response, history = model.chat(tokenizer, query=query, history=None)
print(response)

# Variant 2: hand-write the same markup directly. history=None starts a
# fresh conversation rather than continuing the one above.
query = f'<img>{img_path}</img>\n{question}'
response, history = model.chat(tokenizer, query=query, history=None)
print(response)