Update llava_olmo.py
llava_olmo.py CHANGED: +0 -34
@@ -62,37 +62,3 @@ print(decoded_text[:decoded_text.find('</s>')].replace('|||IP_ADDRESS|||', ''))
 print("-"*100)
 
 
-#
-##
-#
-#
-#
-'''
-# ORIGINAL CODE WITH ONLY OLMO:
-with open('llava/config.json') as json_file:
-    data = json.load(json_file)
-
-text = "Paris is a historic city with architectural marvels. It is also "
-# text = ["Language modeling is "]
-
-config_class = llava_olmo.LlavaOLMoBitnet1BConfig(**data)
-lolmo = llava_olmo.LlavaOLMoBitnet1BForCausalLM(config_class).to(device)
-lolmo.load_state_dict(torch.load('OLMo_Bitnet_1B/pytorch_model.bin'), strict=False)
-
-olmo = OLMoForCausalLM(config_class).to(device)
-olmo.load_state_dict(torch.load('OLMo_Bitnet_1B/pytorch_model.bin'))
-actual_olmo = OLMoForCausalLM.from_pretrained("allenai/OLMo-1B").to(device)
-
-actual_olmo_tokenizer = OLMoTokenizerFast.from_pretrained("allenai/OLMo-1B")
-olmo_tokenizer = AutoTokenizer.from_pretrained("NousResearch/OLMo-Bitnet-1B")
-
-olmo_tokens = olmo_tokenizer(text, return_tensors='pt', return_token_type_ids=False).to(device)
-# olmo_tokens = actual_olmo_tokenizer(text, return_tensors='pt', return_token_type_ids=False).to(device)
-
-
-response = lolmo.generate(inputs=olmo_tokens['input_ids'], attention_mask=olmo_tokens['attention_mask'], max_new_tokens=100, do_sample=True, top_k=50, top_p=0.95)
-# response = olmo.generate(inputs=olmo_tokens['input_ids'], attention_mask=olmo_tokens['attention_mask'], max_new_tokens=100, do_sample=True, top_k=50, top_p=0.95)
-
-
-print(olmo_tokenizer.batch_decode(response, skip_special_tokens=True)[0])
-'''
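The block dropped above was the commented-out, text-only OLMo path kept for reference. A minimal self-contained sketch of that path is shown below; it assumes the ai2-olmo package (which provides hf_olmo) is installed and the allenai/OLMo-1B checkpoint referenced in the removed lines is reachable, and the imports and device selection are illustrative additions rather than code from this repository.

import torch
from hf_olmo import OLMoForCausalLM, OLMoTokenizerFast

# Assumed device selection; the original file defines `device` elsewhere.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Same prompt and sampling settings as the removed snippet.
text = "Paris is a historic city with architectural marvels. It is also "

olmo = OLMoForCausalLM.from_pretrained("allenai/OLMo-1B").to(device)
tokenizer = OLMoTokenizerFast.from_pretrained("allenai/OLMo-1B")

tokens = tokenizer(text, return_tensors="pt", return_token_type_ids=False).to(device)
response = olmo.generate(
    inputs=tokens["input_ids"],
    attention_mask=tokens["attention_mask"],
    max_new_tokens=100,
    do_sample=True,
    top_k=50,
    top_p=0.95,
)
print(tokenizer.batch_decode(response, skip_special_tokens=True)[0])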