anamikac2708 committed on
Commit
6f6ec05
1 Parent(s): c26830a

Update README.md

Files changed (1)
  1. README.md +0 -2
README.md CHANGED
@@ -33,7 +33,6 @@ Please find an example below using Unsloth:
 import torch
 from unsloth import FastLanguageModel
 from transformers import AutoTokenizer, pipeline
-model_id='FinLang/investopedia_chat_model'
 max_seq_length=2048
 model, tokenizer = FastLanguageModel.from_pretrained(
     model_name = "anamikac2708/Llama3-8b-finetuned-investopedia-Lora-Adapters", # YOUR MODEL YOU USED FOR TRAINING
@@ -41,7 +40,6 @@ model, tokenizer = FastLanguageModel.from_pretrained(
     dtype = torch.bfloat16,
     load_in_4bit = False #Make it True if you want to use bitsandbytes 4bit
 )
-tokenizer = AutoTokenizer.from_pretrained(model_id)
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
 example = [{'content': 'You are a financial expert and you can answer any questions related to finance. You will be given a context and a question. Understand the given context and\n try to answer. Users will ask you questions in English and you will generate answer based on the provided CONTEXT.\n CONTEXT:\n D. in Forced Migration from the University of the Witwatersrand (Wits) in Johannesburg, South Africa; A postgraduate diploma in Folklore & Cultural Studies at Indira Gandhi National Open University (IGNOU) in New Delhi, India; A Masters of International Affairs at Columbia University; A BA from Barnard College at Columbia University\n', 'role': 'system'}, {'content': ' In which universities did the individual obtain their academic qualifications?\n', 'role': 'user'}, {'content': ' University of the Witwatersrand (Wits) in Johannesburg, South Africa; Indira Gandhi National Open University (IGNOU) in New Delhi, India; Columbia University; Barnard College at Columbia University.', 'role': 'assistant'}]
 prompt = pipe.tokenizer.apply_chat_template(example[:2], tokenize=False, add_generation_prompt=True)
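The README example shown in this diff stops at building the prompt. As a minimal, hedged sketch (continuing the `pipe` and `prompt` variables above; the sampling parameters are illustrative and not part of the commit), the generation step that usually follows looks like:

# Continues from `pipe` and `prompt` defined in the README example above.
# Sampling parameters here are illustrative defaults, not taken from the commit.
outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_p=0.9)
print(outputs[0]["generated_text"])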