kunato committed on
Commit
4a63c11
1 Parent(s): a628760

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -30
README.md CHANGED
@@ -101,16 +101,13 @@ vllm serve scb10x/llama3.1-typhoon2-8b-instruct
101
 
102
  ## Function-Call Example
103
  ```python
104
- import json
105
  import torch
106
  from transformers import AutoModelForCausalLM, AutoTokenizer
107
- import os
108
  import ast
109
-
110
  model_name = "scb10x/llama3.1-typhoon2-8b-instruct"
111
  tokenizer = AutoTokenizer.from_pretrained(model_name)
112
  model = AutoModelForCausalLM.from_pretrained(
113
- model_name, torch_dtype=torch.bfloat16
114
  )
115
 
116
  get_weather_api = {
@@ -171,10 +168,6 @@ messages = [
171
  {"role": "user", "content": "ขอราคาหุ้น Tasla (TLS) และ Amazon (AMZ) ?"},
172
  ]
173
 
174
- final_prompt = tokenizer.apply_chat_template(
175
- messages, tools=openai_format_tools, add_generation_prompt=True, tokenize=False
176
- )
177
-
178
  inputs = tokenizer.apply_chat_template(
179
  messages, tools=openai_format_tools, add_generation_prompt=True, return_tensors="pt"
180
  ).to(model.device)
@@ -187,7 +180,7 @@ outputs = model.generate(
187
  num_return_sequences=1,
188
  eos_token_id=[tokenizer.eos_token_id, 128009],
189
  )
190
- response = outputs[0][input_ids.shape[-1]:]
191
 
192
  print("Here Output:", tokenizer.decode(response, skip_special_tokens=True))
193
 
@@ -299,27 +292,6 @@ def parse_nested_value(value):
299
  )
300
  return repr(value)
301
 
302
-
303
- def decoded_output_to_execution_list(decoded_output):
304
- """
305
- Convert decoded output to a list of executable function calls.
306
-
307
- Args:
308
- decoded_output (list): A list of dictionaries representing function calls.
309
-
310
- Returns:
311
- list: A list of strings, each representing an executable function call.
312
- """
313
- execution_list = []
314
- for function_call in decoded_output:
315
- for key, value in function_call.items():
316
- args_str = ", ".join(
317
- f"{k}={parse_nested_value(v)}" for k, v in value.items()
318
- )
319
- execution_list.append(f"{key}({args_str})")
320
- return execution_list
321
-
322
-
323
  def default_decode_ast_prompting(result, language="Python"):
324
  result = result.strip("`\n ")
325
  if not result.startswith("["):
 
101
 
102
  ## Function-Call Example
103
  ```python
 
104
  import torch
105
  from transformers import AutoModelForCausalLM, AutoTokenizer
 
106
  import ast
 
107
  model_name = "scb10x/llama3.1-typhoon2-8b-instruct"
108
  tokenizer = AutoTokenizer.from_pretrained(model_name)
109
  model = AutoModelForCausalLM.from_pretrained(
110
+ model_name, torch_dtype=torch.bfloat16, device_map='auto'
111
  )
112
 
113
  get_weather_api = {
 
168
  {"role": "user", "content": "ขอราคาหุ้น Tasla (TLS) และ Amazon (AMZ) ?"},
169
  ]
170
 
 
 
 
 
171
  inputs = tokenizer.apply_chat_template(
172
  messages, tools=openai_format_tools, add_generation_prompt=True, return_tensors="pt"
173
  ).to(model.device)
 
180
  num_return_sequences=1,
181
  eos_token_id=[tokenizer.eos_token_id, 128009],
182
  )
183
+ response = outputs[0][inputs.shape[-1]:]
184
 
185
  print("Here Output:", tokenizer.decode(response, skip_special_tokens=True))
186
 
 
292
  )
293
  return repr(value)
294
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
295
  def default_decode_ast_prompting(result, language="Python"):
296
  result = result.strip("`\n ")
297
  if not result.startswith("["):