erndgn committed
Commit
3158c60
1 Parent(s): 99d76c7

Update README.md

Files changed (1): README.md (+8 -0)

README.md CHANGED
```diff
@@ -49,16 +49,22 @@ Due to the diverse nature of the training data, which includes websites, books,
 import torch
 from transformers import AutoTokenizer, GPT2LMHeadModel
 from transformers import pipeline
+
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 device_id = 0 if torch.cuda.is_available() else -1
+
 model = GPT2LMHeadModel.from_pretrained("ytu-ce-cosmos/turkish-gpt2-medium-350m-instruct-v0.1").to(device)
+
 tokenizer = AutoTokenizer.from_pretrained("ytu-ce-cosmos/turkish-gpt2-medium-350m-instruct-v0.1")
+
 text_generator = pipeline('text-generation', model=model, tokenizer=tokenizer, device=device_id, max_new_tokens=256)
+
 def get_model_response(instruction):
     instruction_prompt = f"### Kullanıcı:\n{instruction}\n### Asistan:\n"
     result = text_generator(instruction_prompt)
     generated_response = result[0]['generated_text']
     return generated_response[len(instruction_prompt):]
+
 model_response = get_model_response("Evde egzersiz yapmanın avantajlarını açıkla.")
 print(model_response)
 """
@@ -70,9 +76,11 @@ To use the chat template:
 
 ```python
 chat_generator = pipeline("conversational", model=model, tokenizer=tokenizer, device=device_id, max_new_tokens=256)
+
 messages = [
     {"role": "user", "content": "Evde egzersiz yapmanın avantajlarını açıkla."}
 ]
+
 chat_outputs = chat_generator(messages)
 print(chat_outputs)
 """
```