iproskurina
committed on
Commit
•
bf125eb
1
Parent(s):
2b8b9ca
Update README.md
Browse files
README.md
CHANGED
@@ -101,4 +101,4 @@ tokenizer = AutoTokenizer.from_pretrained(pretrained_model_dir, use_fast=True)
|
|
101 |
model = AutoGPTQForCausalLM.from_quantized(pretrained_model_dir, device="cuda:0", model_basename="model")
|
102 |
pipeline = TextGenerationPipeline(model=model, tokenizer=tokenizer)
|
103 |
print(pipeline("auto-gptq is")[0]["generated_text"])
|
104 |
-
```
|
|
|
101 |
model = AutoGPTQForCausalLM.from_quantized(pretrained_model_dir, device="cuda:0", model_basename="model")
|
102 |
pipeline = TextGenerationPipeline(model=model, tokenizer=tokenizer)
|
103 |
print(pipeline("auto-gptq is")[0]["generated_text"])
|
104 |
+
```
|