Update README.md
Browse files
README.md
CHANGED
@@ -20,17 +20,34 @@ Example of usage with vLLM library:
```diff
 20   ```python
 21   from vllm import LLM, SamplingParams
 22   
 23  -
 24  -
 25  -
 26  -
 27  -
 28   
 29   sampling_params = SamplingParams(temperature=0.8)
 30   
 31   llm = LLM(model="disi-unibo-nlp/pmc-llama-13b-awq", quantization="awq", dtype="half")
 32   
 33  -outputs = llm.generate(
 34   
 35   # Print the outputs.
 36   for output in outputs:
```
```diff
 20   ```python
 21   from vllm import LLM, SamplingParams
 22   
 23  +tokenizer = AutoTokenizer.from_pretrained('axiong/PMC_LLaMA_13B')
 24  +
 25  +prompt_input = (
 26  +    'Below is an instruction that describes a task, paired with an input that provides further context.'
 27  +    'Write a response that appropriately completes the request.\n\n'
 28  +    '### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:'
 29  +)
 30  +
 31  +example = {
 32  +    "instruction": "You're a doctor, kindly address the medical queries according to the patient's account. Answer with the best option directly.",
 33  +    "input": (
 34  +        "###Question: A 23-year-old pregnant woman at 22 weeks gestation presents with burning upon urination. "
 35  +        "She states it started 1 day ago and has been worsening despite drinking more water and taking cranberry extract. "
 36  +        "She otherwise feels well and is followed by a doctor for her pregnancy. "
 37  +        "Her temperature is 97.7°F (36.5°C), blood pressure is 122/77 mmHg, pulse is 80/min, respirations are 19/min, and oxygen saturation is 98% on room air."
 38  +        "Physical exam is notable for an absence of costovertebral angle tenderness and a gravid uterus. "
 39  +        "Which of the following is the best treatment for this patient?"
 40  +        "###Options: A. Ampicillin B. Ceftriaxone C. Doxycycline D. Nitrofurantoin"
 41  +    )
 42  +}
 43  +
 44  +prompt_batch = [prompt_input.format_map(example)]
 45   
 46   sampling_params = SamplingParams(temperature=0.8)
 47   
 48   llm = LLM(model="disi-unibo-nlp/pmc-llama-13b-awq", quantization="awq", dtype="half")
 49   
 50  +outputs = llm.generate(prompt_batch, sampling_params)
 51   
 52   # Print the outputs.
 53   for output in outputs:
```