Update instructions
Browse files
README.md
CHANGED
@@ -50,8 +50,8 @@ We evaluate the benefits of pretraining DNA FM 7B by conducting a comprehensive
|
|
50 |
#### Embedding
|
51 |
```python
|
52 |
from genbio_finetune.tasks import Embed
|
53 |
-
model = Embed.from_config({"model.backbone": "dnafm"})
|
54 |
-
collated_batch = model.collate({"sequences": ["ACGT", "AGCT"]})
|
55 |
embedding = model(collated_batch)
|
56 |
print(embedding.shape)
|
57 |
print(embedding)
|
@@ -60,8 +60,8 @@ print(embedding)
|
|
60 |
```python
|
61 |
import torch
|
62 |
from genbio_finetune.tasks import SequenceClassification
|
63 |
-
model = SequenceClassification.from_config({"model.backbone": "dnafm", "model.n_classes": 2})
|
64 |
-
collated_batch = model.collate({"sequences": ["ACGT", "AGCT"]})
|
65 |
logits = model(collated_batch)
|
66 |
print(logits)
|
67 |
print(torch.argmax(logits, dim=-1))
|
@@ -70,8 +70,8 @@ print(torch.argmax(logits, dim=-1))
|
|
70 |
```python
|
71 |
import torch
|
72 |
from genbio_finetune.tasks import TokenClassification
|
73 |
-
model = TokenClassification.from_config({"model.backbone": "dnafm", "model.n_classes": 3})
|
74 |
-
collated_batch = model.collate({"sequences": ["ACGT", "AGCT"]})
|
75 |
logits = model(collated_batch)
|
76 |
print(logits)
|
77 |
print(torch.argmax(logits, dim=-1))
|
@@ -79,8 +79,8 @@ print(torch.argmax(logits, dim=-1))
|
|
79 |
#### Regression
|
80 |
```python
|
81 |
from genbio_finetune.tasks import SequenceRegression
|
82 |
-
model = SequenceRegression.from_config({"model.backbone": "dnafm"})
|
83 |
-
collated_batch = model.collate({"sequences": ["ACGT", "AGCT"]})
|
84 |
logits = model(collated_batch)
|
85 |
print(logits)
|
86 |
```
|
@@ -89,7 +89,7 @@ print(logits)
|
|
89 |
gbft fit --model SequenceClassification --model.backbone dnafm --data SequenceClassification --data.path <hf_or_local_path_to_your_dataset>
|
90 |
gbft test --model SequenceClassification --model.backbone dnafm --data SequenceClassification --data.path <hf_or_local_path_to_your_dataset>
|
91 |
```
|
92 |
-
For more information, visit: [Model Generator](https://github.com/genbio-ai/modelgenerator)
|
93 |
|
94 |
|
95 |
## Citation
|
|
|
50 |
#### Embedding
|
51 |
```python
|
52 |
from genbio_finetune.tasks import Embed
|
53 |
+
model = Embed.from_config({"model.backbone": "dnafm"}).eval()
|
54 |
+
collated_batch = model.collate({"sequences": ["ACGT", "AGCT"]})
|
55 |
embedding = model(collated_batch)
|
56 |
print(embedding.shape)
|
57 |
print(embedding)
|
|
|
60 |
```python
|
61 |
import torch
|
62 |
from genbio_finetune.tasks import SequenceClassification
|
63 |
+
model = SequenceClassification.from_config({"model.backbone": "dnafm", "model.n_classes": 2}).eval()
|
64 |
+
collated_batch = model.collate({"sequences": ["ACGT", "AGCT"]})
|
65 |
logits = model(collated_batch)
|
66 |
print(logits)
|
67 |
print(torch.argmax(logits, dim=-1))
|
|
|
70 |
```python
|
71 |
import torch
|
72 |
from genbio_finetune.tasks import TokenClassification
|
73 |
+
model = TokenClassification.from_config({"model.backbone": "dnafm", "model.n_classes": 3}).eval()
|
74 |
+
collated_batch = model.collate({"sequences": ["ACGT", "AGCT"]})
|
75 |
logits = model(collated_batch)
|
76 |
print(logits)
|
77 |
print(torch.argmax(logits, dim=-1))
|
|
|
79 |
#### Regression
|
80 |
```python
|
81 |
from genbio_finetune.tasks import SequenceRegression
|
82 |
+
model = SequenceRegression.from_config({"model.backbone": "dnafm"}).eval()
|
83 |
+
collated_batch = model.collate({"sequences": ["ACGT", "AGCT"]})
|
84 |
logits = model(collated_batch)
|
85 |
print(logits)
|
86 |
```
|
|
|
89 |
gbft fit --model SequenceClassification --model.backbone dnafm --data SequenceClassification --data.path <hf_or_local_path_to_your_dataset>
|
90 |
gbft test --model SequenceClassification --model.backbone dnafm --data SequenceClassification --data.path <hf_or_local_path_to_your_dataset>
|
91 |
```
|
92 |
+
For more information, visit: [Model Generator](https://github.com/genbio-ai/modelgenerator)
|
93 |
|
94 |
|
95 |
## Citation
|