# What does this model do?

This model converts natural language input into a Kusto (KQL) query. It is a fine-tuned CodeT5+ 220M model and is part of the nl2query repository: https://github.com/Chirayu-Tripathi/nl2query
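
As a quick illustration of the expected input, the model is prompted with the natural-language question and the table schema joined into a single string. The format below is taken from the example further down this card; the `question` and `schema` names are only for illustration.

```python
# Build the prompt: "kusto: <question> | <table> : <col1>, <col2>, ..."
# (format taken from the example later in this card)
question = "find the session ids which have duration greater than 10 and having Manoj Raheja as the owner"
schema = (
    "conferencesessions : conference, sessionid, session_title, session_type, owner, "
    "participants, URL, level, session_location, starttime, duration, "
    "time_and_duration, kusto_affinity"
)
textual_query = f"kusto: {question} | {schema}"
```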

You can use this model via the GitHub repository or via the following code. More information can be found in the repository.

```python
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the fine-tuned checkpoint and its tokenizer
model = AutoModelForSeq2SeqLM.from_pretrained("Chirayu/nl2kql")
tokenizer = AutoTokenizer.from_pretrained("Chirayu/nl2kql")

# Run on a GPU if one is available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = model.to(device)

# Example prompt: the question and the table schema serialized into one string
textual_query = '''kusto: find the session ids which have duration greater than 10 and having Manoj Raheja as the owner | conferencesessions : conference, sessionid, session_title, session_type, owner, participants, URL, level, session_location, starttime, duration, time_and_duration, kusto_affinity'''
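
# Assumed usage sketch (not taken from the repository): the generate_query
# helper defined below is expected to wrap these standard transformers steps;
# max_new_tokens is an illustrative choice here.
inputs = tokenizer(textual_query, return_tensors="pt").to(device)
output_ids = model.generate(**inputs, num_beams=10, max_new_tokens=128)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))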

def generate_query(
    textual_query: str,
    num_beams: int = 10,