---
license: apache-2.0
datasets:
  - kaxap/llama2-sql-instruct-sys-prompt
pipeline_tag: text-generation
---

## 💻 Usage

```python
# pip install transformers accelerate

from transformers import AutoTokenizer
import transformers
import torch

model = "ataberkd/llama-2-7b-SQL_FINETUNED_10K"
prompt = 'You are an expert in SQL and data analysis. Given the table structure described by the CREATE TABLE statement, write an SQL query that answers the question and give an explanation of the result you return. CREATE TABLE statement: CREATE TABLE "user" ( "name" text, "surname" text, "tel" text, "address" text, "performanceScore" text, "Age" text, "Language" text );. Question: "Can you retrieve the users who speak French and are older than 20?"'

# Load the tokenizer that matches the fine-tuned model
tokenizer = AutoTokenizer.from_pretrained(model)

# Build a text-generation pipeline; device_map="auto" places the model on the available GPU(s)
pipe = transformers.pipeline(
    "text-generation",
    model=model,
    torch_dtype=torch.float16,
    device_map="auto",
)

# Wrap the prompt in the Llama-2 instruction format and generate
sequences = pipe(
    f"<s>[INST] {prompt} [/INST]",
    do_sample=True,
    top_k=10,
    num_return_sequences=1,
    eos_token_id=tokenizer.eos_token_id,
    max_new_tokens=256,  # limit the length of the completion rather than the whole sequence
)
for seq in sequences:
    print(f"Result: {seq['generated_text']}")
```