Shankarm08 committed
Commit
fba67f5
1 Parent(s): f1cfe0e

Update app.py

Files changed (1)
  1. app.py +6 -19
app.py CHANGED
@@ -1,23 +1,10 @@
- from transformers import AutoTokenizer, AutoModelForCausalLM
- from fastapi import FastAPI, Body
-
- import huggingface_hub
+ import hf_api

- huggingface_hub.login(username="shankarm08", password="cskrockz123")
+ # Authenticate with Hugging Face
+ hf_api.set_access_token(None) # Use the default access token

- app = FastAPI()
+ # Load model directly
+ from transformers import AutoTokenizer, AutoModelForCausalLM

- # Load pre-trained model and tokenizer
  tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B")
- model = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3-8B")
-
- @app.post("/generate")
- async def generate_text(input_text: str = Body(..., embed=True)):
-     input_ids = tokenizer.encode(input_text, return_tensors='pt')
-     output = model.generate(input_ids, max_length=50, num_beams=4)
-     generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
-     return {"generated_text": generated_text}
-
- if __name__ == "__main__":
-     import uvicorn
-     uvicorn.run(app, host="0.0.0.0", port=8000)
+ model = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3-8B")
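For comparison, here is a minimal sketch (not part of this commit) of the same authenticate-and-load flow using huggingface_hub.login() with a token read from an environment variable; the HF_TOKEN variable name is an assumption, and meta-llama/Meta-Llama-3-8B is a gated repository, so the token must belong to an account that has been granted access.

# Sketch only, not the committed app.py: authenticate with a user token and
# load the gated Llama 3 model. Assumes an HF_TOKEN environment variable and
# that huggingface_hub and transformers are installed.
import os

from huggingface_hub import login
from transformers import AutoTokenizer, AutoModelForCausalLM

token = os.environ.get("HF_TOKEN")  # hypothetical env var; keeps credentials out of the code
if token:
    login(token=token)  # registers the token so the gated weights can be downloaded

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B")
model = AutoModelForCausalLM.from_pretrained("meta-llama/Meta-Llama-3-8B")

Reading the token from the environment keeps credentials out of the repository, unlike the removed huggingface_hub.login(username=..., password=...) call, which hardcoded them in app.py.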