PuruAI committed
Commit 3301a66 · verified · 1 Parent(s): 7828e8b

Update app.py

Files changed (1): app.py +38 -11
app.py CHANGED
@@ -1,20 +1,47 @@
+ import os
  from transformers import pipeline
  from sentence_transformers import SentenceTransformer
- import os

- HF_TOKEN = os.getenv("HF_TOKEN")
+ # Models
+ MAIN_MODEL = "PuruAI/Medini_Intelligence"
+ FALLBACK_MODEL = "gpt2"
+ EMBEDDING_MODEL = "sentence-transformers/all-MiniLM-L6-v2"

- # Embedding model
+ # Check if token is set
+ HF_TOKEN = os.getenv("HUGGINGFACE_HUB_TOKEN")
+ if not HF_TOKEN:
+     print("⚠️ Warning: HUGGINGFACE_HUB_TOKEN not set. Private models may fail to load.")
+
+ # --- Load text generation model ---
  try:
-     embedding_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2", use_auth_token=HF_TOKEN)
+     generator = pipeline(
+         "text-generation",
+         model=MAIN_MODEL
+     )
+     print(f"✅ Loaded main model: {MAIN_MODEL}")
  except Exception as e:
-     print(f"Failed to load embedding model: {e}")
+     print(f"Failed to load main model: {e}")
+     print(f"⏩ Falling back to {FALLBACK_MODEL}")
+     generator = pipeline("text-generation", model=FALLBACK_MODEL)
+     print(f"✅ Loaded fallback model: {FALLBACK_MODEL}")

- # Main model
- MODEL_ID = "PuruAI/Medini_Intelligence"
- FALLBACK_MODEL = "gpt2"
+ # --- Load embedding model ---
  try:
-     generator = pipeline("text-generation", model=MODEL_ID, use_auth_token=HF_TOKEN)
+     embedder = SentenceTransformer(EMBEDDING_MODEL)
+     print(f"✅ Loaded embedding model: {EMBEDDING_MODEL}")
  except Exception as e:
-     print(f"Failed to load main model, falling back to GPT-2: {e}")
-     generator = pipeline("text-generation", model=FALLBACK_MODEL)
+     print(f"Failed to load embedding model: {e}")
+     embedder = None  # Safe fallback
+
+ # --- Example usage ---
+ prompt = "Once upon a time"
+ output = generator(prompt, max_length=50)
+ print("\n--- Generated Text ---")
+ print(output[0]['generated_text'])
+
+ if embedder:
+     sentences = ["Hello world", "How are you?"]
+     embeddings = embedder.encode(sentences)
+     print("\n--- Embeddings ---")
+     for s, emb in zip(sentences, embeddings):
+         print(f"{s}: {emb[:5]}...")  # print first 5 values for brevity
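Note that the updated app.py reads a token from the environment and warns when it is absent, but no longer forwards it to either loader, so a gated or private PuruAI/Medini_Intelligence checkpoint would still fall through to gpt2 unless the process already has cached credentials. A minimal sketch of forwarding the token explicitly, assuming reasonably recent transformers and sentence-transformers releases where the token keyword has replaced the deprecated use_auth_token, could look like this:

import os
from transformers import pipeline
from sentence_transformers import SentenceTransformer

MAIN_MODEL = "PuruAI/Medini_Intelligence"
EMBEDDING_MODEL = "sentence-transformers/all-MiniLM-L6-v2"

# Assumption: the Space exposes the secret as HF_TOKEN or HUGGINGFACE_HUB_TOKEN;
# None falls back to any cached `huggingface-cli login` credentials.
HF_TOKEN = os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_HUB_TOKEN")

# `token` is the current name for the argument formerly called `use_auth_token`.
generator = pipeline("text-generation", model=MAIN_MODEL, token=HF_TOKEN)
embedder = SentenceTransformer(EMBEDDING_MODEL, token=HF_TOKEN)

# max_new_tokens bounds only the generated continuation, unlike max_length,
# which also counts the prompt tokens.
print(generator("Once upon a time", max_new_tokens=50)[0]["generated_text"])

The gpt2 fallback in the committed version would wrap these calls in the same try/except; the only difference in this sketch is that the token read from the environment is actually passed through instead of being checked and then ignored.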