PuruAI committed on
Commit
e69b959
·
verified ·
1 Parent(s): 4f72f98

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -36
app.py CHANGED
@@ -1,44 +1,34 @@
1
  import os
2
  from transformers import pipeline
3
- from sentence_transformers import SentenceTransformer
4
 
5
- # --- Hugging Face Models ---
6
- Medini_AI = "PuruAI/Medini_Intelligence" # Main model (variable name cleaned)
7
- FALLBACK_MODEL = "gpt2" # Fallback model
8
- EMBEDDING_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
9
 
10
- # --- Hugging Face Token from Environment ---
11
- HF_TOKEN = os.getenv("HUGGINGFACE_HUB_TOKEN")
12
- if not HF_TOKEN:
13
- print("⚠️ Warning: HUGGINGFACE_HUB_TOKEN not set. Private models may fail to load.")
14
 
15
- # --- Load Main Text Generation Model ---
16
- try:
17
- generator = pipeline("text-generation", model=Medini_AI)
18
- print(f"✅ Loaded main model: Medini_AI ({Medini_AI})")
19
- except Exception as e:
20
- print(f"❌ Failed to load Medini_AI: {e}")
21
- print(f"⏩ Falling back to {FALLBACK_MODEL}")
22
- generator = pipeline("text-generation", model=FALLBACK_MODEL)
23
- print(f"✅ Loaded fallback model: {FALLBACK_MODEL}")
24
 
25
- # --- Load Embedding Model ---
26
- try:
27
- embedder = SentenceTransformer(EMBEDDING_MODEL)
28
- print(f" Loaded embedding model: {EMBEDDING_MODEL}")
29
- except Exception as e:
30
- print(f" Failed to load embedding model: {e}")
31
- embedder = None # Safe fallback
 
 
 
 
 
32
 
33
- # --- Example Usage ---
34
- prompt = "Once upon a time"
35
- output = generator(prompt, max_length=50)
36
- print("\n--- Generated Text ---")
37
- print(output[0]['generated_text'])
38
 
39
- if embedder:
40
- sentences = ["Hello world", "How are you?"]
41
- embeddings = embedder.encode(sentences)
42
- print("\n--- Embeddings ---")
43
- for s, emb in zip(sentences, embeddings):
44
- print(f"{s}: {emb[:5]}...") # print first 5 values for brevity
 
1
  import os
2
  from transformers import pipeline
 
3
 
4
# --- Hugging Face token setup ---
# Option 1: set it here for local runs (replace the placeholder).
# Option 2: set HUGGINGFACE_HUB_TOKEN externally (Docker, cloud, etc.).
# setdefault keeps an externally-provided token intact; the original
# unconditional assignment overwrote a real token with the placeholder,
# which broke Option 2 entirely.
os.environ.setdefault("HUGGINGFACE_HUB_TOKEN", "YOUR_HUGGINGFACE_TOKEN_HERE")
# NOTE(review): never commit a real token into source — prefer Option 2.

# Model names: the primary model, plus a small public fallback used when
# the primary fails to load (private model, bad token, network error...).
MEDINI_MODEL = "PuruAI/Medini_Intelligence"
FALLBACK_MODEL = "gpt2"
 
 
 
 
 
 
14
 
15
# Function to safely load a model
def load_model(model_name, fallback_model):
    """Load a text-generation pipeline, falling back to a backup model.

    Args:
        model_name: Hugging Face model id to try first.
        fallback_model: model id loaded instead if the first one fails.

    Returns:
        A transformers text-generation pipeline for whichever model loaded.

    Raises:
        RuntimeError: if the fallback model cannot be loaded either.
    """
    try:
        print(f"Loading model: {model_name}")
        generator = pipeline("text-generation", model=model_name)
        print(" Model loaded successfully!")
        return generator
    except Exception as e:
        print(f"⚠️ Failed to load {model_name}: {e}")
        print(f"⏩ Falling back to {fallback_model}")
        try:
            return pipeline("text-generation", model=fallback_model)
        except Exception as fallback_err:
            # Previously an unguarded call: a bad fallback id surfaced as a
            # raw traceback. Fail with an explicit, actionable message.
            raise RuntimeError(
                f"Could not load '{model_name}' or fallback '{fallback_model}'"
            ) from fallback_err
27
 
28
# Load Medini model or fallback. Kept at module level so code that does
# `from app import generator` keeps working.
generator = load_model(MEDINI_MODEL, FALLBACK_MODEL)

# Example usage — guarded so merely importing this module does not run
# the demo generation (the original ran it unconditionally on import).
if __name__ == "__main__":
    prompt = "Hello Medini AI, can you explain AI agents?"
    # NOTE(review): `max_length` counts the prompt tokens too; use
    # `max_new_tokens` instead if only the completion length should be capped.
    result = generator(prompt, max_length=100, do_sample=True)
    print(result)