Update app.py
app.py CHANGED
@@ -1,14 +1,13 @@
-
-#from google.cloud import aiplatform
-from transformers import pipeline
-from google.cloud import aiplatform
+# Import necessary libraries
 import os
 import logging
-import
-from
-import
-
-
+from transformers import pipeline
+from google.cloud import aiplatform
+from google.oauth2 import service_account
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
 
 # Set up Google Cloud credentials
 def setup_google_cloud_credentials():
@@ -19,31 +18,49 @@ def setup_google_cloud_credentials():
     else:
         raise FileNotFoundError(f"Google Cloud credentials file not found at {google_credentials_path}")
 
-# Initialize the client
-def
+# Initialize the client for Google Cloud AI Platform
+def init_google_ai_platform(api_key):
     aiplatform.init(api_key=api_key)
 
-# Function to generate
-def
+# Function to generate a response using Hugging Face
+def generate_hugging_face_response(prompt):
+    # Load a Hugging Face model for text generation
+    generator = pipeline('text-generation', model='gpt2')
+    # Generate text based on the prompt
+    response = generator(prompt, max_length=50, num_return_sequences=1)
+    return response[0]['generated_text']
+
+# Function to generate a response using Google Cloud AI Platform
+def generate_google_ai_response(prompt):
     response = aiplatform.Model.predict(
         model_name="gemini-1.5-flash",
         instances=[{"prompt": prompt}]
     )
     return response.predictions[0]['content']
 
-# Main
-
+# Main function to run the application
+def main():
     try:
         # Set up Google Cloud credentials
         setup_google_cloud_credentials()
 
-        #
+        # Initialize Google Cloud AI Platform
         api_key = "AIzaSyD-49IyRzS6Ok_zymcEdv1QADw0rWQJFI4"
-
-
-
+        init_google_ai_platform(api_key)
+
+        # Example prompt
+        prompt = "Tell me about heart health."
+
+        # Generate response from Hugging Face
+        hf_response = generate_hugging_face_response(prompt)
+        logger.info(f"Hugging Face Response: {hf_response}")
 
-        #
+        # Generate response from Google Cloud AI Platform
+        google_response = generate_google_ai_response(prompt)
+        logger.info(f"Google AI Response: {google_response}")
 
     except Exception as e:
-
+        logger.error(f"Error occurred: {e}")
+
+if __name__ == "__main__":
+    main()
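Note on the Gemini call: as committed, generate_google_ai_response relies on aiplatform.init(api_key=...) followed by aiplatform.Model.predict(model_name=..., instances=...). In the google-cloud-aiplatform SDK, predict is normally called on a model or endpoint instance, and Gemini models are usually reached through the Vertex AI generative-models interface with Application Default Credentials rather than a raw API key, so the call as written may not run. Below is a minimal sketch, not part of the commit, of how the same call is commonly made; the GCP_PROJECT environment variable and the us-central1 location are illustrative placeholders.

# Hypothetical sketch (not from the commit): calling gemini-1.5-flash through
# the Vertex AI SDK with Application Default Credentials instead of an API key.
import os

import vertexai
from vertexai.generative_models import GenerativeModel

def generate_google_ai_response(prompt: str) -> str:
    # GCP_PROJECT is an assumed environment variable, not defined in app.py.
    vertexai.init(project=os.environ["GCP_PROJECT"], location="us-central1")
    model = GenerativeModel("gemini-1.5-flash")
    response = model.generate_content(prompt)
    return response.text

Reading the project (and any secret) from the environment also avoids committing a literal key string like the one hardcoded in the new app.py.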