acecalisto3 committed on
Commit
62cc7a7
1 Parent(s): f0b67da

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +150 -101
app.py CHANGED
@@ -1,101 +1,150 @@
1
-
2
- from flask import Flask, request, jsonify
3
- import requests
4
- import json
5
-
6
- app = Flask(__name__)
7
-
8
class HuggingFace3DModels:
    """Thin client for Hugging Face Inference API endpoints used for
    texture / lighting / face / 3D-model tasks.

    Each public method wraps one named model endpoint and returns the
    decoded JSON response.
    """

    def __init__(self, api_token, timeout=30):
        """
        Args:
            api_token: Hugging Face API bearer token.
            timeout: Per-request timeout in seconds (new, defaulted, so
                existing callers are unaffected). The original code sent
                requests with no timeout and could hang forever on a
                stalled endpoint.
        """
        self.api_token = api_token
        self.api_url = "https://api-inference.huggingface.co/models/"
        self.timeout = timeout

    def _request_model(self, model_name, inputs):
        """POST {"inputs": inputs} to *model_name* and return decoded JSON.

        Raises:
            Exception: on any non-200 response, including status and body.
        """
        headers = {
            "Authorization": f"Bearer {self.api_token}",
            "Content-Type": "application/json",
        }
        # json= lets requests serialize the payload itself; timeout bounds
        # how long a dead endpoint can block the caller.
        response = requests.post(
            f"{self.api_url}{model_name}",
            headers=headers,
            json={"inputs": inputs},
            timeout=self.timeout,
        )
        if response.status_code == 200:
            return response.json()
        raise Exception(f"Error: {response.status_code} - {response.text}")

    def modify_texture(self, texture_type):
        """Apply a texture modification of the given type."""
        return self._request_model("texture-modifier", {"texture": texture_type})

    def adjust_lighting(self, intensity):
        """Adjust scene lighting to the given intensity."""
        return self._request_model("lighting-adjuster", {"intensity": intensity})

    def change_perspective(self, perspective):
        """Change the rendered view to the given perspective."""
        return self._request_model("perspective-changer", {"view": perspective})

    def alter_face(self, features):
        """Alter facial features according to *features*."""
        return self._request_model("face-alteration-model", {"features": features})

    def generate_face(self, characteristics):
        """Generate a face with the given characteristics."""
        return self._request_model("face-generator", {"characteristics": characteristics})

    def convert_text_to_3d_mesh(self, text):
        """Convert a text description into a 3D mesh."""
        return self._request_model("text-to-3d-mesh", {"text": text})

    def model_3d_object(self, shape):
        """Model a 3D object of the given shape."""
        return self._request_model("3d-object-modeler", {"shape": shape})
48
-
49
@app.route('/modify_texture', methods=['POST'])
def modify_texture():
    """POST endpoint: forward the requested texture type to the modeler."""
    payload = request.json
    texture_type = payload.get('texture_type')
    return jsonify(modeler.modify_texture(texture_type))
55
-
56
@app.route('/adjust_lighting', methods=['POST'])
def adjust_lighting():
    """POST endpoint: forward the requested lighting intensity to the modeler."""
    payload = request.json
    intensity = payload.get('intensity')
    return jsonify(modeler.adjust_lighting(intensity))
62
-
63
@app.route('/change_perspective', methods=['POST'])
def change_perspective():
    """POST endpoint: forward the requested perspective to the modeler."""
    payload = request.json
    perspective = payload.get('perspective')
    return jsonify(modeler.change_perspective(perspective))
69
-
70
@app.route('/alter_face', methods=['POST'])
def alter_face():
    """POST endpoint: forward the requested facial features to the modeler."""
    payload = request.json
    features = payload.get('features')
    return jsonify(modeler.alter_face(features))
76
-
77
@app.route('/generate_face', methods=['POST'])
def generate_face():
    """POST endpoint: forward the requested face characteristics to the modeler."""
    payload = request.json
    characteristics = payload.get('characteristics')
    return jsonify(modeler.generate_face(characteristics))
83
-
84
@app.route('/convert_text_to_3d_mesh', methods=['POST'])
def convert_text_to_3d_mesh():
    """POST endpoint: forward the text description to the text-to-mesh model."""
    payload = request.json
    text = payload.get('text')
    return jsonify(modeler.convert_text_to_3d_mesh(text))
90
-
91
@app.route('/model_3d_object', methods=['POST'])
def model_3d_object():
    """POST endpoint: forward the requested shape to the 3D-object modeler."""
    payload = request.json
    shape = payload.get('shape')
    return jsonify(modeler.model_3d_object(shape))
97
-
98
if __name__ == '__main__':
    import os  # local import keeps this fix self-contained to the script entry point

    # Read the token from the environment rather than shipping a placeholder
    # literal in source; the placeholder remains the fallback for dev use.
    api_token = os.environ.get("HF_API_TOKEN", "YOUR_HUGGING_FACE_API_TOKEN")
    modeler = HuggingFace3DModels(api_token)
    # Bug fix: the original passed port=5001/5002, which Python evaluates as
    # the float 0.9998... (true division), not a valid port number.
    app.run(host='0.0.0.0', port=5001)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import csv
import datetime
import hashlib
import logging
import os
import random
import threading
import time

import gradio as gr
import yaml
from huggingface_hub import InferenceClient
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from webdriver_manager.chrome import ChromeDriverManager
15
+
16
# Logging: timestamped INFO-level messages for the whole module.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
)

# Timestamp captured once at import time.
DATE_TIME_STR = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

# Mission statement for the monitoring agent.
PURPOSE = f"You go to Culvers sites, you continuously seek changes on them since your last observation. Anything new that gets logged and dumped into csv, stored in your log folder at user/app/scraped_data."

# Mutable module-level state: combined chat / change-detection history
# and the currently running task description.
HISTORY = []
CURRENT_TASK = None

# Default CSV output path; make sure its parent directory exists up front
# so appends never fail on a missing folder.
DEFAULT_FILE_PATH = "user/app/scraped_data/culver/culvers_changes.csv"
os.makedirs(os.path.dirname(DEFAULT_FILE_PATH), exist_ok=True)
28
+
29
# Function to monitor URLs for changes
def monitor_urls(storage_location, urls, scrape_interval, content_type):
    """Poll *urls* forever and append a CSV row to *storage_location*
    whenever a page's content hash changes.

    Args:
        storage_location: Path of the CSV file rows are appended to.
        urls: List of URLs to watch.
        scrape_interval: Minutes to sleep between polling rounds.
        content_type: "text", "media", or anything else (treated as text).

    NOTE(review): this loops indefinitely and never returns normally;
    callers should run it on a background thread.
    """
    global HISTORY
    previous_hashes = [""] * len(urls)

    try:
        with webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=Options()) as driver:
            while True:
                for i, url in enumerate(urls):
                    try:
                        driver.get(url)
                        time.sleep(2)  # crude wait for the page to load
                        if content_type == "media":
                            # Bug fix: find_elements_by_tag_name was removed in
                            # Selenium 4; use find_elements(By.TAG_NAME, ...).
                            # Also hash the image src attributes instead of the
                            # WebElement objects: str(WebElement) embeds object
                            # ids that differ every poll, so the original flagged
                            # a spurious change on every round in "media" mode.
                            images = driver.find_elements(By.TAG_NAME, "img")
                            current_content = "".join(
                                img.get_attribute("src") or "" for img in images
                            )
                        else:
                            # "text" and any unknown type fall back to full page source.
                            current_content = driver.page_source
                        current_hash = hashlib.md5(str(current_content).encode('utf-8')).hexdigest()
                        if current_hash != previous_hashes[i]:
                            previous_hashes[i] = current_hash
                            date_time_str = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                            HISTORY.append(f"Change detected at {url} on {date_time_str}")
                            with open(storage_location, "a", newline="") as csvfile:
                                csv_writer = csv.DictWriter(csvfile, fieldnames=["date", "time", "url", "change"])
                                csv_writer.writerow({"date": date_time_str.split()[0], "time": date_time_str.split()[1], "url": url, "change": "Content changed"})
                            logging.info(f"Change detected at {url} on {date_time_str}")
                    except Exception as e:
                        # Best-effort: one bad URL must not stop the whole round.
                        logging.error(f"Error accessing {url}: {e}")
                time.sleep(scrape_interval * 60)  # check every scrape_interval minutes
    except Exception as e:
        logging.error(f"Error starting ChromeDriver: {e}")
61
+
62
# Define main function to handle user input
def handle_input(storage_location, urls, scrape_interval, content_type):
    """Record the monitoring task in module state and start the URL monitor.

    Blocks inside monitor_urls (which loops forever); the return value is
    only reached if the monitor exits due to a driver startup failure.

    Bug fix: the original returned TASK_PROMPT.format(...), but TASK_PROMPT
    is not defined anywhere in the module, so the return raised NameError.
    """
    global CURRENT_TASK, HISTORY

    CURRENT_TASK = f"Monitoring URLs: {', '.join(urls)}"
    HISTORY.append(f"Task started: {CURRENT_TASK}")
    monitor_urls(storage_location, urls, scrape_interval, content_type)
    return f"Task: {CURRENT_TASK}\nHistory:\n" + "\n".join(map(str, HISTORY))
70
+
71
# Load per-agent prompt overrides from disk; when the YAML file is absent,
# fall back to empty strings so the module still imports cleanly.
try:
    with open("custom_prompts.yaml", "r") as prompt_file:
        custom_prompts = yaml.safe_load(prompt_file)
except FileNotFoundError:
    custom_prompts = {
        key: ""
        for key in (
            "WEB_DEV",
            "AI_SYSTEM_PROMPT",
            "PYTHON_CODE_DEV",
            "CODE_GENERATION",
            "CODE_INTERPRETATION",
            "CODE_TRANSLATION",
            "CODE_IMPLEMENTATION",
        )
    }
77
+
78
# Agent role names selectable for prompt specialisation; these mirror the
# keys of the custom_prompts fallback dict.
AGENTS = ["WEB_DEV", "AI_SYSTEM_PROMPT", "PYTHON_CODE_DEV", "CODE_GENERATION", "CODE_INTERPRETATION", "CODE_TRANSLATION", "CODE_IMPLEMENTATION"]

# Shared Hugging Face inference client for the Mixtral instruct model;
# used by respond() for chat completions.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
83
+
84
# Define the chat response function
def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Return the model's reply to *message* given prior *history*.

    Bug fix: the original delegated to generate(...), which is not defined
    anywhere in the module and raised NameError on every call. This now
    uses the module-level InferenceClient directly.
    """
    messages = [{"role": "system", "content": system_message}]
    for item in history:
        # HISTORY mixes (user, assistant) tuples from the chat with plain
        # change-log strings from the monitor; only replay the chat turns.
        if isinstance(item, tuple) and len(item) == 2:
            user_msg, bot_msg = item
            messages.append({"role": "user", "content": user_msg})
            if bot_msg:
                messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})
    completion = client.chat_completion(
        messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
    )
    return completion.choices[0].message.content
87
+
88
def start_scraping(storage_location, url1, url2, url3, url4, url5, url6, url7, url8, url9, url10, scrape_interval, content_type):
    """Start monitoring the non-empty URLs and return a status string.

    Bug fixes vs. the original:
    - handle_input() blocks forever, so the original's subsequent
      `while True` loop and `return` were unreachable; the monitor now
      runs on a daemon thread so this callback can return to the UI.
    - The original called inspector.start_transaction/end_transaction on
      an `inspector` name that is never defined (NameError); removed.
    """
    urls = [url for url in [url1, url2, url3, url4, url5, url6, url7, url8, url9, url10] if url]
    if not urls:
        return "No URLs provided."
    # Daemon thread: dies with the process, never blocks the Gradio callback.
    monitor_thread = threading.Thread(
        target=handle_input,
        args=(storage_location, urls, scrape_interval, content_type),
        daemon=True,
    )
    monitor_thread.start()
    return f"Started scraping {', '.join(urls)} every {scrape_interval} minutes."
100
+
101
# Function to display CSV content
def display_csv(storage_location):
    """Return the raw text of the CSV at *storage_location*, or a
    placeholder message when the file does not exist yet."""
    if not os.path.exists(storage_location):
        return "No data available."
    with open(storage_location, "r") as csv_file:
        return csv_file.read()
108
+
109
+ # Create Gradio interface
110
def chat_interface(message, system_message, max_tokens, temperature, top_p, storage_location, url1, url2, url3, url4, url5, url6, url7, url8, url9, url10, scrape_interval, content_type):
    """Gradio callback: get a model reply, record the exchange in the shared
    history, and return (history, "") to refresh the chatbot and clear the box.

    The scraper-related parameters are accepted (the UI wires them in) but
    are not used for chatting.
    """
    global HISTORY
    reply = respond(message, HISTORY, system_message, max_tokens, temperature, top_p)
    HISTORY.append((message, reply))
    return HISTORY, ""
115
+
116
# Gradio UI: left column holds chat controls plus scraper configuration,
# right column shows the chat history and latest response.
demo = gr.Blocks()

with demo:
    with gr.Row():
        with gr.Column():
            # Chat controls (fed to chat_interface on message submit).
            message = gr.Textbox(label="Message")
            system_message = gr.Textbox(value="You are a friendly Chatbot.", label="System message")
            max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
            temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
            top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
            # Scraper configuration: CSV output path and up to ten URLs
            # (blank URL boxes are filtered out by start_scraping).
            storage_location = gr.Textbox(value=DEFAULT_FILE_PATH, label="Storage Location")
            url1 = gr.Textbox(value="https://www.culver.k12.in/", label="URL 1")
            url2 = gr.Textbox(value="https://www.facebook.com/CulverCommunitySchools", label="URL 2")
            url3 = gr.Textbox(label="URL 3")
            url4 = gr.Textbox(label="URL 4")
            url5 = gr.Textbox(label="URL 5")
            url6 = gr.Textbox(label="URL 6")
            url7 = gr.Textbox(label="URL 7")
            url8 = gr.Textbox(label="URL 8")
            url9 = gr.Textbox(label="URL 9")
            url10 = gr.Textbox(label="URL 10")
            scrape_interval = gr.Slider(minimum=1, maximum=60, value=5, step=1, label="Scrape Interval (minutes)")
            content_type = gr.Radio(choices=["text", "media", "both"], value="text", label="Content Type")
            start_button = gr.Button("Start Scraping")
            csv_output = gr.Textbox(label="CSV Output", interactive=False)

        with gr.Column():
            # Output widgets populated by chat_interface.
            chat_history = gr.Chatbot(label="Chat History")
            response_box = gr.Textbox(label="Response")

    # Event wiring: the button launches the scraper; pressing Enter in the
    # message box runs one chat turn.
    start_button.click(start_scraping, inputs=[storage_location, url1, url2, url3, url4, url5, url6, url7, url8, url9, url10, scrape_interval, content_type], outputs=csv_output)
    message.submit(chat_interface, inputs=[message, system_message, max_tokens, temperature, top_p, storage_location, url1, url2, url3, url4, url5, url6, url7, url8, url9, url10, scrape_interval, content_type], outputs=[chat_history, response_box])
148
+
149
if __name__ == "__main__":
    # Launch the Gradio app (blocking) when run as a script.
    demo.launch()