acecalisto3 commited on
Commit
83c0150
1 Parent(s): 34ee580

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +304 -48
app.py CHANGED
@@ -1,71 +1,327 @@
 
1
  import os
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  import gradio as gr
3
- import transformers
4
- import blackboxai
5
 
6
- # Set up the Hugging Face Transformers library
7
- model_name = "bert-base-uncased"
8
- tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
9
- model = transformers.AutoModel.from_pretrained(model_name)
10
 
11
- # Set up the Blackbox.ai API client
12
- blackbox_client.Client.from_api_key(os.environ["BLACKBOX_API_KEY"])
 
 
 
13
 
14
- # Define the user interface for the app
15
- def run_model(input_text):
16
- # Tokenize the input text
17
- inputs = tokenizer(input_text, return_tensors="pt")
18
 
19
- # Run the model on the inputs
20
- outputs = model(**inputs)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
 
22
- # Extract the last hidden state from the model outputs
23
- last_hidden_states = outputs.last_hidden_state
 
 
24
 
25
- # Return the last hidden state as a string
26
- return last_hidden_states.detach().numpy().tolist()
27
 
28
- iface = gr.Interface(fn=run_model, inputs="text", outputs="text")
 
29
 
30
- # Define the GitHub bot functions
31
- def get_issues():
32
- # Code to get issues from the GitHub repository
33
- pass
 
 
 
 
 
 
 
 
 
34
 
35
- def fix_issue(issue):
36
- # Code to fix the issue on the local fork
37
- pass
38
 
39
- def push_fix():
40
- # Code to push the fix to the GitHub repository
41
- pass
42
 
43
- def comment_on_issue(issue, result):
44
- # Code to comment on the issue with the result
45
- pass
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
46
 
47
- # Define the main function to run the app and the bot
48
  def main():
49
- # Run the app
50
- iface.launch()
 
 
 
 
 
 
 
 
 
 
 
 
51
 
52
- # Get the issues from the GitHub repository
53
- issues = get_issues()
 
 
 
54
 
55
- # Loop through the issues
56
- for issue in issues:
57
- # Fix the issue on the local fork
58
- fixed_issue = fix_issue(issue)
59
 
60
- # Run the model on the fixed issue
61
- result = run_model(fixed_issue)
 
 
 
 
 
62
 
63
- # Push the fix to the GitHub repository
64
- push_fix()
 
 
65
 
66
- # Comment on the issue with the result
67
- comment_on_issue(issue, result)
68
 
69
- # Run the main function
70
  if __name__ == "__main__":
71
  main()
 
1
+ import argparse
2
  import os
3
+ import re
4
+ import subprocess
5
+ import sys
6
+ import zipfile
7
+ import random
8
+ import string
9
+ import shutil
10
+ import io
11
+ import webbrowser
12
+ from typing import List, Any, Dict, Union
13
+ from concurrent.futures import ThreadPoolExecutor, as_completed
14
+ import logging
15
+ import aiohttp
16
+ import asyncio
17
+ import hashlib
18
  import gradio as gr
19
+ from transformers import AutoTokenizer, pipeline
 
20
 
21
# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Matches "import openai" or "import openai_<suffix>" anywhere in a file.
# NOTE: the previous pattern, r'(?=import\s+)(?:openai|openai_.*)', required
# "import" and "openai" to match at the SAME position (a lookahead does not
# consume input), so it could never match anything and no dependency was
# ever detected.
OPENAI_REGEX = re.compile(r'\bimport\s+openai(?:_\w+)?\b')
GITIGNORE_CONTENT = '# Converted files will appear here\n'
WEBAPP_DIRNAME = 'webapp'
CACHE_DIR = './cache'
30
class OpenAIWrapper:
    """Placeholder adapter around OpenAI-style interactions.

    The concrete request/response methods are intentionally not
    implemented yet; instances currently carry no state or behavior.
    """
 
33
 
34
async def download_file(url: str) -> Union[bytes, None]:
    """Download *url* asynchronously with up to 3 attempts.

    Returns the raw response body on success, or None when every
    attempt fails (network error or non-200 status).
    """
    logging.info(f"Downloading content from {url}...")
    async with aiohttp.ClientSession() as session:
        for attempt in range(3):
            try:
                async with session.get(url) as response:
                    if response.status == 200:
                        logging.info("Download complete!")
                        return await response.read()
                    logging.error(f"Failed to download content from {url} (status code: {response.status})")
            except aiohttp.ClientError as e:
                logging.error(f"Error downloading {url}: {e}")
            # Exponential backoff before retrying. Previously only client
            # errors were followed by a delay; HTTP error statuses retried
            # immediately and hammered the server.
            if attempt < 2:
                await asyncio.sleep(2 ** attempt)
    return None
50
 
51
def get_cache_path(url: str) -> str:
    """Return a deterministic cache file path for *url*.

    The path is derived from the MD5 digest of the URL, so the same
    URL always maps to the same entry under CACHE_DIR.
    """
    url_digest = hashlib.md5(url.encode()).hexdigest()
    return os.path.join(CACHE_DIR, url_digest)
55
 
56
class DownloadItemTask:
    """Fetches remote content, backed by an on-disk cache."""

    def __init__(self, url: str):
        self.url = url

    async def download(self) -> bytes:
        """Return the file's bytes, serving from the cache when possible."""
        cache_path = get_cache_path(self.url)
        # A previous run may already have stored this URL on disk.
        if os.path.exists(cache_path):
            logging.info(f"Using cached file for {self.url}")
            with open(cache_path, 'rb') as cached:
                return cached.read()
        payload = await download_file(self.url)
        if payload:
            # Persist successful downloads so later runs skip the network.
            os.makedirs(CACHE_DIR, exist_ok=True)
            with open(cache_path, 'wb') as out:
                out.write(payload)
        return payload
75
 
76
class UnarchiveTask:
    """Utility class dealing with archives such as .zip or tarballs."""

    def __init__(self, data: bytes):
        # Raw archive bytes, as downloaded.
        self.data = data

    def unarchive(self) -> str:
        """Unpack the archive and return the directory holding its contents.

        Returns None when extraction fails.
        """
        logging.info("Unarchiving downloaded file...")
        extracted_dir = os.path.join(
            CACHE_DIR,
            ''.join(random.choices(string.ascii_uppercase + string.digits, k=10)),
        )
        os.makedirs(extracted_dir, exist_ok=True)

        try:
            # Pick the extractor from the archive's CONTENT, not the host
            # platform: a .zip downloaded on Linux is still a .zip (the old
            # code fed zip data to tar on Linux/macOS and failed).
            if zipfile.is_zipfile(io.BytesIO(self.data)):
                with zipfile.ZipFile(io.BytesIO(self.data), 'r') as zip_ref:
                    zip_ref.extractall(extracted_dir)
            else:
                archive_path = os.path.join(extracted_dir, 'archive.tar')
                with open(archive_path, 'wb') as f:
                    f.write(self.data)
                # Pass the archive's full path: the previous code passed the
                # bare name 'archive.tar', which tar resolved against the
                # current working directory, not extracted_dir.
                subprocess.run(['tar', '-xf', archive_path, '-C', extracted_dir], check=True)
            logging.info("Unarchiving complete!")
        except Exception as e:
            logging.error(f"Error unarchiving file: {e}")
            return None

        return extracted_dir
102
+
103
class DependencyFinderTask:
    """Scans a project tree for dependencies that should be swapped out."""

    # Dependency name -> compiled pattern used to detect it in file contents.
    DEPENDENCY_REGEXES = {
        'openai': OPENAI_REGEX,
    }

    def __init__(self):
        # Dependency name -> set of file paths where it was found.
        self.found_paths = {'openai': set()}
        self.has_openai_dep = False

    def find_dependencies(self, dir_path: str):
        """Walk *dir_path* recursively, recording every file that matches
        one of DEPENDENCY_REGEXES. Unreadable files are logged and skipped.

        Returns self for chaining.
        """
        logging.info("Searching for dependencies...")

        for root_dir, _, names in os.walk(dir_path):
            for name in names:
                path = os.path.join(root_dir, name)
                try:
                    with open(path, mode='rt', encoding='utf-8') as handle:
                        text = handle.read()
                    for dep_name, pattern in self.DEPENDENCY_REGEXES.items():
                        if pattern.search(text):
                            self.found_paths[dep_name].add(path)
                            self.has_openai_dep = True
                except Exception as e:
                    # Binary or non-UTF-8 files land here; skip them.
                    logging.error(f"Error reading file {path}: {e}")

        logging.info("Dependency search complete!")
        return self
134
+
135
class DependencyReplacerTask:
    """
    Replaces specified dependencies with their corresponding wrapper methods.
    Also provides save/load helpers so the dependency mapping can be reused
    across runs instead of re-scanning the tree.
    """

    def __init__(self, finder: DependencyFinderTask, pipeline: Any, wrapper: Any):
        self.finder = finder
        self.pipeline = pipeline
        self.wrapper = wrapper
        # Count of files actually rewritten by replace().
        self.num_changed_files = 0

    def replace(self):
        """Rewrite every recorded 'openai' file in place.

        Returns self for chaining.
        """
        logging.info("Replacing dependencies...")

        # Only the 'openai' dependency is handled today; other keys are ignored.
        for path in self.finder.found_paths.get('openai', set()):
            try:
                with open(path, mode='rt', encoding='utf-8') as src:
                    original_text = src.read()
                rewritten = original_text.replace('openai.', 'self.pipeline.')
                with open(path, mode='wt', encoding='utf-8') as dst:
                    dst.write(rewritten)
                self.num_changed_files += 1
            except Exception as e:
                logging.error(f"Error replacing dependency in file {path}: {e}")

        logging.info("Dependency replacement complete!")
        return self

    def save_mapping(self, mapping_file: str):
        """Persist the dependency -> paths mapping, one 'dep: p1,p2' line each."""
        with open(mapping_file, 'w') as out:
            for dep_name, paths in self.finder.found_paths.items():
                out.write(f"{dep_name}: {','.join(paths)}\n")

    def load_mapping(self, mapping_file: str):
        """Restore a mapping previously written by save_mapping."""
        with open(mapping_file, 'r') as src:
            for line in src:
                dep_name, joined = line.strip().split(': ')
                self.finder.found_paths[dep_name] = {p.strip() for p in joined.split(',')}
183
+
184
class WebAppCreatorTask:
    """Creates a web app directory and copies converted .html files into it."""

    def __init__(self, webapp_dirname: str, unarchived_dir: str):
        # Name of the destination directory, created under unarchived_dir.
        self.webapp_dirname = webapp_dirname
        # Root of the unpacked project to collect .html files from.
        self.unarchived_dir = unarchived_dir

    def create(self) -> bool:
        """Create the web app directory and copy every .html file into it.

        Returns True when the directory exists afterwards, False on error.
        """
        logging.info("Creating web app directory...")

        webapp_dir = os.path.join(self.unarchived_dir, self.webapp_dirname)
        os.makedirs(webapp_dir, exist_ok=True)

        try:
            for root, _, files in os.walk(self.unarchived_dir):
                # Skip the destination itself: descending into it would copy
                # already-copied files onto themselves, raising SameFileError
                # and aborting the whole copy (the previous behavior).
                if os.path.abspath(root).startswith(os.path.abspath(webapp_dir)):
                    continue
                for file in files:
                    if not file.endswith('.html'):
                        continue
                    src_path = os.path.join(root, file)
                    dest_path = os.path.join(webapp_dir, file)
                    shutil.copy2(src_path, dest_path)
            logging.info("Web app directory creation complete!")
        except Exception as e:
            logging.error(f"Error creating web app directory: {e}")
            return False

        return os.path.exists(webapp_dir)
213
+
214
class DeploymentTask:
    """Class responsible for deploying the web application."""

    def __init__(self, webapp_dir: str, api_key: str):
        # Deployment has not run yet; deploy() flips this on success.
        self.success = False
        self.webapp_dir = webapp_dir
        self.api_key = api_key

    def deploy(self):
        """Attempt the deployment, record the outcome on self.success,
        and return self for chaining.
        """
        logging.info("Deploying web application...")
        try:
            # Deployment logic here
            self.success = True
            logging.info("Deployment complete!")
        except Exception as e:
            logging.error(f"Error during deployment: {e}")
            self.success = False
        return self
234
+
235
def process_file(file_path: str, api_key: str, action: str, dependency_mapping: Dict[str, List[str]]):
    """Process one local archive: unpack it, swap openai dependencies for the
    local pipeline, build the web app directory, and optionally deploy it.
    """
    logging.info(f'\nProcessing local file: {file_path}')

    # Read the archive bytes from disk.
    # NOTE(review): process_urls passes a *directory* here, which would make
    # this open() raise IsADirectoryError — confirm the intended contract.
    with open(file_path, 'rb') as f:
        downloaded_file = f.read()

    unarchived_dir = UnarchiveTask(downloaded_file).unarchive()
    if not unarchived_dir:
        logging.error("Unarchiving failed! Proceeding to next URL...")
        return
    # NOTE(review): chdir mutates process-wide state; concurrent workers
    # (see process_urls' thread pool) will race on the working directory.
    os.chdir(unarchived_dir)

    # NOTE(review): 'ELECTRA-base-discriminator' looks like an incomplete hub
    # id (presumably 'google/electra-base-discriminator') — verify it resolves.
    tokenizer = AutoTokenizer.from_pretrained('ELECTRA-base-discriminator')
    pipe = pipeline('text-generation', model='ELECTRA-base-discriminator', tokenizer=tokenizer)

    # Scan the freshly-unpacked tree (we chdir'd into it above).
    finder = DependencyFinderTask().find_dependencies(os.curdir)

    # Load dependency mapping if provided
    # NOTE(review): load_mapping expects a file path, but dependency_mapping
    # is typed (and built in main) as a dict — confirm which callers pass.
    if dependency_mapping:
        finder.load_mapping(dependency_mapping)

    if finder.has_openai_dep:
        replacer = DependencyReplacerTask(finder, pipe, OpenAIWrapper()).replace()
        # NOTE(review): same path-vs-dict mismatch as load_mapping above.
        replacer.save_mapping(dependency_mapping)

    # created_webapp is a bool (success flag), not a path.
    created_webapp = WebAppCreatorTask(WEBAPP_DIRNAME, unarchived_dir).create()
    os.chdir('..')

    if action == 'upload':
        # NOTE(review): DeploymentTask seems to expect the webapp directory
        # path, but receives the bool success flag — confirm.
        deploy_task = DeploymentTask(created_webapp, api_key).deploy()
        if not deploy_task.success:
            logging.error("Deployment failed! Continuing to next URL...")

    logging.info(f"Successfully processed local file: {file_path}")
269
+
270
def process_urls(urls: List[str], api_key: str, action: str, dependency_mapping: Dict[str, List[str]]):
    """Download, unpack and process every URL, fanning the work out to a
    thread pool. Each worker drives its own asyncio event loop.
    """

    async def process_url_task(url: str):
        file_data = await DownloadItemTask(url).download()
        if file_data:
            unarchived_dir = UnarchiveTask(file_data).unarchive()
            if unarchived_dir:
                # NOTE(review): process_file re-opens its first argument as an
                # archive *file*, but this passes a directory — confirm intent.
                process_file(unarchived_dir, api_key, action, dependency_mapping)
                os.chdir('..')

    def run_url_task(url: str):
        # Each thread needs its own event loop to actually run the coroutine.
        # Previously the async function itself was submitted to the pool:
        # calling it only created a coroutine object that was never awaited,
        # so no URL was ever downloaded or processed.
        asyncio.run(process_url_task(url))

    with ThreadPoolExecutor() as executor:
        futures = {executor.submit(run_url_task, url) for url in urls}
        for future in as_completed(futures):
            # Re-raise any worker exception in the caller.
            future.result()
283
 
 
284
def main():
    """CLI entry point: validate arguments, optionally load a dependency
    mapping file, then build and launch the Gradio UI.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--api-key', '-a', type=str, help='Hugging Face API Key')
    parser.add_argument('--action', '-t', type=str, choices=['convert', 'upload'], help='Action to perform')
    parser.add_argument('--dependency-mapping', '-d', type=str, help='Dependency mapping file path')

    args = parser.parse_args()

    if not args.api_key:
        print("Please provide an API key using --api-key flag.")
        sys.exit(1)

    if not args.action:
        print("Please provide an action to perform using --action flag.")
        sys.exit(1)

    dependency_mapping = {}
    if args.dependency_mapping:
        if not os.path.exists(args.dependency_mapping):
            print(f"Dependency mapping file '{args.dependency_mapping}' does not exist.")
            sys.exit(1)

        # Each line has the form "dep: path1,path2,..."
        with open(args.dependency_mapping, 'r') as f:
            for line in f:
                dep, paths = line.strip().split(': ')
                dependency_mapping[dep] = [path.strip() for path in paths.split(',')]

    def process_urls_and_open_browser(urls: str):
        urls_list = [url.strip() for url in urls.split(',')]
        process_urls(urls_list, args.api_key, args.action, dependency_mapping)
        webbrowser.open("http://localhost:7860")

    # Wire the real handler into the Interface. The previous code passed a
    # no-op lambda as fn and then handed the handler to launch(), whose first
    # parameter is not a callback — so the UI never invoked it. gr.inputs.*
    # was also removed in modern Gradio; gr.Textbox is the current API.
    iface = gr.Interface(
        fn=process_urls_and_open_browser,
        inputs=gr.Textbox(label="URLs (comma-separated)"),
        outputs="text",
        title="Project Converter and Uploader",
        description="Convert and upload projects to Hugging Face Spaces."
    )

    iface.launch()


if __name__ == "__main__":
    main()