compute_pool / app.py
import os
import tempfile
import shutil
from zipfile import ZipFile
import logging
import psutil
from flask import Flask, request, jsonify, render_template, send_file
from werkzeug.utils import secure_filename  # sanitize client-supplied file names
import multiprocessing
import subprocess
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Initialize Flask app
app = Flask(__name__)
connected_cpus = {"localhost": {"cpu_count": psutil.cpu_count(logical=False), "usage": 0.0}}
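# Each entry maps a hostname to its physical core count and a usage figure.
# A remote worker would presumably be registered with the same shape, e.g.
# (illustrative only; this file defines no registration endpoint):
#   connected_cpus["worker-1"] = {"cpu_count": 8, "usage": 0.0}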
# Define the target function for multiprocessing: run the user script in a
# subprocess and capture its combined stdout/stderr
def target_function(script_path, folder_path):
    output_log = tempfile.TemporaryFile(mode='w+t')
    try:
        subprocess.run(['python', script_path], cwd=folder_path,
                       stdout=output_log, stderr=subprocess.STDOUT)
        output_log.seek(0)
        log_output = output_log.read()
    except Exception as e:
        log_output = str(e)
    finally:
        output_log.close()
    return log_output
# Endpoint to handle file uploads and script execution
@app.route('/upload', methods=['POST'])
def handle_upload():
    try:
        if 'file' not in request.files or 'script_content' not in request.form:
            return jsonify({"status": "error", "message": "File or script content not provided"}), 400
        files = request.files.getlist('file')
        script_content = request.form['script_content']
        # Create a temporary directory to store uploaded files
        temp_dir = tempfile.mkdtemp()
        # Save the uploaded files to the temporary directory, sanitizing names
        folder_path = os.path.join(temp_dir, 'uploaded_folder')
        os.makedirs(folder_path, exist_ok=True)
        for file_obj in files:
            file_path = os.path.join(folder_path, secure_filename(file_obj.filename))
            file_obj.save(file_path)
        # Save the script content to a file
        script_path = os.path.join(folder_path, 'user_script.py')
        with open(script_path, 'w') as script_file:
            script_file.write(script_content)
        # Run the script using multiprocessing
        log_output = run_script(script_path, folder_path)
        # Zip the folder into the system temp dir with a per-request unique name,
        # so that /download/<filename>, which serves from tempfile.gettempdir(),
        # can actually find it
        zip_name = f"{os.path.basename(temp_dir)}_output.zip"
        zip_path = os.path.join(tempfile.gettempdir(), zip_name)
        with ZipFile(zip_path, 'w') as zipf:
            for root, _, filenames in os.walk(folder_path):
                for name in filenames:
                    full_path = os.path.join(root, name)
                    zipf.write(full_path, os.path.relpath(full_path, folder_path))
        # The working directory is no longer needed once the zip exists
        shutil.rmtree(temp_dir, ignore_errors=True)
        return jsonify({"status": "success", "log_output": log_output,
                        "download_url": f"/download/{zip_name}"})
    except Exception as e:
        logger.error(f"Error in handle_upload: {e}")
        return jsonify({"status": "error", "message": str(e)}), 500
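# A minimal sketch of a client call, assuming the server runs on its default
# port below; 'data.csv' and the script body are hypothetical placeholders,
# while the 'file' and 'script_content' field names match handle_upload():
#   curl -X POST http://localhost:7860/upload \
#        -F "file=@data.csv" \
#        -F "script_content=print('hello')"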
@app.route('/download/<filename>')
def download_file(filename):
    try:
        # Sanitize the name so only plain files in the temp dir can be served
        # (blocks path-traversal inputs such as '../../etc/passwd')
        safe_name = secure_filename(filename)
        return send_file(os.path.join(tempfile.gettempdir(), safe_name), as_attachment=True)
    except Exception as e:
        logger.error(f"Error in download_file: {e}")
        return jsonify({"status": "error", "message": str(e)}), 500
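# Illustrative usage: fetch the archive at the download_url returned by /upload,
#   curl -OJ http://localhost:7860/download/<zip name from the upload response>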
# Endpoint to get connected CPUs information
@app.route('/cpu_info', methods=['GET'])
def get_cpu_info():
    try:
        info = []
        for host, data in connected_cpus.items():
            info.append(f"{host}: {data['cpu_count']} CPUs, {data['usage']}% usage")
        return jsonify({"status": "success", "cpu_info": "\n".join(info)})
    except Exception as e:
        logger.error(f"Error in get_cpu_info: {e}")
        return jsonify({"status": "error", "message": str(e)}), 500
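# Illustrative response for a 4-core host (actual numbers depend on the machine):
#   {"status": "success", "cpu_info": "localhost: 4 CPUs, 0.0% usage"}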
# Main interface
@app.route('/')
def index():
    return render_template('index.html')
def run_script(script_path, folder_path):
    # Sum the CPU counts of every registered host; localhost is already an
    # entry in connected_cpus, so it is not counted again
    total_cpus = sum(cpu['cpu_count'] for cpu in connected_cpus.values())
    # Fan the same script out to one pool worker per CPU and gather the logs
    with multiprocessing.Pool(total_cpus) as pool:
        log_outputs = pool.starmap(target_function, [(script_path, folder_path)] * total_cpus)
    return '\n'.join(log_outputs)
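# Note: every pool worker runs the identical script in the same shared folder,
# so the parallelism only helps if user_script.py partitions its own work
# (e.g. by a worker index); concurrent writes to shared files may collide.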
if __name__ == "__main__":
    app.run(host='0.0.0.0', port=7860, threaded=True)