import os
import sys
import tempfile
from zipfile import ZipFile
import logging
import psutil
from flask import Flask, request, jsonify, render_template, send_file
from werkzeug.utils import secure_filename
import multiprocessing
import subprocess

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Initialize Flask app
app = Flask(__name__)

# Registry of available workers; psutil.cpu_count(logical=False) can return None,
# so fall back to a single CPU in that case
connected_cpus = {"localhost": {"cpu_count": psutil.cpu_count(logical=False) or 1, "usage": 0.0}}

# Worker run in each pool process: execute the uploaded script and capture its output
def target_function(script_path, folder_path):
    output_log = tempfile.TemporaryFile(mode='w+t')
    try:
        # Run the script with the uploaded folder as its working directory,
        # merging stderr into stdout so the client receives the full log
        subprocess.run([sys.executable, script_path], cwd=folder_path,
                       stdout=output_log, stderr=subprocess.STDOUT)
        output_log.seek(0)
        log_output = output_log.read()
    except Exception as e:
        log_output = str(e)
    finally:
        output_log.close()
    return log_output

# Endpoint to handle file uploads and script execution
@app.route('/upload', methods=['POST'])
def handle_upload():
    try:
        if 'file' not in request.files or 'script_content' not in request.form:
            return jsonify({"status": "error", "message": "File or script content not provided"}), 400
        
        files = request.files.getlist('file')
        script_content = request.form['script_content']
        
        # Create a temporary directory to store uploaded files
        temp_dir = tempfile.mkdtemp()
        
        # Save the uploaded files to the temporary directory
        folder_path = os.path.join(temp_dir, 'uploaded_folder')
        os.makedirs(folder_path, exist_ok=True)
        for file_obj in files:
            # Sanitize the client-supplied filename before writing it to disk
            file_path = os.path.join(folder_path, secure_filename(file_obj.filename))
            file_obj.save(file_path)
        
        # Save the script content to a file
        script_path = os.path.join(folder_path, 'user_script.py')
        with open(script_path, 'w') as script_file:
            script_file.write(script_content)
        
        # Run the script using multiprocessing
        log_output = run_script(script_path, folder_path)
        
        # Create a zip of the processed folder; write it to the system temp dir
        # under a unique name so the /download route can locate it later
        zip_name = f"{os.path.basename(temp_dir)}_output.zip"
        zip_path = os.path.join(tempfile.gettempdir(), zip_name)
        with ZipFile(zip_path, 'w') as zipf:
            for root, _, folder_files in os.walk(folder_path):
                for name in folder_files:
                    abs_path = os.path.join(root, name)
                    zipf.write(abs_path, os.path.relpath(abs_path, folder_path))
        
        return jsonify({"status": "success", "log_output": log_output, "download_url": f"/download/{zip_name}"})
    
    except Exception as e:
        logger.error(f"Error in handle_upload: {e}")
        return jsonify({"status": "error", "message": str(e)}), 500

@app.route('/download/<filename>')
def download_file(filename):
    try:
        # Serve files from the system temp dir only; strip any path components
        # from the requested name to prevent path traversal
        safe_name = os.path.basename(filename)
        return send_file(os.path.join(tempfile.gettempdir(), safe_name), as_attachment=True)
    except Exception as e:
        logger.error(f"Error in download_file: {e}")
        return jsonify({"status": "error", "message": str(e)}), 500

# Endpoint to get connected CPUs information
@app.route('/cpu_info', methods=['GET'])
def get_cpu_info():
    try:
        info = []
        for host, data in connected_cpus.items():
            info.append(f"{host}: {data['cpu_count']} CPUs, {data['usage']}% usage")
        return jsonify({"status": "success", "cpu_info": "\n".join(info)})
    except Exception as e:
        logger.error(f"Error in get_cpu_info: {e}")
        return jsonify({"status": "error", "message": str(e)}), 500
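
# Example /cpu_info response (a sketch, assuming a 4-core host; actual values depend
# on the machine this runs on):
#   {"status": "success", "cpu_info": "localhost: 4 CPUs, 0.0% usage"}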

# Main interface
@app.route('/')
def index():
    return render_template('index.html')

def run_script(script_path, folder_path):
    # connected_cpus already includes the local host, so the worker count is just the sum
    total_cpus = sum(cpu['cpu_count'] for cpu in connected_cpus.values())
    
    # Run one copy of the script per available CPU and collect each worker's log
    with multiprocessing.Pool(total_cpus) as pool:
        log_outputs = pool.starmap(target_function, [(script_path, folder_path)] * total_cpus)

    return '\n'.join(log_outputs)

if __name__ == "__main__":
    app.run(host='0.0.0.0', port=7860, threaded=True)
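
# Example client usage (a minimal sketch; assumes the server is running locally on
# port 7860 and that the `requests` package is installed -- neither is part of this file):
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:7860/upload",
#       files=[("file", open("data.csv", "rb"))],  # "data.csv" is a hypothetical input file
#       data={"script_content": "print('hello from the uploaded script')"},
#   )
#   result = resp.json()
#   print(result["log_output"])
#   # The zipped output folder can then be fetched from result["download_url"]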