File size: 4,315 Bytes
d97f2ec
 
 
 
 
 
 
 
1dcef25
a076bea
d97f2ec
 
 
 
 
9c660b1
 
 
9e95bc6
d97f2ec
9c660b1
 
 
 
d97f2ec
 
 
 
9c660b1
d97f2ec
9c660b1
 
 
 
d97f2ec
 
 
 
 
9c660b1
d97f2ec
1dcef25
9e95bc6
 
 
 
 
1dcef25
 
 
 
 
 
 
 
 
 
 
 
 
 
d97f2ec
1dcef25
 
 
 
 
d97f2ec
 
a076bea
 
9e95bc6
 
a076bea
 
9e95bc6
a076bea
d97f2ec
 
 
 
 
 
a076bea
 
 
d97f2ec
 
9e95bc6
d97f2ec
 
 
 
 
 
 
 
a076bea
 
 
 
 
d97f2ec
a076bea
 
d97f2ec
 
 
 
a076bea
d97f2ec
a076bea
 
 
 
bc57054
b9c6f79
a076bea
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
import json
import logging
import multiprocessing
import os
import shutil
import subprocess
import sys
import tempfile
from zipfile import ZipFile

import psutil
from flask import Flask, request, jsonify, render_template, send_file

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Initialize Flask app
app = Flask(__name__)

# Registry of hosts offering CPUs: host -> {"cpu_count": int, "usage": float}.
# Seeded with the local machine's physical core count (logical=False).
# NOTE(review): plain dict mutated from request handlers; the app runs with
# threaded=True, so concurrent updates are possible — confirm this is acceptable.
connected_cpus = {"localhost": {"cpu_count": psutil.cpu_count(logical=False), "usage": 0.0}}

# Endpoint to donate CPU resources
@app.route('/donate_cpu', methods=['POST'])
def donate_cpu_handler():
    """Register a remote host's donated CPUs in ``connected_cpus``.

    Expects a JSON body with ``host`` (non-empty string) and ``cpu_count``
    (positive integer).  Returns 400 on a missing/malformed payload instead
    of letting a KeyError bubble up as a 500.
    """
    # silent=True yields None (not an exception) for missing/invalid JSON.
    data = request.get_json(silent=True) or {}
    host = data.get('host')
    cpu_count = data.get('cpu_count')
    if not host or not isinstance(cpu_count, int) or cpu_count <= 0:
        return jsonify({"status": "error",
                        "message": "host and positive integer cpu_count required"}), 400
    connected_cpus[host] = {"cpu_count": cpu_count, "usage": 0.0}
    logger.info(f"CPU donated by {host} with {cpu_count} CPUs.")
    return jsonify({"status": "success", "message": f"CPU donated by {host}"})

# Endpoint to update CPU usage
@app.route('/update_cpu_usage', methods=['POST'])
def update_cpu_usage_handler():
    """Update the recorded CPU usage for a registered host.

    Expects JSON with ``host`` and a numeric ``usage`` (percent).  Unknown
    hosts and malformed payloads are silently ignored (original behavior),
    but a missing/invalid JSON body no longer raises a 500.
    """
    data = request.get_json(silent=True) or {}
    host = data.get('host')
    usage = data.get('usage')
    # Only accept numeric usage values; bool is excluded since it is an int subclass.
    if host in connected_cpus and isinstance(usage, (int, float)) and not isinstance(usage, bool):
        connected_cpus[host]['usage'] = usage
        logger.info(f"Updated CPU usage for {host}: {usage}%")
    return jsonify({"status": "success"})

# Function to run the provided Python script once per available CPU slot
def run_script(script_content, folder_path):
    """Write *script_content* to ``user_script.py`` in *folder_path* and run it
    once per CPU slot registered in ``connected_cpus``.

    Returns the combined stdout/stderr logs of all runs, newline-joined.

    Fix: the original passed a *nested* function to ``multiprocessing.Pool.map``;
    nested functions cannot be pickled, so every call raised on process-based
    pools.  The work here is subprocess-bound (the GIL is released while
    waiting on the child), so a thread pool parallelizes it correctly without
    pickling.
    """
    script_path = os.path.join(folder_path, 'user_script.py')
    with open(script_path, 'w') as script_file:
        script_file.write(script_content)

    def run_once(slot_id):
        # TemporaryFile exposes a real file descriptor via fileno(), which
        # subprocess requires for stdout redirection; the with-block replaces
        # the original try/finally close.
        with tempfile.TemporaryFile(mode='w+t') as output_log:
            try:
                subprocess.run([sys.executable, script_path], cwd=folder_path,
                               stdout=output_log, stderr=subprocess.STDOUT)
                output_log.seek(0)
                return output_log.read()
            except Exception as e:
                return str(e)

    # Collect all available CPUs including the local host CPU.
    # NOTE(review): remote hosts only inflate the slot count — every copy
    # still runs locally; confirm whether remote dispatch is intended.
    total_cpus = sum(cpu['cpu_count'] for cpu in connected_cpus.values())
    slots = max(total_cpus, 1)  # Pool(0) raises ValueError on an empty registry

    # Local import: stdlib submodule, only needed here.
    from multiprocessing.pool import ThreadPool
    with ThreadPool(slots) as pool:
        log_outputs = pool.map(run_once, range(slots))

    return '\n'.join(log_outputs)

# Function to handle file uploads and script execution
@app.route('/upload', methods=['POST'])
def handle_upload():
    """Accept uploaded files plus a script, run the script over them, and
    return a log plus a download URL for a zip of the resulting folder.

    Fixes: (1) client-supplied filenames are stripped to their basename to
    block path traversal outside the upload folder; (2) the zip is written
    directly into ``tempfile.gettempdir()`` under a unique name — the original
    nested it inside a per-request temp dir, so the ``/download/<filename>``
    route (which looks only in gettempdir()) could never find it; (3) the
    per-request temp dir is removed afterwards instead of leaking.
    """
    if 'file' not in request.files or 'script_content' not in request.form:
        return jsonify({"status": "error", "message": "File or script content not provided"}), 400

    uploads = request.files.getlist('file')
    script_content = request.form['script_content']

    # Create a temporary directory to store uploaded files
    temp_dir = tempfile.mkdtemp()
    folder_path = os.path.join(temp_dir, 'uploaded_folder')
    os.makedirs(folder_path, exist_ok=True)
    for file_obj in uploads:
        # basename() discards any directory components the client sent,
        # preventing writes outside folder_path (e.g. "../../etc/passwd").
        safe_name = os.path.basename(file_obj.filename or '')
        if not safe_name:
            continue
        file_obj.save(os.path.join(folder_path, safe_name))

    # Run the script
    log_output = run_script(script_content, folder_path)

    # Zip the whole folder (including files the script created).  mkstemp gives
    # a unique name directly in gettempdir() so the download route can find it.
    zip_fd, zip_path = tempfile.mkstemp(suffix='.zip', dir=tempfile.gettempdir())
    os.close(zip_fd)  # ZipFile reopens by path; close the raw descriptor
    with ZipFile(zip_path, 'w') as zipf:
        for root, _, filenames in os.walk(folder_path):
            for name in filenames:
                full = os.path.join(root, name)
                zipf.write(full, os.path.relpath(full, folder_path))

    # The zip lives outside temp_dir, so the upload scratch space can go.
    shutil.rmtree(temp_dir, ignore_errors=True)

    return jsonify({"status": "success", "log_output": log_output, "download_url": f"/download/{os.path.basename(zip_path)}"})

@app.route('/download/<filename>')
def download_file(filename):
    """Serve a previously generated zip from the system temp directory.

    Rejects names containing path components (traversal guard) and returns
    a 404 JSON error instead of a 500 when the file does not exist.
    """
    safe_name = os.path.basename(filename)
    file_path = os.path.join(tempfile.gettempdir(), safe_name)
    # safe_name != filename means the URL carried directory components.
    if safe_name != filename or not os.path.isfile(file_path):
        return jsonify({"status": "error", "message": "File not found"}), 404
    return send_file(file_path, as_attachment=True)

# Endpoint to get connected CPUs information
@app.route('/cpu_info', methods=['GET'])
def get_cpu_info():
    """Return one line per registered host: CPU count and current usage."""
    lines = [
        f"{host}: {stats['cpu_count']} CPUs, {stats['usage']}% usage"
        for host, stats in connected_cpus.items()
    ]
    return jsonify({"status": "success", "cpu_info": "\n".join(lines)})

# Main interface
@app.route('/')
def index():
    """Render the front-end page (expects templates/index.html to exist)."""
    return render_template('index.html')

if __name__ == "__main__":
    # NOTE(review): binds to all interfaces (0.0.0.0) on port 7860 with no
    # authentication, and /upload executes arbitrary uploaded code — confirm
    # this service is only ever exposed on a trusted network.
    app.run(host='0.0.0.0', port=7860, threaded=True)