label
class label 6
classes | code_before
stringlengths 75
187k
| code_after
stringlengths 75
187k
| label_text
stringclasses 6
values | deleted
dict | added
dict | normalized_code_before
stringlengths 75
152k
| normalized_code_after
stringlengths 75
152k
| before_doc_string_pos
sequence | after_doc_string_pos
sequence |
---|---|---|---|---|---|---|---|---|---|
0CWE-22
| """
Defines helper methods useful for setting up ports, launching servers, and handling `ngrok`
"""
import os
import socket
import threading
from flask import Flask, request, session, jsonify, abort, send_file, render_template, redirect
from flask_cachebuster import CacheBuster
from flask_login import LoginManager, login_user, current_user, login_required
from flask_cors import CORS
import threading
import pkg_resources
import datetime
import time
import json
import urllib.request
from shutil import copyfile
import requests
import sys
import csv
import logging
from gradio.tunneling import create_tunnel
from gradio import encryptor
from gradio import queue
from functools import wraps
import io
import inspect
import traceback
from werkzeug.security import safe_join
INITIAL_PORT_VALUE = int(os.getenv(
'GRADIO_SERVER_PORT', "7860")) # The http server will try to open on port 7860. If not available, 7861, 7862, etc.
TRY_NUM_PORTS = int(os.getenv(
'GRADIO_NUM_PORTS', "100")) # Number of ports to try before giving up and throwing an exception.
LOCALHOST_NAME = os.getenv(
'GRADIO_SERVER_NAME', "127.0.0.1")
GRADIO_API_SERVER = "https://api.gradio.app/v1/tunnel-request"
GRADIO_FEATURE_ANALYTICS_URL = "https://api.gradio.app/gradio-feature-analytics/"
STATIC_TEMPLATE_LIB = pkg_resources.resource_filename("gradio", "templates/")
STATIC_PATH_LIB = pkg_resources.resource_filename("gradio", "templates/frontend/static")
VERSION_FILE = pkg_resources.resource_filename("gradio", "version.txt")
with open(VERSION_FILE) as version_file:
GRADIO_STATIC_ROOT = "https://gradio.s3-us-west-2.amazonaws.com/" + \
version_file.read().strip() + "/static/"
app = Flask(__name__,
template_folder=STATIC_TEMPLATE_LIB,
static_folder="",
static_url_path="/none/")
app.url_map.strict_slashes = False
CORS(app)
cache_buster = CacheBuster(
config={'extensions': ['.js', '.css'], 'hash_size': 5})
cache_buster.init_app(app)
app.secret_key = os.getenv("GRADIO_KEY", "secret")
login_manager = LoginManager()
login_manager.login_view = 'login'
login_manager.init_app(app)
# Hide Flask default message
cli = sys.modules['flask.cli']
cli.show_server_banner = lambda *x: None
class User:
    """Minimal flask-login user record, keyed only by an id.

    Every instance is treated as a fully authenticated, active, named
    account; flask-login reads the three boolean attributes directly.
    """

    def __init__(self, id):
        self.id = id
        # flask-login protocol flags: this app has no notion of inactive
        # or anonymous users, so all three are fixed.
        self.is_authenticated = True
        self.is_active = True
        self.is_anonymous = False

    def get_id(self):
        """Return the identifier flask-login stores in the session cookie."""
        return self.id
@login_manager.user_loader
def load_user(_id):
    # flask-login callback: rehydrate the session's User object from the
    # id that was stored in the session cookie.
    return User(_id)
def login_check(func):
    """Decorator that enforces flask-login authentication on a view, but
    only when the server was started with auth configured (app.auth truthy).

    When auth is disabled the view runs unprotected, so the same route
    definitions work in both modes.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if app.auth:
            # Apply login_required lazily, at request time, because
            # app.auth is only set when start_server() runs.
            @login_required
            def func2(*args, **kwargs):
                return func(*args, **kwargs)
            return func2(*args, **kwargs)
        else:
            return func(*args, **kwargs)
    return wrapper
def get_local_ip_address():
    """Look up this machine's public IP via api.ipify.org (3s timeout).

    Returns the IP as text, or the literal string "No internet connection"
    when the lookup fails — callers treat that sentinel as offline mode.
    """
    try:
        return requests.get('https://api.ipify.org', timeout=3).text
    except (requests.ConnectionError, requests.exceptions.ReadTimeout):
        return "No internet connection"
IP_ADDRESS = get_local_ip_address()
def get_first_available_port(initial, final):
    """
    Gets the first open port in a specified range of port numbers
    :param initial: the initial value in the range of port numbers
    :param final: final (exclusive) value in the range of port numbers, should be greater than `initial`
    :return: the first port in [initial, final) that could be bound
    :raises OSError: if every port in the range is already in use
    """
    for candidate in range(initial, final):
        probe = socket.socket()
        try:
            # A successful bind means the port is free; release it right
            # away so the caller can bind it for real.
            probe.bind((LOCALHOST_NAME, candidate))
        except OSError:
            probe.close()
            continue
        probe.close()
        return candidate
    raise OSError(
        "All ports from {} to {} are in use. Please close a port.".format(
            initial, final
        )
    )
@app.route("/", methods=["GET"])
@login_check
def main():
    # Reset any per-session interface state, then serve the frontend with
    # the interface configuration embedded in the page.
    session["state"] = None
    return render_template("frontend/index.html", config=app.interface.config)
@app.route("/static/<path:path>", methods=["GET"])
def static_resource(path):
    # Serve static assets: from the versioned S3 CDN when sharing is on,
    # otherwise from the local package. safe_join rejects ".." traversal.
    if app.interface.share:
        return redirect(GRADIO_STATIC_ROOT + path)
    else:
        return send_file(safe_join(STATIC_PATH_LIB, path))
# TODO(@aliabid94): this throws a 500 error if app.auth is None (should probably just redirect to '/')
@app.route('/login', methods=["GET", "POST"])
def login():
    """GET: render the frontend (which shows the login form based on the
    config). POST: validate credentials against app.auth and start a
    flask-login session on success; abort with 401 otherwise."""
    if request.method == "GET":
        config = get_config()
        return render_template("frontend/index.html", config=config)
    elif request.method == "POST":
        username = request.form.get("username")
        password = request.form.get("password")
        # app.auth is either a {username: password} mapping or a
        # callable(username, password) -> bool (see start_server).
        if ((not callable(app.auth) and username in app.auth and app.auth[username] == password)
            or (callable(app.auth) and app.auth.__call__(username, password))):
            login_user(User(username))
            return redirect("/")
        else:
            return abort(401)
@app.route("/config/", methods=["GET"])
def get_config():
    # Only expose the interface config to authenticated users (or when no
    # auth is configured); otherwise tell the frontend to show the login UI.
    if app.interface.auth is None or current_user.is_authenticated:
        return jsonify(app.interface.config)
    else:
        return {"auth_required": True, "auth_message": app.interface.auth_message}
@app.route("/enable_sharing/<path:path>", methods=["GET"])
@login_check
def enable_sharing(path):
    # Record the public share URL in the config; the literal string "None"
    # (URL-encoded by the caller) disables sharing.
    if path == "None":
        path = None
    app.interface.config["share_url"] = path
    return jsonify(success=True)
@app.route("/shutdown", methods=['GET'])
def shutdown():
    # Ask the werkzeug dev server to stop; the shutdown hook only exists
    # when running under werkzeug.
    shutdown_func = request.environ.get('werkzeug.server.shutdown')
    if shutdown_func is None:
        raise RuntimeError('Not running werkzeug')
    shutdown_func()
    return "Shutting down..."
@app.route("/api/predict/", methods=["POST"])
@login_check
def predict():
    """Run the interface's prediction on the posted "data", update the
    rolling average duration per predict function, optionally auto-flag
    the result, and return predictions plus timing info as JSON."""
    raw_input = request.json["data"]
    # Capture any errors made and pipe to front end
    if app.interface.show_error:
        try:
            prediction, durations = app.interface.process(raw_input)
        except BaseException as error:
            traceback.print_exc()
            return jsonify({"error": str(error)}), 500
    else:
        prediction, durations = app.interface.process(raw_input)
    avg_durations = []
    for i, duration in enumerate(durations):
        # predict_durations[i] accumulates [total_time, call_count].
        app.interface.predict_durations[i][0] += duration
        app.interface.predict_durations[i][1] += 1
        avg_durations.append(app.interface.predict_durations[i][0]
                             / app.interface.predict_durations[i][1])
    app.interface.config["avg_durations"] = avg_durations
    output = {"data": prediction, "durations": durations, "avg_durations": avg_durations}
    if app.interface.allow_flagging == "auto":
        try:
            flag_index = flag_data(raw_input, prediction,
                                   flag_option=(None if app.interface.flagging_options is None else ""),
                                   username=current_user.id if current_user.is_authenticated else None)
            output["flag_index"] = flag_index
        except Exception as e:
            # Auto-flagging is best-effort: log the failure and still
            # return the prediction.
            print(str(e))
            pass
    return jsonify(output)
def get_types(cls_set, component):
    """Extract human-readable type info for component classes by parsing
    the docstrings of their preprocess/postprocess methods.

    :param cls_set: iterable of component classes
    :param component: "input" parses line 1 of each class's `preprocess`
        docstring; anything else parses the last line of `postprocess`
    :return: (docset, types) — parallel lists of the description text
        after the colon and the parenthesized type name on that line
    """
    docset, types = [], []
    is_input = component == "input"
    for cls in cls_set:
        # Inputs document their type on the second docstring line of
        # preprocess; outputs on the final line of postprocess.
        method = cls.preprocess if is_input else cls.postprocess
        line = inspect.getdoc(method).split("\n")[1 if is_input else -1]
        docset.append(line.split(":")[-1])
        types.append(line.split(")")[0].split("(")[-1])
    return docset, types
@app.route("/api/", methods=["GET"])
def api_docs():
    """Render auto-generated API documentation for the interface, built
    from the input/output component classes, their docstring-declared
    types, and a generated sample input per component."""
    inputs = [type(inp) for inp in app.interface.input_components]
    outputs = [type(out) for out in app.interface.output_components]
    input_types_doc, input_types = get_types(inputs, "input")
    output_types_doc, output_types = get_types(outputs, "output")
    input_names = [type(inp).__name__ for inp in app.interface.input_components]
    output_names = [type(out).__name__ for out in app.interface.output_components]
    sample_inputs = [inp.generate_sample() for inp in app.interface.input_components]
    docs = {
        "inputs": input_names,
        "outputs": output_names,
        "len_inputs": len(inputs),
        "len_outputs": len(outputs),
        "inputs_lower": [name.lower() for name in input_names],
        "outputs_lower": [name.lower() for name in output_names],
        "input_types": input_types,
        "output_types": output_types,
        "input_types_doc": input_types_doc,
        "output_types_doc": output_types_doc,
        "sample_inputs": sample_inputs
    }
    return render_template("api_docs.html", **docs)
def log_feature_analytics(feature):
    """Fire-and-forget usage ping for `feature`; skipped when analytics
    are disabled and silently dropped on network failure (3s timeout)."""
    if app.interface.analytics_enabled:
        try:
            requests.post(GRADIO_FEATURE_ANALYTICS_URL,
                          data={
                              'ip_address': IP_ADDRESS,
                              'feature': feature}, timeout=3)
        except (requests.ConnectionError, requests.exceptions.ReadTimeout):
            pass  # do not push analytics if no network
def flag_data(input_data, output_data, flag_option=None, flag_index=None, username=None, flag_path=None):
    """Append a flagged sample to the interface's log.csv (or update the
    "flag" column of an existing row), handling encrypted logs.

    :param input_data: list of raw input values, one per input component
    :param output_data: list of output values, one per output component
    :param flag_option: value for the optional "flag" column
    :param flag_index: if given, update row `flag_index` instead of appending
    :param username: optional username column value
    :param flag_path: directory for the log; defaults to cwd/flagging_dir
    :return: number of data rows now in the log (header excluded)
    """
    if flag_path is None:
        flag_path = os.path.join(app.cwd, app.interface.flagging_dir)
    log_fp = "{}/log.csv".format(flag_path)
    encryption_key = app.interface.encryption_key if app.interface.encrypt else None
    is_new = not os.path.exists(log_fp)

    if flag_index is None:
        # Build one CSV row: serialized inputs, serialized outputs,
        # then optional flag/username columns and a timestamp.
        csv_data = []
        for i, interface in enumerate(app.interface.input_components):
            csv_data.append(interface.save_flagged(
                flag_path, app.interface.config["input_components"][i]["label"], input_data[i], encryption_key))
        for i, interface in enumerate(app.interface.output_components):
            csv_data.append(interface.save_flagged(
                flag_path, app.interface.config["output_components"][i]["label"], output_data[i], encryption_key) if output_data[i] is not None else "")
        if flag_option is not None:
            csv_data.append(flag_option)
        if username is not None:
            csv_data.append(username)
        csv_data.append(str(datetime.datetime.now()))
        if is_new:
            # First write: header row mirrors the column layout above.
            headers = [interface["label"]
                       for interface in app.interface.config["input_components"]]
            headers += [interface["label"]
                        for interface in app.interface.config["output_components"]]
            if app.interface.flagging_options is not None:
                headers.append("flag")
            if username is not None:
                headers.append("username")
            headers.append("timestamp")

    def replace_flag_at_index(file_content):
        # Rewrite the existing CSV text with row `flag_index`'s "flag"
        # column set to `flag_option`.
        file_content = io.StringIO(file_content)
        content = list(csv.reader(file_content))
        header = content[0]
        flag_col_index = header.index("flag")
        content[flag_index][flag_col_index] = flag_option
        output = io.StringIO()
        writer = csv.writer(output)
        writer.writerows(content)
        return output.getvalue()

    if app.interface.encrypt:
        # Encrypted log: decrypt the whole file into memory, apply the
        # append/update, then re-encrypt and rewrite it.
        output = io.StringIO()
        if not is_new:
            with open(log_fp, "rb") as csvfile:
                encrypted_csv = csvfile.read()
                decrypted_csv = encryptor.decrypt(
                    app.interface.encryption_key, encrypted_csv)
                file_content = decrypted_csv.decode()
                if flag_index is not None:
                    file_content = replace_flag_at_index(file_content)
                output.write(file_content)
        writer = csv.writer(output)
        if flag_index is None:
            if is_new:
                writer.writerow(headers)
            writer.writerow(csv_data)
        with open(log_fp, "wb") as csvfile:
            csvfile.write(encryptor.encrypt(
                app.interface.encryption_key, output.getvalue().encode()))
    else:
        if flag_index is None:
            with open(log_fp, "a", newline="") as csvfile:
                writer = csv.writer(csvfile)
                if is_new:
                    writer.writerow(headers)
                writer.writerow(csv_data)
        else:
            with open(log_fp) as csvfile:
                file_content = csvfile.read()
                file_content = replace_flag_at_index(file_content)
            with open(log_fp, "w", newline="") as csvfile:  # newline parameter needed for Windows
                csvfile.write(file_content)
    with open(log_fp, "r") as csvfile:
        # Row count minus the header line.
        line_count = len([None for row in csv.reader(csvfile)]) - 1
    return line_count
@app.route("/api/flag/", methods=["POST"])
@login_check
def flag():
    # Persist a user-flagged sample (inputs + outputs) via flag_data,
    # recording the username when authenticated.
    log_feature_analytics('flag')
    data = request.json['data']
    flag_data(data['input_data'], data['output_data'], data.get("flag_option"), data.get("flag_index"),
              current_user.id if current_user.is_authenticated else None)
    return jsonify(success=True)
@app.route("/api/interpret/", methods=["POST"])
@login_check
def interpret():
    # Run the interface's interpretation pass over the posted inputs and
    # return per-input scores plus the alternative outputs produced.
    log_feature_analytics('interpret')
    raw_input = request.json["data"]
    interpretation_scores, alternative_outputs = app.interface.interpret(
        raw_input)
    return jsonify({
        "interpretation_scores": interpretation_scores,
        "alternative_outputs": alternative_outputs
    })
@app.route("/file/<path:path>", methods=["GET"])
@login_check
def file(path):
    """Serve a file relative to the app's working directory, decrypting it
    in memory first when encryption is enabled and the file belongs to the
    interface's examples directory.

    Security: `path` is user-controlled, so it is resolved with werkzeug's
    safe_join (already imported at the top of this file), which rejects
    absolute paths and ".." traversal out of app.cwd (CWE-22). The previous
    code called secure_filename(), which is not imported in this module
    (NameError at request time) and would also have flattened legitimate
    subdirectory paths.
    """
    if app.interface.encrypt and isinstance(app.interface.examples, str) and path.startswith(app.interface.examples):
        # Encrypted example: read the ciphertext and decrypt in memory so
        # plaintext never touches disk.
        with open(safe_join(app.cwd, path), "rb") as encrypted_file:
            encrypted_data = encrypted_file.read()
        file_data = encryptor.decrypt(
            app.interface.encryption_key, encrypted_data)
        return send_file(io.BytesIO(file_data), attachment_filename=os.path.basename(path))
    else:
        return send_file(safe_join(app.cwd, path))
@app.route("/api/queue/push/", methods=["POST"])
@login_check
def queue_push():
    # Enqueue a job ("predict" or "interpret" action) and tell the client
    # its job hash and current position in the queue.
    data = request.json["data"]
    action = request.json["action"]
    job_hash, queue_position = queue.push({"data": data}, action)
    return {"hash": job_hash, "queue_position": queue_position}
@app.route("/api/queue/status/", methods=["POST"])
@login_check
def queue_status():
    # Poll a queued job by its hash; `data` holds the result once done.
    hash = request.json['hash']
    status, data = queue.get_status(hash)
    return {"status": status, "data": data}
def queue_thread(path_to_local_server, test_mode=False):
    """Background worker: pop queued jobs and replay them against the
    local HTTP API, recording success or failure in the queue.

    :param path_to_local_server: base URL of the running local server
    :param test_mode: when True, exit after one iteration (for tests)
    """
    while True:
        try:
            next_job = queue.pop()
            if next_job is not None:
                _, hash, input_data, task_type = next_job
                queue.start_job(hash)
                # Replay the job against our own /api/<task_type>/ route.
                response = requests.post(
                    path_to_local_server + "/api/" + task_type + "/", json=input_data)
                if response.status_code == 200:
                    queue.pass_job(hash, response.json())
                else:
                    queue.fail_job(hash, response.text)
            else:
                # Queue empty: back off briefly before polling again.
                time.sleep(1)
        except Exception as e:
            # Best-effort loop: swallow the error and retry after a pause.
            time.sleep(1)
            pass
        if test_mode:
            break
def start_server(interface, server_name, server_port=None, auth=None, ssl=None):
    """Launch the Flask app serving `interface` on a daemon thread.

    :param interface: the gradio Interface to serve (stashed on `app`)
    :param server_name: host/IP to bind
    :param server_port: preferred port; scans up to TRY_NUM_PORTS above it
    :param auth: None, an iterable of (username, password) pairs, or a
        callable(username, password) -> bool
    :param ssl: optional ssl_context forwarded to Flask's app.run
    :return: (port, path_to_local_server, app, thread)
    :raises ValueError: if queueing is enabled together with auth/encryption
    """
    if server_port is None:
        server_port = INITIAL_PORT_VALUE
    port = get_first_available_port(
        server_port, server_port + TRY_NUM_PORTS
    )
    path_to_local_server = "http://{}:{}/".format(server_name, port)
    if auth is not None:
        if not callable(auth):
            # Normalize credential pairs into a lookup dict.
            app.auth = {account[0]: account[1] for account in auth}
        else:
            app.auth = auth
    else:
        app.auth = None
    app.interface = interface
    app.cwd = os.getcwd()
    # Quiet werkzeug's per-request logging.
    log = logging.getLogger('werkzeug')
    log.setLevel(logging.ERROR)
    if app.interface.enable_queue:
        if auth is not None or app.interface.encrypt:
            raise ValueError("Cannot queue with encryption or authentication enabled.")
        queue.init()
        # Worker thread replays queued jobs against this server over HTTP.
        app.queue_thread = threading.Thread(target=queue_thread, args=(path_to_local_server,))
        app.queue_thread.start()
    if interface.save_to is not None:
        interface.save_to["port"] = port
    app_kwargs = {"port": port, "host": server_name}
    if ssl:
        app_kwargs["ssl_context"] = ssl
    thread = threading.Thread(target=app.run,
                              kwargs=app_kwargs,
                              daemon=True)
    thread.start()
    return port, path_to_local_server, app, thread
def get_state():
    # Read the per-session interface state (None when unset).
    return session.get("state")
def set_state(value):
    # Store per-session interface state in the Flask session cookie.
    session["state"] = value
def close_server(process):
    # Terminate a server running in a separate process and wait for it.
    # NOTE(review): start_server() returns a Thread, which has no
    # terminate()/join(pid) semantics — this helper presumably expects a
    # multiprocessing.Process; confirm against callers.
    process.terminate()
    process.join()
def url_request(url):
    """GET `url` with a JSON content-type header (10s timeout).

    :return: the open urllib response object
    :raises RuntimeError: wrapping any underlying urllib/network error
    """
    try:
        req = urllib.request.Request(
            url=url, headers={"content-type": "application/json"}
        )
        res = urllib.request.urlopen(req, timeout=10)
        return res
    except Exception as e:
        raise RuntimeError(str(e))
def setup_tunnel(local_server_port, endpoint):
    """Request tunnel credentials (from `endpoint`'s /v1/tunnel-request,
    or the default gradio API server) and open a tunnel to the local port.

    NOTE(review): silently returns None when the credential request does
    not come back HTTP 200 — callers should handle that case.
    """
    response = url_request(
        endpoint + '/v1/tunnel-request' if endpoint is not None else GRADIO_API_SERVER)
    if response and response.code == 200:
        try:
            # The API returns a JSON list; the first element holds the
            # tunnel payload (host/port/credentials).
            payload = json.loads(response.read().decode("utf-8"))[0]
            return create_tunnel(payload, LOCALHOST_NAME, local_server_port)
        except Exception as e:
            raise RuntimeError(str(e))
def url_ok(url):
    """Poll `url` with HEAD requests (up to 5 attempts, ~0.5s apart) and
    report whether the server answered with an acceptable status.

    :param url: the URL to probe
    :return: True if a 200/401/302 response was seen within 5 attempts,
        False on connection failure or when no acceptable status appeared.
        (Previously the exhausted-attempts path fell through returning
        None; an explicit False keeps the return type a proper bool while
        remaining falsy for existing callers.)
    """
    try:
        for _ in range(5):
            time.sleep(.500)
            r = requests.head(url, timeout=3)
            if r.status_code in (200, 401, 302):  # 401 or 302 if auth is set
                return True
        return False
    except (ConnectionError, requests.exceptions.ConnectionError):
        return False
| """
Defines helper methods useful for setting up ports, launching servers, and handling `ngrok`
"""
import os
import socket
import threading
from flask import Flask, request, session, jsonify, abort, send_file, render_template, redirect
from flask_cachebuster import CacheBuster
from flask_login import LoginManager, login_user, current_user, login_required
from flask_cors import CORS
import threading
import pkg_resources
import datetime
import time
import json
import urllib.request
from shutil import copyfile
import requests
import sys
import csv
import logging
from gradio.tunneling import create_tunnel
from gradio import encryptor
from gradio import queue
from functools import wraps
import io
import inspect
import traceback
from werkzeug.security import safe_join
INITIAL_PORT_VALUE = int(os.getenv(
'GRADIO_SERVER_PORT', "7860")) # The http server will try to open on port 7860. If not available, 7861, 7862, etc.
TRY_NUM_PORTS = int(os.getenv(
'GRADIO_NUM_PORTS', "100")) # Number of ports to try before giving up and throwing an exception.
LOCALHOST_NAME = os.getenv(
'GRADIO_SERVER_NAME', "127.0.0.1")
GRADIO_API_SERVER = "https://api.gradio.app/v1/tunnel-request"
GRADIO_FEATURE_ANALYTICS_URL = "https://api.gradio.app/gradio-feature-analytics/"
STATIC_TEMPLATE_LIB = pkg_resources.resource_filename("gradio", "templates/")
STATIC_PATH_LIB = pkg_resources.resource_filename("gradio", "templates/frontend/static")
VERSION_FILE = pkg_resources.resource_filename("gradio", "version.txt")
with open(VERSION_FILE) as version_file:
GRADIO_STATIC_ROOT = "https://gradio.s3-us-west-2.amazonaws.com/" + \
version_file.read().strip() + "/static/"
app = Flask(__name__,
template_folder=STATIC_TEMPLATE_LIB,
static_folder="",
static_url_path="/none/")
app.url_map.strict_slashes = False
CORS(app)
cache_buster = CacheBuster(
config={'extensions': ['.js', '.css'], 'hash_size': 5})
cache_buster.init_app(app)
app.secret_key = os.getenv("GRADIO_KEY", "secret")
login_manager = LoginManager()
login_manager.login_view = 'login'
login_manager.init_app(app)
# Hide Flask default message
cli = sys.modules['flask.cli']
cli.show_server_banner = lambda *x: None
class User:
def __init__(self, id):
self.is_authenticated = True
self.is_active = True
self.is_anonymous = False
self.id = id
def get_id(self):
return self.id
@login_manager.user_loader
def load_user(_id):
return User(_id)
def login_check(func):
@wraps(func)
def wrapper(*args, **kwargs):
if app.auth:
@login_required
def func2(*args, **kwargs):
return func(*args, **kwargs)
return func2(*args, **kwargs)
else:
return func(*args, **kwargs)
return wrapper
def get_local_ip_address():
try:
ip_address = requests.get('https://api.ipify.org', timeout=3).text
except (requests.ConnectionError, requests.exceptions.ReadTimeout):
ip_address = "No internet connection"
return ip_address
IP_ADDRESS = get_local_ip_address()
def get_first_available_port(initial, final):
"""
Gets the first open port in a specified range of port numbers
:param initial: the initial value in the range of port numbers
:param final: final (exclusive) value in the range of port numbers, should be greater than `initial`
:return:
"""
for port in range(initial, final):
try:
s = socket.socket() # create a socket object
s.bind((LOCALHOST_NAME, port)) # Bind to the port
s.close()
return port
except OSError:
pass
raise OSError(
"All ports from {} to {} are in use. Please close a port.".format(
initial, final
)
)
@app.route("/", methods=["GET"])
@login_check
def main():
session["state"] = None
return render_template("frontend/index.html", config=app.interface.config)
@app.route("/static/<path:path>", methods=["GET"])
def static_resource(path):
if app.interface.share:
return redirect(GRADIO_STATIC_ROOT + path)
else:
return send_file(safe_join(STATIC_PATH_LIB, path))
# TODO(@aliabid94): this throws a 500 error if app.auth is None (should probalbly just redirect to '/')
@app.route('/login', methods=["GET", "POST"])
def login():
if request.method == "GET":
config = get_config()
return render_template("frontend/index.html", config=config)
elif request.method == "POST":
username = request.form.get("username")
password = request.form.get("password")
if ((not callable(app.auth) and username in app.auth and app.auth[username] == password)
or (callable(app.auth) and app.auth.__call__(username, password))):
login_user(User(username))
return redirect("/")
else:
return abort(401)
@app.route("/config/", methods=["GET"])
def get_config():
if app.interface.auth is None or current_user.is_authenticated:
return jsonify(app.interface.config)
else:
return {"auth_required": True, "auth_message": app.interface.auth_message}
@app.route("/enable_sharing/<path:path>", methods=["GET"])
@login_check
def enable_sharing(path):
if path == "None":
path = None
app.interface.config["share_url"] = path
return jsonify(success=True)
@app.route("/shutdown", methods=['GET'])
def shutdown():
shutdown_func = request.environ.get('werkzeug.server.shutdown')
if shutdown_func is None:
raise RuntimeError('Not running werkzeug')
shutdown_func()
return "Shutting down..."
@app.route("/api/predict/", methods=["POST"])
@login_check
def predict():
raw_input = request.json["data"]
# Capture any errors made and pipe to front end
if app.interface.show_error:
try:
prediction, durations = app.interface.process(raw_input)
except BaseException as error:
traceback.print_exc()
return jsonify({"error": str(error)}), 500
else:
prediction, durations = app.interface.process(raw_input)
avg_durations = []
for i, duration in enumerate(durations):
app.interface.predict_durations[i][0] += duration
app.interface.predict_durations[i][1] += 1
avg_durations.append(app.interface.predict_durations[i][0]
/ app.interface.predict_durations[i][1])
app.interface.config["avg_durations"] = avg_durations
output = {"data": prediction, "durations": durations, "avg_durations": avg_durations}
if app.interface.allow_flagging == "auto":
try:
flag_index = flag_data(raw_input, prediction,
flag_option=(None if app.interface.flagging_options is None else ""),
username=current_user.id if current_user.is_authenticated else None)
output["flag_index"] = flag_index
except Exception as e:
print(str(e))
pass
return jsonify(output)
def get_types(cls_set, component):
docset = []
types = []
if component == "input":
for cls in cls_set:
doc = inspect.getdoc(cls.preprocess)
doc_lines = doc.split("\n")
docset.append(doc_lines[1].split(":")[-1])
types.append(doc_lines[1].split(")")[0].split("(")[-1])
else:
for cls in cls_set:
doc = inspect.getdoc(cls.postprocess)
doc_lines = doc.split("\n")
docset.append(doc_lines[-1].split(":")[-1])
types.append(doc_lines[-1].split(")")[0].split("(")[-1])
return docset, types
@app.route("/api/", methods=["GET"])
def api_docs():
inputs = [type(inp) for inp in app.interface.input_components]
outputs = [type(out) for out in app.interface.output_components]
input_types_doc, input_types = get_types(inputs, "input")
output_types_doc, output_types = get_types(outputs, "output")
input_names = [type(inp).__name__ for inp in app.interface.input_components]
output_names = [type(out).__name__ for out in app.interface.output_components]
sample_inputs = [inp.generate_sample() for inp in app.interface.input_components]
docs = {
"inputs": input_names,
"outputs": output_names,
"len_inputs": len(inputs),
"len_outputs": len(outputs),
"inputs_lower": [name.lower() for name in input_names],
"outputs_lower": [name.lower() for name in output_names],
"input_types": input_types,
"output_types": output_types,
"input_types_doc": input_types_doc,
"output_types_doc": output_types_doc,
"sample_inputs": sample_inputs
}
return render_template("api_docs.html", **docs)
def log_feature_analytics(feature):
if app.interface.analytics_enabled:
try:
requests.post(GRADIO_FEATURE_ANALYTICS_URL,
data={
'ip_address': IP_ADDRESS,
'feature': feature}, timeout=3)
except (requests.ConnectionError, requests.exceptions.ReadTimeout):
pass # do not push analytics if no network
def flag_data(input_data, output_data, flag_option=None, flag_index=None, username=None, flag_path=None):
if flag_path is None:
flag_path = os.path.join(app.cwd, app.interface.flagging_dir)
log_fp = "{}/log.csv".format(flag_path)
encryption_key = app.interface.encryption_key if app.interface.encrypt else None
is_new = not os.path.exists(log_fp)
if flag_index is None:
csv_data = []
for i, interface in enumerate(app.interface.input_components):
csv_data.append(interface.save_flagged(
flag_path, app.interface.config["input_components"][i]["label"], input_data[i], encryption_key))
for i, interface in enumerate(app.interface.output_components):
csv_data.append(interface.save_flagged(
flag_path, app.interface.config["output_components"][i]["label"], output_data[i], encryption_key) if output_data[i] is not None else "")
if flag_option is not None:
csv_data.append(flag_option)
if username is not None:
csv_data.append(username)
csv_data.append(str(datetime.datetime.now()))
if is_new:
headers = [interface["label"]
for interface in app.interface.config["input_components"]]
headers += [interface["label"]
for interface in app.interface.config["output_components"]]
if app.interface.flagging_options is not None:
headers.append("flag")
if username is not None:
headers.append("username")
headers.append("timestamp")
def replace_flag_at_index(file_content):
file_content = io.StringIO(file_content)
content = list(csv.reader(file_content))
header = content[0]
flag_col_index = header.index("flag")
content[flag_index][flag_col_index] = flag_option
output = io.StringIO()
writer = csv.writer(output)
writer.writerows(content)
return output.getvalue()
if app.interface.encrypt:
output = io.StringIO()
if not is_new:
with open(log_fp, "rb") as csvfile:
encrypted_csv = csvfile.read()
decrypted_csv = encryptor.decrypt(
app.interface.encryption_key, encrypted_csv)
file_content = decrypted_csv.decode()
if flag_index is not None:
file_content = replace_flag_at_index(file_content)
output.write(file_content)
writer = csv.writer(output)
if flag_index is None:
if is_new:
writer.writerow(headers)
writer.writerow(csv_data)
with open(log_fp, "wb") as csvfile:
csvfile.write(encryptor.encrypt(
app.interface.encryption_key, output.getvalue().encode()))
else:
if flag_index is None:
with open(log_fp, "a", newline="") as csvfile:
writer = csv.writer(csvfile)
if is_new:
writer.writerow(headers)
writer.writerow(csv_data)
else:
with open(log_fp) as csvfile:
file_content = csvfile.read()
file_content = replace_flag_at_index(file_content)
with open(log_fp, "w", newline="") as csvfile: # newline parameter needed for Windows
csvfile.write(file_content)
with open(log_fp, "r") as csvfile:
line_count = len([None for row in csv.reader(csvfile)]) - 1
return line_count
@app.route("/api/flag/", methods=["POST"])
@login_check
def flag():
log_feature_analytics('flag')
data = request.json['data']
flag_data(data['input_data'], data['output_data'], data.get("flag_option"), data.get("flag_index"),
current_user.id if current_user.is_authenticated else None)
return jsonify(success=True)
@app.route("/api/interpret/", methods=["POST"])
@login_check
def interpret():
log_feature_analytics('interpret')
raw_input = request.json["data"]
interpretation_scores, alternative_outputs = app.interface.interpret(
raw_input)
return jsonify({
"interpretation_scores": interpretation_scores,
"alternative_outputs": alternative_outputs
})
@app.route("/file/<path:path>", methods=["GET"])
@login_check
def file(path):
    """Serve a file relative to app.cwd, decrypting in memory when
    encryption is enabled and the path is inside the examples directory.

    safe_join rejects absolute paths and ".." traversal out of app.cwd,
    preventing path disclosure (CWE-22) from the user-controlled `path`.
    """
    if app.interface.encrypt and isinstance(app.interface.examples, str) and path.startswith(app.interface.examples):
        # Encrypted example: decrypt the ciphertext in memory before serving.
        with open(safe_join(app.cwd, path), "rb") as encrypted_file:
            encrypted_data = encrypted_file.read()
        file_data = encryptor.decrypt(
            app.interface.encryption_key, encrypted_data)
        return send_file(io.BytesIO(file_data), attachment_filename=os.path.basename(path))
    else:
        return send_file(safe_join(app.cwd, path))
@app.route("/api/queue/push/", methods=["POST"])
@login_check
def queue_push():
data = request.json["data"]
action = request.json["action"]
job_hash, queue_position = queue.push({"data": data}, action)
return {"hash": job_hash, "queue_position": queue_position}
@app.route("/api/queue/status/", methods=["POST"])
@login_check
def queue_status():
hash = request.json['hash']
status, data = queue.get_status(hash)
return {"status": status, "data": data}
def queue_thread(path_to_local_server, test_mode=False):
while True:
try:
next_job = queue.pop()
if next_job is not None:
_, hash, input_data, task_type = next_job
queue.start_job(hash)
response = requests.post(
path_to_local_server + "/api/" + task_type + "/", json=input_data)
if response.status_code == 200:
queue.pass_job(hash, response.json())
else:
queue.fail_job(hash, response.text)
else:
time.sleep(1)
except Exception as e:
time.sleep(1)
pass
if test_mode:
break
def start_server(interface, server_name, server_port=None, auth=None, ssl=None):
if server_port is None:
server_port = INITIAL_PORT_VALUE
port = get_first_available_port(
server_port, server_port + TRY_NUM_PORTS
)
path_to_local_server = "http://{}:{}/".format(server_name, port)
if auth is not None:
if not callable(auth):
app.auth = {account[0]: account[1] for account in auth}
else:
app.auth = auth
else:
app.auth = None
app.interface = interface
app.cwd = os.getcwd()
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
if app.interface.enable_queue:
if auth is not None or app.interface.encrypt:
raise ValueError("Cannot queue with encryption or authentication enabled.")
queue.init()
app.queue_thread = threading.Thread(target=queue_thread, args=(path_to_local_server,))
app.queue_thread.start()
if interface.save_to is not None:
interface.save_to["port"] = port
app_kwargs = {"port": port, "host": server_name}
if ssl:
app_kwargs["ssl_context"] = ssl
thread = threading.Thread(target=app.run,
kwargs=app_kwargs,
daemon=True)
thread.start()
return port, path_to_local_server, app, thread
def get_state():
return session.get("state")
def set_state(value):
session["state"] = value
def close_server(process):
process.terminate()
process.join()
def url_request(url):
try:
req = urllib.request.Request(
url=url, headers={"content-type": "application/json"}
)
res = urllib.request.urlopen(req, timeout=10)
return res
except Exception as e:
raise RuntimeError(str(e))
def setup_tunnel(local_server_port, endpoint):
response = url_request(
endpoint + '/v1/tunnel-request' if endpoint is not None else GRADIO_API_SERVER)
if response and response.code == 200:
try:
payload = json.loads(response.read().decode("utf-8"))[0]
return create_tunnel(payload, LOCALHOST_NAME, local_server_port)
except Exception as e:
raise RuntimeError(str(e))
def url_ok(url):
try:
for _ in range(5):
time.sleep(.500)
r = requests.head(url, timeout=3)
if r.status_code in (200, 401, 302): # 401 or 302 if auth is set
return True
except (ConnectionError, requests.exceptions.ConnectionError):
return False
| path_disclosure | {
"code": [
" path = secure_filename(path)",
" with open(os.path.join(app.cwd, path), \"rb\") as encrypted_file:",
" return send_file(os.path.join(app.cwd, path))"
],
"line_no": [
380,
382,
388
]
} | {
"code": [
" with open(safe_join(app.cwd, path), \"rb\") as encrypted_file:",
" return send_file(safe_join(app.cwd, path))"
],
"line_no": [
381,
387
]
} |
import os
import .socket
import .threading
from flask import Flask, request, VAR_43, jsonify, abort, send_file, render_template, redirect
from flask_cachebuster import CacheBuster
from flask_login import LoginManager, login_user, current_user, login_required
from flask_cors import CORS
import .threading
import pkg_resources
import datetime
import time
import json
import .urllib.request
from shutil import copyfile
import .requests
import .sys
import csv
import .logging
from gradio.tunneling import create_tunnel
from gradio import encryptor
from gradio import queue
from functools import wraps
import io
import inspect
import traceback
from werkzeug.security import .safe_join
VAR_0 = int(os.getenv(
'GRADIO_SERVER_PORT', "7860")) # The http server will try to open on VAR_72 7860. If not available, 7861, 7862, etc.
VAR_1 = int(os.getenv(
'GRADIO_NUM_PORTS', "100")) # Number of ports to try before giving up and throwing an exception.
VAR_2 = os.getenv(
'GRADIO_SERVER_NAME', "127.0.0.1")
VAR_3 = "https://api.gradio.app/v1/tunnel-request"
VAR_4 = "https://api.gradio.app/gradio-VAR_20-analytics/"
VAR_5 = pkg_resources.resource_filename("gradio", "templates/")
VAR_6 = pkg_resources.resource_filename("gradio", "templates/frontend/static")
VAR_7 = pkg_resources.resource_filename("gradio", "version.txt")
with open(VAR_7) as version_file:
VAR_39 = "https://gradio.s3-us-west-2.amazonaws.com/" + \
version_file.read().strip() + "/static/"
VAR_8 = Flask(__name__,
template_folder=VAR_5,
static_folder="",
static_url_path="/none/")
VAR_8.url_map.strict_slashes = False
CORS(VAR_8)
VAR_9 = CacheBuster(
VAR_78={'extensions': ['.js', '.css'], 'hash_size': 5})
VAR_9.init_app(VAR_8)
VAR_8.secret_key = os.getenv("GRADIO_KEY", "secret")
VAR_10 = LoginManager()
VAR_10.login_view = 'login'
VAR_10.init_app(VAR_8)
VAR_11 = sys.modules['flask.cli']
VAR_11.show_server_banner = lambda *x: None
class CLASS_0:
def __init__(self, VAR_40):
self.is_authenticated = True
self.is_active = True
self.is_anonymous = False
self.id = VAR_40
def FUNC_28(self):
return self.id
@VAR_10.user_loader
def FUNC_0(VAR_12):
return CLASS_0(VAR_12)
def FUNC_1(VAR_13):
@wraps(VAR_13)
def FUNC_29(*VAR_41, **VAR_42):
if VAR_8.auth:
@login_required
def FUNC_31(*VAR_41, **VAR_42):
return VAR_13(*VAR_41, **VAR_42)
return FUNC_31(*VAR_41, **VAR_42)
else:
return VAR_13(*VAR_41, **VAR_42)
return FUNC_29
def FUNC_2():
try:
VAR_77 = requests.get('https://api.ipify.org', timeout=3).text
except (requests.ConnectionError, requests.exceptions.ReadTimeout):
VAR_77 = "No internet connection"
return VAR_77
VAR_14 = FUNC_2()
def FUNC_3(VAR_15, VAR_16):
for VAR_72 in range(VAR_15, VAR_16):
try:
VAR_90 = socket.socket() # create a socket object
VAR_90.bind((VAR_2, VAR_72)) # Bind to the VAR_72
VAR_90.close()
return VAR_72
except OSError:
pass
raise OSError(
"All ports from {} to {} are in use. Please close a VAR_72.".format(
VAR_15, VAR_16
)
)
@VAR_8.route("/", methods=["GET"])
@FUNC_1
def FUNC_4():
VAR_43["state"] = None
return render_template("frontend/index.html", VAR_78=VAR_8.interface.config)
@VAR_8.route("/static/<VAR_17:path>", methods=["GET"])
def FUNC_5(VAR_17):
if VAR_8.interface.share:
return redirect(VAR_39 + VAR_17)
else:
return send_file(safe_join(VAR_6, VAR_17))
@VAR_8.route('/login', methods=["GET", "POST"])
def FUNC_6():
if request.method == "GET":
VAR_78 = FUNC_7()
return render_template("frontend/index.html", VAR_78=config)
elif request.method == "POST":
VAR_25 = request.form.get("username")
VAR_91 = request.form.get("password")
if ((not callable(VAR_8.auth) and VAR_25 in VAR_8.auth and VAR_8.auth[VAR_25] == VAR_91)
or (callable(VAR_8.auth) and VAR_8.auth.__call__(VAR_25, VAR_91))):
login_user(CLASS_0(VAR_25))
return redirect("/")
else:
return abort(401)
@VAR_8.route("/VAR_78/", methods=["GET"])
def FUNC_7():
if VAR_8.interface.auth is None or current_user.is_authenticated:
return jsonify(VAR_8.interface.config)
else:
return {"auth_required": True, "auth_message": VAR_8.interface.auth_message}
@VAR_8.route("/FUNC_8/<VAR_17:path>", methods=["GET"])
@FUNC_1
def FUNC_8(VAR_17):
if VAR_17 == "None":
VAR_17 = None
VAR_8.interface.config["share_url"] = VAR_17
return jsonify(success=True)
@VAR_8.route("/shutdown", methods=['GET'])
def FUNC_9():
VAR_44 = request.environ.get('werkzeug.server.shutdown')
if VAR_44 is None:
raise RuntimeError('Not running werkzeug')
VAR_44()
return "Shutting down..."
@VAR_8.route("/api/FUNC_10/", methods=["POST"])
@FUNC_1
def FUNC_10():
VAR_45 = request.json["data"]
if VAR_8.interface.show_error:
try:
VAR_79, VAR_80 = VAR_8.interface.process(VAR_45)
except BaseException as error:
traceback.print_exc()
return jsonify({"error": str(error)}), 500
else:
VAR_79, VAR_80 = VAR_8.interface.process(VAR_45)
VAR_46 = []
for i, duration in enumerate(VAR_80):
VAR_8.interface.predict_durations[i][0] += duration
VAR_8.interface.predict_durations[i][1] += 1
VAR_46.append(VAR_8.interface.predict_durations[i][0]
/ VAR_8.interface.predict_durations[i][1])
VAR_8.interface.config["avg_durations"] = VAR_46
VAR_47 = {"data": VAR_79, "durations": VAR_80, "avg_durations": VAR_46}
if VAR_8.interface.allow_flagging == "auto":
try:
VAR_24 = FUNC_14(VAR_45, VAR_79,
VAR_23=(None if VAR_8.interface.flagging_options is None else ""),
VAR_25=current_user.id if current_user.is_authenticated else None)
VAR_47["flag_index"] = VAR_24
except Exception as e:
print(str(e))
pass
return jsonify(VAR_47)
def FUNC_11(VAR_18, VAR_19):
VAR_48 = []
VAR_49 = []
if VAR_19 == "input":
for cls in VAR_18:
VAR_92 = inspect.getdoc(cls.preprocess)
VAR_93 = VAR_92.split("\n")
VAR_48.append(VAR_93[1].split(":")[-1])
VAR_49.append(VAR_93[1].split(")")[0].split("(")[-1])
else:
for cls in VAR_18:
VAR_92 = inspect.getdoc(cls.postprocess)
VAR_93 = VAR_92.split("\n")
VAR_48.append(VAR_93[-1].split(":")[-1])
VAR_49.append(VAR_93[-1].split(")")[0].split("(")[-1])
return VAR_48, VAR_49
@VAR_8.route("/api/", methods=["GET"])
def FUNC_12():
VAR_50 = [type(inp) for inp in VAR_8.interface.input_components]
VAR_51 = [type(out) for out in VAR_8.interface.output_components]
VAR_52, VAR_53 = FUNC_11(VAR_50, "input")
VAR_54, VAR_55 = FUNC_11(VAR_51, "output")
VAR_56 = [type(inp).__name__ for inp in VAR_8.interface.input_components]
VAR_57 = [type(out).__name__ for out in VAR_8.interface.output_components]
VAR_58 = [inp.generate_sample() for inp in VAR_8.interface.input_components]
VAR_59 = {
"inputs": VAR_56,
"outputs": VAR_57,
"len_inputs": len(VAR_50),
"len_outputs": len(VAR_51),
"inputs_lower": [name.lower() for name in VAR_56],
"outputs_lower": [name.lower() for name in VAR_57],
"input_types": VAR_53,
"output_types": VAR_55,
"input_types_doc": VAR_52,
"output_types_doc": VAR_54,
"sample_inputs": VAR_58
}
return render_template("api_docs.html", **VAR_59)
def FUNC_13(VAR_20):
if VAR_8.interface.analytics_enabled:
try:
requests.post(VAR_4,
VAR_64={
'ip_address': VAR_14,
'feature': VAR_20}, timeout=3)
except (requests.ConnectionError, requests.exceptions.ReadTimeout):
pass # do not push analytics if no network
def FUNC_14(VAR_21, VAR_22, VAR_23=None, VAR_24=None, VAR_25=None, VAR_26=None):
if VAR_26 is None:
VAR_26 = os.path.join(VAR_8.cwd, VAR_8.interface.flagging_dir)
VAR_60 = "{}/VAR_73.csv".format(VAR_26)
VAR_61 = VAR_8.interface.encryption_key if VAR_8.interface.encrypt else None
VAR_62 = not os.path.exists(VAR_60)
if VAR_24 is None:
VAR_81 = []
for i, VAR_29 in enumerate(VAR_8.interface.input_components):
VAR_81.append(VAR_29.save_flagged(
VAR_26, VAR_8.interface.config["input_components"][i]["label"], VAR_21[i], VAR_61))
for i, VAR_29 in enumerate(VAR_8.interface.output_components):
VAR_81.append(VAR_29.save_flagged(
VAR_26, VAR_8.interface.config["output_components"][i]["label"], VAR_22[i], VAR_61) if VAR_22[i] is not None else "")
if VAR_23 is not None:
VAR_81.append(VAR_23)
if VAR_25 is not None:
VAR_81.append(VAR_25)
VAR_81.append(str(datetime.datetime.now()))
if VAR_62:
VAR_94 = [VAR_29["label"]
for VAR_29 in VAR_8.interface.config["input_components"]]
VAR_94 += [VAR_29["label"]
for VAR_29 in VAR_8.interface.config["output_components"]]
if VAR_8.interface.flagging_options is not None:
VAR_94.append("flag")
if VAR_25 is not None:
VAR_94.append("username")
VAR_94.append("timestamp")
def FUNC_30(VAR_63):
file_content = io.StringIO(VAR_63)
VAR_82 = list(csv.reader(VAR_63))
VAR_83 = VAR_82[0]
VAR_84 = VAR_83.index("flag")
VAR_82[VAR_24][VAR_84] = VAR_23
VAR_47 = io.StringIO()
VAR_85 = csv.writer(VAR_47)
VAR_85.writerows(VAR_82)
return VAR_47.getvalue()
if VAR_8.interface.encrypt:
VAR_47 = io.StringIO()
if not VAR_62:
with open(VAR_60, "rb") as csvfile:
VAR_99 = csvfile.read()
VAR_100 = encryptor.decrypt(
VAR_8.interface.encryption_key, VAR_99)
VAR_63 = VAR_100.decode()
if VAR_24 is not None:
VAR_63 = FUNC_30(VAR_63)
VAR_47.write(VAR_63)
VAR_85 = csv.writer(VAR_47)
if VAR_24 is None:
if VAR_62:
VAR_85.writerow(VAR_94)
VAR_85.writerow(VAR_81)
with open(VAR_60, "wb") as csvfile:
csvfile.write(encryptor.encrypt(
VAR_8.interface.encryption_key, VAR_47.getvalue().encode()))
else:
if VAR_24 is None:
with open(VAR_60, "a", newline="") as csvfile:
VAR_85 = csv.writer(csvfile)
if VAR_62:
VAR_85.writerow(VAR_94)
VAR_85.writerow(VAR_81)
else:
with open(VAR_60) as csvfile:
VAR_63 = csvfile.read()
VAR_63 = FUNC_30(VAR_63)
with open(VAR_60, "w", newline="") as csvfile: # newline parameter needed for Windows
csvfile.write(VAR_63)
with open(VAR_60, "r") as csvfile:
VAR_86 = len([None for row in csv.reader(csvfile)]) - 1
return VAR_86
@VAR_8.route("/api/FUNC_15/", methods=["POST"])
@FUNC_1
def FUNC_15():
FUNC_13('flag')
VAR_64 = request.json['data']
FUNC_14(VAR_64['input_data'], VAR_64['output_data'], VAR_64.get("flag_option"), VAR_64.get("flag_index"),
current_user.id if current_user.is_authenticated else None)
return jsonify(success=True)
@VAR_8.route("/api/FUNC_16/", methods=["POST"])
@FUNC_1
def FUNC_16():
FUNC_13('interpret')
VAR_45 = request.json["data"]
VAR_65, VAR_66 = VAR_8.interface.interpret(
VAR_45)
return jsonify({
"interpretation_scores": VAR_65,
"alternative_outputs": VAR_66
})
@VAR_8.route("/FUNC_17/<VAR_17:path>", methods=["GET"])
@FUNC_1
def FUNC_17(VAR_17):
path = secure_filename(VAR_17)
if VAR_8.interface.encrypt and isinstance(VAR_8.interface.examples, str) and VAR_17.startswith(VAR_8.interface.examples):
with open(os.path.join(VAR_8.cwd, VAR_17), "rb") as encrypted_file:
VAR_95 = encrypted_file.read()
VAR_87 = encryptor.decrypt(
VAR_8.interface.encryption_key, VAR_95)
return send_file(io.BytesIO(VAR_87), attachment_filename=os.path.basename(VAR_17))
else:
return send_file(os.path.join(VAR_8.cwd, VAR_17))
@VAR_8.route("/api/queue/push/", methods=["POST"])
@FUNC_1
def FUNC_18():
VAR_64 = request.json["data"]
VAR_67 = request.json["action"]
VAR_68, VAR_69 = queue.push({"data": VAR_64}, VAR_67)
return {"hash": VAR_68, "queue_position": VAR_69}
@VAR_8.route("/api/queue/VAR_71/", methods=["POST"])
@FUNC_1
def FUNC_19():
VAR_70 = request.json['hash']
VAR_71, VAR_64 = queue.get_status(VAR_70)
return {"status": VAR_71, "data": VAR_64}
def FUNC_20(VAR_27, VAR_28=False):
while True:
try:
VAR_96 = queue.pop()
if VAR_96 is not None:
VAR_101, VAR_70, VAR_21, VAR_102 = VAR_96
queue.start_job(VAR_70)
VAR_76 = requests.post(
VAR_27 + "/api/" + VAR_102 + "/", json=VAR_21)
if VAR_76.status_code == 200:
queue.pass_job(VAR_70, VAR_76.json())
else:
queue.fail_job(VAR_70, VAR_76.text)
else:
time.sleep(1)
except Exception as e:
time.sleep(1)
pass
if VAR_28:
break
def FUNC_21(VAR_29, VAR_30, VAR_31=None, VAR_32=None, VAR_33=None):
if VAR_31 is None:
VAR_31 = VAR_0
VAR_72 = FUNC_3(
VAR_31, VAR_31 + VAR_1
)
VAR_27 = "http://{}:{}/".format(VAR_30, VAR_72)
if VAR_32 is not None:
if not callable(VAR_32):
VAR_8.auth = {account[0]: account[1] for account in VAR_32}
else:
VAR_8.auth = VAR_32
else:
VAR_8.auth = None
VAR_8.interface = VAR_29
VAR_8.cwd = os.getcwd()
VAR_73 = logging.getLogger('werkzeug')
VAR_73.setLevel(logging.ERROR)
if VAR_8.interface.enable_queue:
if VAR_32 is not None or VAR_8.interface.encrypt:
raise ValueError("Cannot queue with encryption or authentication enabled.")
queue.init()
VAR_8.queue_thread = threading.Thread(target=FUNC_20, VAR_41=(VAR_27,))
VAR_8.queue_thread.start()
if VAR_29.save_to is not None:
VAR_29.save_to["port"] = VAR_72
VAR_74 = {"port": VAR_72, "host": VAR_30}
if VAR_33:
VAR_74["ssl_context"] = VAR_33
VAR_75 = threading.Thread(target=VAR_8.run,
VAR_42=VAR_74,
daemon=True)
VAR_75.start()
return VAR_72, VAR_27, VAR_8, VAR_75
def FUNC_22():
return VAR_43.get("state")
def FUNC_23(VAR_34):
VAR_43["state"] = VAR_34
def FUNC_24(VAR_35):
process.terminate()
VAR_35.join()
def FUNC_25(VAR_36):
try:
VAR_88 = urllib.request.Request(
VAR_36=url, VAR_94={"content-type": "application/json"}
)
VAR_89 = urllib.request.urlopen(VAR_88, timeout=10)
return VAR_89
except Exception as e:
raise RuntimeError(str(e))
def FUNC_26(VAR_37, VAR_38):
VAR_76 = FUNC_25(
VAR_38 + '/v1/tunnel-request' if VAR_38 is not None else VAR_3)
if VAR_76 and VAR_76.code == 200:
try:
VAR_97 = json.loads(VAR_76.read().decode("utf-8"))[0]
return create_tunnel(VAR_97, VAR_2, VAR_37)
except Exception as e:
raise RuntimeError(str(e))
def FUNC_27(VAR_36):
try:
for VAR_101 in range(5):
time.sleep(.500)
VAR_98 = requests.head(VAR_36, timeout=3)
if VAR_98.status_code in (200, 401, 302): # 401 or 302 if VAR_32 is set
return True
except (ConnectionError, requests.exceptions.ConnectionError):
return False
|
import os
import .socket
import .threading
from flask import Flask, request, VAR_43, jsonify, abort, send_file, render_template, redirect
from flask_cachebuster import CacheBuster
from flask_login import LoginManager, login_user, current_user, login_required
from flask_cors import CORS
import .threading
import pkg_resources
import datetime
import time
import json
import .urllib.request
from shutil import copyfile
import .requests
import .sys
import csv
import .logging
from gradio.tunneling import create_tunnel
from gradio import encryptor
from gradio import queue
from functools import wraps
import io
import inspect
import traceback
from werkzeug.security import .safe_join
VAR_0 = int(os.getenv(
'GRADIO_SERVER_PORT', "7860")) # The http server will try to open on VAR_72 7860. If not available, 7861, 7862, etc.
VAR_1 = int(os.getenv(
'GRADIO_NUM_PORTS', "100")) # Number of ports to try before giving up and throwing an exception.
VAR_2 = os.getenv(
'GRADIO_SERVER_NAME', "127.0.0.1")
VAR_3 = "https://api.gradio.app/v1/tunnel-request"
VAR_4 = "https://api.gradio.app/gradio-VAR_20-analytics/"
VAR_5 = pkg_resources.resource_filename("gradio", "templates/")
VAR_6 = pkg_resources.resource_filename("gradio", "templates/frontend/static")
VAR_7 = pkg_resources.resource_filename("gradio", "version.txt")
with open(VAR_7) as version_file:
VAR_39 = "https://gradio.s3-us-west-2.amazonaws.com/" + \
version_file.read().strip() + "/static/"
VAR_8 = Flask(__name__,
template_folder=VAR_5,
static_folder="",
static_url_path="/none/")
VAR_8.url_map.strict_slashes = False
CORS(VAR_8)
VAR_9 = CacheBuster(
VAR_78={'extensions': ['.js', '.css'], 'hash_size': 5})
VAR_9.init_app(VAR_8)
VAR_8.secret_key = os.getenv("GRADIO_KEY", "secret")
VAR_10 = LoginManager()
VAR_10.login_view = 'login'
VAR_10.init_app(VAR_8)
VAR_11 = sys.modules['flask.cli']
VAR_11.show_server_banner = lambda *x: None
class CLASS_0:
def __init__(self, VAR_40):
self.is_authenticated = True
self.is_active = True
self.is_anonymous = False
self.id = VAR_40
def FUNC_28(self):
return self.id
@VAR_10.user_loader
def FUNC_0(VAR_12):
return CLASS_0(VAR_12)
def FUNC_1(VAR_13):
@wraps(VAR_13)
def FUNC_29(*VAR_41, **VAR_42):
if VAR_8.auth:
@login_required
def FUNC_31(*VAR_41, **VAR_42):
return VAR_13(*VAR_41, **VAR_42)
return FUNC_31(*VAR_41, **VAR_42)
else:
return VAR_13(*VAR_41, **VAR_42)
return FUNC_29
def FUNC_2():
try:
VAR_77 = requests.get('https://api.ipify.org', timeout=3).text
except (requests.ConnectionError, requests.exceptions.ReadTimeout):
VAR_77 = "No internet connection"
return VAR_77
VAR_14 = FUNC_2()
def FUNC_3(VAR_15, VAR_16):
for VAR_72 in range(VAR_15, VAR_16):
try:
VAR_90 = socket.socket() # create a socket object
VAR_90.bind((VAR_2, VAR_72)) # Bind to the VAR_72
VAR_90.close()
return VAR_72
except OSError:
pass
raise OSError(
"All ports from {} to {} are in use. Please close a VAR_72.".format(
VAR_15, VAR_16
)
)
@VAR_8.route("/", methods=["GET"])
@FUNC_1
def FUNC_4():
VAR_43["state"] = None
return render_template("frontend/index.html", VAR_78=VAR_8.interface.config)
@VAR_8.route("/static/<VAR_17:path>", methods=["GET"])
def FUNC_5(VAR_17):
if VAR_8.interface.share:
return redirect(VAR_39 + VAR_17)
else:
return send_file(safe_join(VAR_6, VAR_17))
@VAR_8.route('/login', methods=["GET", "POST"])
def FUNC_6():
if request.method == "GET":
VAR_78 = FUNC_7()
return render_template("frontend/index.html", VAR_78=config)
elif request.method == "POST":
VAR_25 = request.form.get("username")
VAR_91 = request.form.get("password")
if ((not callable(VAR_8.auth) and VAR_25 in VAR_8.auth and VAR_8.auth[VAR_25] == VAR_91)
or (callable(VAR_8.auth) and VAR_8.auth.__call__(VAR_25, VAR_91))):
login_user(CLASS_0(VAR_25))
return redirect("/")
else:
return abort(401)
@VAR_8.route("/VAR_78/", methods=["GET"])
def FUNC_7():
if VAR_8.interface.auth is None or current_user.is_authenticated:
return jsonify(VAR_8.interface.config)
else:
return {"auth_required": True, "auth_message": VAR_8.interface.auth_message}
@VAR_8.route("/FUNC_8/<VAR_17:path>", methods=["GET"])
@FUNC_1
def FUNC_8(VAR_17):
if VAR_17 == "None":
VAR_17 = None
VAR_8.interface.config["share_url"] = VAR_17
return jsonify(success=True)
@VAR_8.route("/shutdown", methods=['GET'])
def FUNC_9():
VAR_44 = request.environ.get('werkzeug.server.shutdown')
if VAR_44 is None:
raise RuntimeError('Not running werkzeug')
VAR_44()
return "Shutting down..."
@VAR_8.route("/api/FUNC_10/", methods=["POST"])
@FUNC_1
def FUNC_10():
VAR_45 = request.json["data"]
if VAR_8.interface.show_error:
try:
VAR_79, VAR_80 = VAR_8.interface.process(VAR_45)
except BaseException as error:
traceback.print_exc()
return jsonify({"error": str(error)}), 500
else:
VAR_79, VAR_80 = VAR_8.interface.process(VAR_45)
VAR_46 = []
for i, duration in enumerate(VAR_80):
VAR_8.interface.predict_durations[i][0] += duration
VAR_8.interface.predict_durations[i][1] += 1
VAR_46.append(VAR_8.interface.predict_durations[i][0]
/ VAR_8.interface.predict_durations[i][1])
VAR_8.interface.config["avg_durations"] = VAR_46
VAR_47 = {"data": VAR_79, "durations": VAR_80, "avg_durations": VAR_46}
if VAR_8.interface.allow_flagging == "auto":
try:
VAR_24 = FUNC_14(VAR_45, VAR_79,
VAR_23=(None if VAR_8.interface.flagging_options is None else ""),
VAR_25=current_user.id if current_user.is_authenticated else None)
VAR_47["flag_index"] = VAR_24
except Exception as e:
print(str(e))
pass
return jsonify(VAR_47)
def FUNC_11(VAR_18, VAR_19):
VAR_48 = []
VAR_49 = []
if VAR_19 == "input":
for cls in VAR_18:
VAR_92 = inspect.getdoc(cls.preprocess)
VAR_93 = VAR_92.split("\n")
VAR_48.append(VAR_93[1].split(":")[-1])
VAR_49.append(VAR_93[1].split(")")[0].split("(")[-1])
else:
for cls in VAR_18:
VAR_92 = inspect.getdoc(cls.postprocess)
VAR_93 = VAR_92.split("\n")
VAR_48.append(VAR_93[-1].split(":")[-1])
VAR_49.append(VAR_93[-1].split(")")[0].split("(")[-1])
return VAR_48, VAR_49
@VAR_8.route("/api/", methods=["GET"])
def FUNC_12():
VAR_50 = [type(inp) for inp in VAR_8.interface.input_components]
VAR_51 = [type(out) for out in VAR_8.interface.output_components]
VAR_52, VAR_53 = FUNC_11(VAR_50, "input")
VAR_54, VAR_55 = FUNC_11(VAR_51, "output")
VAR_56 = [type(inp).__name__ for inp in VAR_8.interface.input_components]
VAR_57 = [type(out).__name__ for out in VAR_8.interface.output_components]
VAR_58 = [inp.generate_sample() for inp in VAR_8.interface.input_components]
VAR_59 = {
"inputs": VAR_56,
"outputs": VAR_57,
"len_inputs": len(VAR_50),
"len_outputs": len(VAR_51),
"inputs_lower": [name.lower() for name in VAR_56],
"outputs_lower": [name.lower() for name in VAR_57],
"input_types": VAR_53,
"output_types": VAR_55,
"input_types_doc": VAR_52,
"output_types_doc": VAR_54,
"sample_inputs": VAR_58
}
return render_template("api_docs.html", **VAR_59)
def FUNC_13(VAR_20):
if VAR_8.interface.analytics_enabled:
try:
requests.post(VAR_4,
VAR_64={
'ip_address': VAR_14,
'feature': VAR_20}, timeout=3)
except (requests.ConnectionError, requests.exceptions.ReadTimeout):
pass # do not push analytics if no network
def FUNC_14(VAR_21, VAR_22, VAR_23=None, VAR_24=None, VAR_25=None, VAR_26=None):
if VAR_26 is None:
VAR_26 = os.path.join(VAR_8.cwd, VAR_8.interface.flagging_dir)
VAR_60 = "{}/VAR_73.csv".format(VAR_26)
VAR_61 = VAR_8.interface.encryption_key if VAR_8.interface.encrypt else None
VAR_62 = not os.path.exists(VAR_60)
if VAR_24 is None:
VAR_81 = []
for i, VAR_29 in enumerate(VAR_8.interface.input_components):
VAR_81.append(VAR_29.save_flagged(
VAR_26, VAR_8.interface.config["input_components"][i]["label"], VAR_21[i], VAR_61))
for i, VAR_29 in enumerate(VAR_8.interface.output_components):
VAR_81.append(VAR_29.save_flagged(
VAR_26, VAR_8.interface.config["output_components"][i]["label"], VAR_22[i], VAR_61) if VAR_22[i] is not None else "")
if VAR_23 is not None:
VAR_81.append(VAR_23)
if VAR_25 is not None:
VAR_81.append(VAR_25)
VAR_81.append(str(datetime.datetime.now()))
if VAR_62:
VAR_94 = [VAR_29["label"]
for VAR_29 in VAR_8.interface.config["input_components"]]
VAR_94 += [VAR_29["label"]
for VAR_29 in VAR_8.interface.config["output_components"]]
if VAR_8.interface.flagging_options is not None:
VAR_94.append("flag")
if VAR_25 is not None:
VAR_94.append("username")
VAR_94.append("timestamp")
def FUNC_30(VAR_63):
file_content = io.StringIO(VAR_63)
VAR_82 = list(csv.reader(VAR_63))
VAR_83 = VAR_82[0]
VAR_84 = VAR_83.index("flag")
VAR_82[VAR_24][VAR_84] = VAR_23
VAR_47 = io.StringIO()
VAR_85 = csv.writer(VAR_47)
VAR_85.writerows(VAR_82)
return VAR_47.getvalue()
if VAR_8.interface.encrypt:
VAR_47 = io.StringIO()
if not VAR_62:
with open(VAR_60, "rb") as csvfile:
VAR_99 = csvfile.read()
VAR_100 = encryptor.decrypt(
VAR_8.interface.encryption_key, VAR_99)
VAR_63 = VAR_100.decode()
if VAR_24 is not None:
VAR_63 = FUNC_30(VAR_63)
VAR_47.write(VAR_63)
VAR_85 = csv.writer(VAR_47)
if VAR_24 is None:
if VAR_62:
VAR_85.writerow(VAR_94)
VAR_85.writerow(VAR_81)
with open(VAR_60, "wb") as csvfile:
csvfile.write(encryptor.encrypt(
VAR_8.interface.encryption_key, VAR_47.getvalue().encode()))
else:
if VAR_24 is None:
with open(VAR_60, "a", newline="") as csvfile:
VAR_85 = csv.writer(csvfile)
if VAR_62:
VAR_85.writerow(VAR_94)
VAR_85.writerow(VAR_81)
else:
with open(VAR_60) as csvfile:
VAR_63 = csvfile.read()
VAR_63 = FUNC_30(VAR_63)
with open(VAR_60, "w", newline="") as csvfile: # newline parameter needed for Windows
csvfile.write(VAR_63)
with open(VAR_60, "r") as csvfile:
VAR_86 = len([None for row in csv.reader(csvfile)]) - 1
return VAR_86
@VAR_8.route("/api/FUNC_15/", methods=["POST"])
@FUNC_1
def FUNC_15():
FUNC_13('flag')
VAR_64 = request.json['data']
FUNC_14(VAR_64['input_data'], VAR_64['output_data'], VAR_64.get("flag_option"), VAR_64.get("flag_index"),
current_user.id if current_user.is_authenticated else None)
return jsonify(success=True)
@VAR_8.route("/api/FUNC_16/", methods=["POST"])
@FUNC_1
def FUNC_16():
FUNC_13('interpret')
VAR_45 = request.json["data"]
VAR_65, VAR_66 = VAR_8.interface.interpret(
VAR_45)
return jsonify({
"interpretation_scores": VAR_65,
"alternative_outputs": VAR_66
})
@VAR_8.route("/FUNC_17/<VAR_17:path>", methods=["GET"])
@FUNC_1
def FUNC_17(VAR_17):
if VAR_8.interface.encrypt and isinstance(VAR_8.interface.examples, str) and VAR_17.startswith(VAR_8.interface.examples):
with open(safe_join(VAR_8.cwd, VAR_17), "rb") as encrypted_file:
VAR_95 = encrypted_file.read()
VAR_87 = encryptor.decrypt(
VAR_8.interface.encryption_key, VAR_95)
return send_file(io.BytesIO(VAR_87), attachment_filename=os.path.basename(VAR_17))
else:
return send_file(safe_join(VAR_8.cwd, VAR_17))
@VAR_8.route("/api/queue/push/", methods=["POST"])
@FUNC_1
def FUNC_18():
VAR_64 = request.json["data"]
VAR_67 = request.json["action"]
VAR_68, VAR_69 = queue.push({"data": VAR_64}, VAR_67)
return {"hash": VAR_68, "queue_position": VAR_69}
@VAR_8.route("/api/queue/VAR_71/", methods=["POST"])
@FUNC_1
def FUNC_19():
VAR_70 = request.json['hash']
VAR_71, VAR_64 = queue.get_status(VAR_70)
return {"status": VAR_71, "data": VAR_64}
def FUNC_20(VAR_27, VAR_28=False):
while True:
try:
VAR_96 = queue.pop()
if VAR_96 is not None:
VAR_101, VAR_70, VAR_21, VAR_102 = VAR_96
queue.start_job(VAR_70)
VAR_76 = requests.post(
VAR_27 + "/api/" + VAR_102 + "/", json=VAR_21)
if VAR_76.status_code == 200:
queue.pass_job(VAR_70, VAR_76.json())
else:
queue.fail_job(VAR_70, VAR_76.text)
else:
time.sleep(1)
except Exception as e:
time.sleep(1)
pass
if VAR_28:
break
def FUNC_21(VAR_29, VAR_30, VAR_31=None, VAR_32=None, VAR_33=None):
if VAR_31 is None:
VAR_31 = VAR_0
VAR_72 = FUNC_3(
VAR_31, VAR_31 + VAR_1
)
VAR_27 = "http://{}:{}/".format(VAR_30, VAR_72)
if VAR_32 is not None:
if not callable(VAR_32):
VAR_8.auth = {account[0]: account[1] for account in VAR_32}
else:
VAR_8.auth = VAR_32
else:
VAR_8.auth = None
VAR_8.interface = VAR_29
VAR_8.cwd = os.getcwd()
VAR_73 = logging.getLogger('werkzeug')
VAR_73.setLevel(logging.ERROR)
if VAR_8.interface.enable_queue:
if VAR_32 is not None or VAR_8.interface.encrypt:
raise ValueError("Cannot queue with encryption or authentication enabled.")
queue.init()
VAR_8.queue_thread = threading.Thread(target=FUNC_20, VAR_41=(VAR_27,))
VAR_8.queue_thread.start()
if VAR_29.save_to is not None:
VAR_29.save_to["port"] = VAR_72
VAR_74 = {"port": VAR_72, "host": VAR_30}
if VAR_33:
VAR_74["ssl_context"] = VAR_33
VAR_75 = threading.Thread(target=VAR_8.run,
VAR_42=VAR_74,
daemon=True)
VAR_75.start()
return VAR_72, VAR_27, VAR_8, VAR_75
def FUNC_22():
return VAR_43.get("state")
def FUNC_23(VAR_34):
VAR_43["state"] = VAR_34
def FUNC_24(VAR_35):
process.terminate()
VAR_35.join()
def FUNC_25(VAR_36):
try:
VAR_88 = urllib.request.Request(
VAR_36=url, VAR_94={"content-type": "application/json"}
)
VAR_89 = urllib.request.urlopen(VAR_88, timeout=10)
return VAR_89
except Exception as e:
raise RuntimeError(str(e))
def FUNC_26(VAR_37, VAR_38):
VAR_76 = FUNC_25(
VAR_38 + '/v1/tunnel-request' if VAR_38 is not None else VAR_3)
if VAR_76 and VAR_76.code == 200:
try:
VAR_97 = json.loads(VAR_76.read().decode("utf-8"))[0]
return create_tunnel(VAR_97, VAR_2, VAR_37)
except Exception as e:
raise RuntimeError(str(e))
def FUNC_27(VAR_36):
try:
for VAR_101 in range(5):
time.sleep(.500)
VAR_98 = requests.head(VAR_36, timeout=3)
if VAR_98.status_code in (200, 401, 302): # 401 or 302 if VAR_32 is set
return True
except (ConnectionError, requests.exceptions.ConnectionError):
return False
| [
4,
31,
40,
47,
53,
62,
63,
66,
67,
74,
77,
78,
82,
83,
91,
96,
97,
104,
105,
107,
108,
129,
130,
136,
137,
144,
145,
146,
161,
162,
169,
170,
178,
179,
187,
188,
193,
220,
221,
238,
239,
263,
264,
274,
275,
282,
306,
317,
353,
362,
363,
375,
376,
389,
390,
398,
399,
406,
407,
428,
429,
463,
465,
468,
471,
475,
476,
486,
487,
495,
498,
499,
509,
1,
2,
3,
110,
111,
112,
113,
114,
115
] | [
4,
31,
40,
47,
53,
62,
63,
66,
67,
74,
77,
78,
82,
83,
91,
96,
97,
104,
105,
107,
108,
129,
130,
136,
137,
144,
145,
146,
161,
162,
169,
170,
178,
179,
187,
188,
193,
220,
221,
238,
239,
263,
264,
274,
275,
282,
306,
317,
353,
362,
363,
375,
376,
388,
389,
397,
398,
405,
406,
427,
428,
462,
464,
467,
470,
474,
475,
485,
486,
494,
497,
498,
508,
1,
2,
3,
110,
111,
112,
113,
114,
115
] |
2CWE-89
| from pypika import Parameter, functions
from pypika.enums import SqlTypes
from pypika.terms import Criterion
from tortoise import Model
from tortoise.backends.base.executor import BaseExecutor
from tortoise.fields import BigIntField, Field, IntField, SmallIntField
from tortoise.filters import (
contains,
ends_with,
insensitive_contains,
insensitive_ends_with,
insensitive_exact,
insensitive_starts_with,
starts_with,
)
def mysql_contains(field: Field, value: str) -> Criterion:
return functions.Cast(field, SqlTypes.CHAR).like(f"%{value}%")
def mysql_starts_with(field: Field, value: str) -> Criterion:
return functions.Cast(field, SqlTypes.CHAR).like(f"{value}%")
def mysql_ends_with(field: Field, value: str) -> Criterion:
return functions.Cast(field, SqlTypes.CHAR).like(f"%{value}")
def mysql_insensitive_exact(field: Field, value: str) -> Criterion:
return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).eq(functions.Upper(f"{value}"))
def mysql_insensitive_contains(field: Field, value: str) -> Criterion:
return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).like(functions.Upper(f"%{value}%"))
def mysql_insensitive_starts_with(field: Field, value: str) -> Criterion:
return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).like(functions.Upper(f"{value}%"))
def mysql_insensitive_ends_with(field: Field, value: str) -> Criterion:
return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).like(functions.Upper(f"%{value}"))
class MySQLExecutor(BaseExecutor):
FILTER_FUNC_OVERRIDE = {
contains: mysql_contains,
starts_with: mysql_starts_with,
ends_with: mysql_ends_with,
insensitive_exact: mysql_insensitive_exact,
insensitive_contains: mysql_insensitive_contains,
insensitive_starts_with: mysql_insensitive_starts_with,
insensitive_ends_with: mysql_insensitive_ends_with,
}
EXPLAIN_PREFIX = "EXPLAIN FORMAT=JSON"
def parameter(self, pos: int) -> Parameter:
return Parameter("%s")
async def _process_insert_result(self, instance: Model, results: int) -> None:
pk_field_object = self.model._meta.pk
if (
isinstance(pk_field_object, (SmallIntField, IntField, BigIntField))
and pk_field_object.generated
):
instance.pk = results
# MySQL can only generate a single ROWID
# so if any other primary key, it won't generate what we want.
| from pypika import Parameter, functions
from pypika.enums import SqlTypes
from pypika.terms import Criterion
from tortoise import Model
from tortoise.backends.base.executor import BaseExecutor
from tortoise.fields import BigIntField, IntField, SmallIntField
from tortoise.filters import (
Like,
Term,
ValueWrapper,
contains,
ends_with,
format_quotes,
insensitive_contains,
insensitive_ends_with,
insensitive_exact,
insensitive_starts_with,
starts_with,
)
class StrWrapper(ValueWrapper): # type: ignore
"""
Naive str wrapper that doesn't use the monkey-patched pypika ValueWraper for MySQL
"""
def get_value_sql(self, **kwargs):
quote_char = kwargs.get("secondary_quote_char") or ""
value = self.value.replace(quote_char, quote_char * 2)
return format_quotes(value, quote_char)
def escape_like(val: str) -> str:
return val.replace("\\", "\\\\\\\\").replace("%", "\\%").replace("_", "\\_")
def mysql_contains(field: Term, value: str) -> Criterion:
return Like(
functions.Cast(field, SqlTypes.CHAR), StrWrapper(f"%{escape_like(value)}%"), escape=""
)
def mysql_starts_with(field: Term, value: str) -> Criterion:
return Like(
functions.Cast(field, SqlTypes.CHAR), StrWrapper(f"{escape_like(value)}%"), escape=""
)
def mysql_ends_with(field: Term, value: str) -> Criterion:
return Like(
functions.Cast(field, SqlTypes.CHAR), StrWrapper(f"%{escape_like(value)}"), escape=""
)
def mysql_insensitive_exact(field: Term, value: str) -> Criterion:
return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).eq(functions.Upper(str(value)))
def mysql_insensitive_contains(field: Term, value: str) -> Criterion:
return Like(
functions.Upper(functions.Cast(field, SqlTypes.CHAR)),
functions.Upper(StrWrapper(f"%{escape_like(value)}%")),
escape="",
)
def mysql_insensitive_starts_with(field: Term, value: str) -> Criterion:
return Like(
functions.Upper(functions.Cast(field, SqlTypes.CHAR)),
functions.Upper(StrWrapper(f"{escape_like(value)}%")),
escape="",
)
def mysql_insensitive_ends_with(field: Term, value: str) -> Criterion:
return Like(
functions.Upper(functions.Cast(field, SqlTypes.CHAR)),
functions.Upper(StrWrapper(f"%{escape_like(value)}")),
escape="",
)
class MySQLExecutor(BaseExecutor):
FILTER_FUNC_OVERRIDE = {
contains: mysql_contains,
starts_with: mysql_starts_with,
ends_with: mysql_ends_with,
insensitive_exact: mysql_insensitive_exact,
insensitive_contains: mysql_insensitive_contains,
insensitive_starts_with: mysql_insensitive_starts_with,
insensitive_ends_with: mysql_insensitive_ends_with,
}
EXPLAIN_PREFIX = "EXPLAIN FORMAT=JSON"
def parameter(self, pos: int) -> Parameter:
return Parameter("%s")
async def _process_insert_result(self, instance: Model, results: int) -> None:
pk_field_object = self.model._meta.pk
if (
isinstance(pk_field_object, (SmallIntField, IntField, BigIntField))
and pk_field_object.generated
):
instance.pk = results
# MySQL can only generate a single ROWID
# so if any other primary key, it won't generate what we want.
| sql | {
"code": [
"from tortoise.fields import BigIntField, Field, IntField, SmallIntField",
"def mysql_contains(field: Field, value: str) -> Criterion:",
" return functions.Cast(field, SqlTypes.CHAR).like(f\"%{value}%\")",
"def mysql_starts_with(field: Field, value: str) -> Criterion:",
" return functions.Cast(field, SqlTypes.CHAR).like(f\"{value}%\")",
"def mysql_ends_with(field: Field, value: str) -> Criterion:",
" return functions.Cast(field, SqlTypes.CHAR).like(f\"%{value}\")",
"def mysql_insensitive_exact(field: Field, value: str) -> Criterion:",
" return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).eq(functions.Upper(f\"{value}\"))",
"def mysql_insensitive_contains(field: Field, value: str) -> Criterion:",
" return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).like(functions.Upper(f\"%{value}%\"))",
"def mysql_insensitive_starts_with(field: Field, value: str) -> Criterion:",
" return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).like(functions.Upper(f\"{value}%\"))",
"def mysql_insensitive_ends_with(field: Field, value: str) -> Criterion:",
" return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).like(functions.Upper(f\"%{value}\"))"
],
"line_no": [
7,
19,
20,
23,
24,
27,
28,
31,
32,
35,
36,
39,
40,
43,
44
]
} | {
"code": [
"from tortoise.fields import BigIntField, IntField, SmallIntField",
" Like,",
" Term,",
" ValueWrapper,",
" format_quotes,",
" \"\"\"",
" \"\"\"",
" def get_value_sql(self, **kwargs):",
" value = self.value.replace(quote_char, quote_char * 2)",
" return format_quotes(value, quote_char)",
" return val.replace(\"\\\\\", \"\\\\\\\\\\\\\\\\\").replace(\"%\", \"\\\\%\").replace(\"_\", \"\\\\_\")",
" return Like(",
" functions.Cast(field, SqlTypes.CHAR), StrWrapper(f\"%{escape_like(value)}%\"), escape=\"\"",
"def mysql_starts_with(field: Term, value: str) -> Criterion:",
" functions.Cast(field, SqlTypes.CHAR), StrWrapper(f\"{escape_like(value)}%\"), escape=\"\"",
" )",
"def mysql_ends_with(field: Term, value: str) -> Criterion:",
" return Like(",
" functions.Cast(field, SqlTypes.CHAR), StrWrapper(f\"%{escape_like(value)}\"), escape=\"\"",
" )",
"def mysql_insensitive_exact(field: Term, value: str) -> Criterion:",
" return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).eq(functions.Upper(str(value)))",
"def mysql_insensitive_contains(field: Term, value: str) -> Criterion:",
" functions.Upper(functions.Cast(field, SqlTypes.CHAR)),",
" functions.Upper(StrWrapper(f\"%{escape_like(value)}%\")),",
" escape=\"\",",
" )",
"def mysql_insensitive_starts_with(field: Term, value: str) -> Criterion:",
" functions.Upper(functions.Cast(field, SqlTypes.CHAR)),",
" escape=\"\",",
" )",
"def mysql_insensitive_ends_with(field: Term, value: str) -> Criterion:",
" return Like(",
" functions.Upper(functions.Cast(field, SqlTypes.CHAR)),",
" functions.Upper(StrWrapper(f\"%{escape_like(value)}\")),",
" escape=\"\",",
" )"
],
"line_no": [
7,
9,
10,
11,
14,
24,
26,
28,
30,
31,
35,
39,
40,
44,
46,
47,
50,
51,
52,
53,
56,
57,
60,
62,
63,
64,
65,
68,
70,
72,
73,
76,
77,
78,
79,
80,
81
]
} | from pypika import Parameter, functions
from pypika.enums import SqlTypes
from pypika.terms import Criterion
from tortoise import Model
from tortoise.backends.base.executor import BaseExecutor
from tortoise.fields import BigIntField, Field, IntField, SmallIntField
from tortoise.filters import (
contains,
ends_with,
insensitive_contains,
insensitive_ends_with,
insensitive_exact,
insensitive_starts_with,
starts_with,
)
def FUNC_0(VAR_0: Field, VAR_1: str) -> Criterion:
return functions.Cast(VAR_0, SqlTypes.CHAR).like(f"%{VAR_1}%")
def FUNC_1(VAR_0: Field, VAR_1: str) -> Criterion:
return functions.Cast(VAR_0, SqlTypes.CHAR).like(f"{VAR_1}%")
def FUNC_2(VAR_0: Field, VAR_1: str) -> Criterion:
return functions.Cast(VAR_0, SqlTypes.CHAR).like(f"%{VAR_1}")
def FUNC_3(VAR_0: Field, VAR_1: str) -> Criterion:
return functions.Upper(functions.Cast(VAR_0, SqlTypes.CHAR)).eq(functions.Upper(f"{VAR_1}"))
def FUNC_4(VAR_0: Field, VAR_1: str) -> Criterion:
return functions.Upper(functions.Cast(VAR_0, SqlTypes.CHAR)).like(functions.Upper(f"%{VAR_1}%"))
def FUNC_5(VAR_0: Field, VAR_1: str) -> Criterion:
return functions.Upper(functions.Cast(VAR_0, SqlTypes.CHAR)).like(functions.Upper(f"{VAR_1}%"))
def FUNC_6(VAR_0: Field, VAR_1: str) -> Criterion:
return functions.Upper(functions.Cast(VAR_0, SqlTypes.CHAR)).like(functions.Upper(f"%{VAR_1}"))
class CLASS_0(BaseExecutor):
VAR_2 = {
contains: FUNC_0,
starts_with: FUNC_1,
ends_with: FUNC_2,
insensitive_exact: FUNC_3,
insensitive_contains: FUNC_4,
insensitive_starts_with: FUNC_5,
insensitive_ends_with: FUNC_6,
}
VAR_3 = "EXPLAIN FORMAT=JSON"
def FUNC_7(self, VAR_4: int) -> Parameter:
return Parameter("%s")
async def FUNC_8(self, VAR_5: Model, VAR_6: int) -> None:
VAR_7 = self.model._meta.pk
if (
isinstance(VAR_7, (SmallIntField, IntField, BigIntField))
and VAR_7.generated
):
VAR_5.pk = VAR_6
| from pypika import Parameter, functions
from pypika.enums import SqlTypes
from pypika.terms import Criterion
from tortoise import Model
from tortoise.backends.base.executor import BaseExecutor
from tortoise.fields import BigIntField, IntField, SmallIntField
from tortoise.filters import (
Like,
Term,
ValueWrapper,
contains,
ends_with,
format_quotes,
insensitive_contains,
insensitive_ends_with,
insensitive_exact,
insensitive_starts_with,
starts_with,
)
class CLASS_0(ValueWrapper): # type: ignore
def FUNC_8(self, **VAR_3):
VAR_9 = VAR_3.get("secondary_quote_char") or ""
VAR_2 = self.value.replace(VAR_9, quote_char * 2)
return format_quotes(VAR_2, VAR_9)
def FUNC_0(VAR_0: str) -> str:
return VAR_0.replace("\\", "\\\\\\\\").replace("%", "\\%").replace("_", "\\_")
def FUNC_1(VAR_1: Term, VAR_2: str) -> Criterion:
return Like(
functions.Cast(VAR_1, SqlTypes.CHAR), CLASS_0(f"%{FUNC_0(VAR_2)}%"), escape=""
)
def FUNC_2(VAR_1: Term, VAR_2: str) -> Criterion:
return Like(
functions.Cast(VAR_1, SqlTypes.CHAR), CLASS_0(f"{FUNC_0(VAR_2)}%"), escape=""
)
def FUNC_3(VAR_1: Term, VAR_2: str) -> Criterion:
return Like(
functions.Cast(VAR_1, SqlTypes.CHAR), CLASS_0(f"%{FUNC_0(VAR_2)}"), escape=""
)
def FUNC_4(VAR_1: Term, VAR_2: str) -> Criterion:
return functions.Upper(functions.Cast(VAR_1, SqlTypes.CHAR)).eq(functions.Upper(str(VAR_2)))
def FUNC_5(VAR_1: Term, VAR_2: str) -> Criterion:
return Like(
functions.Upper(functions.Cast(VAR_1, SqlTypes.CHAR)),
functions.Upper(CLASS_0(f"%{FUNC_0(VAR_2)}%")),
escape="",
)
def FUNC_6(VAR_1: Term, VAR_2: str) -> Criterion:
return Like(
functions.Upper(functions.Cast(VAR_1, SqlTypes.CHAR)),
functions.Upper(CLASS_0(f"{FUNC_0(VAR_2)}%")),
escape="",
)
def FUNC_7(VAR_1: Term, VAR_2: str) -> Criterion:
return Like(
functions.Upper(functions.Cast(VAR_1, SqlTypes.CHAR)),
functions.Upper(CLASS_0(f"%{FUNC_0(VAR_2)}")),
escape="",
)
class CLASS_1(BaseExecutor):
VAR_4 = {
contains: FUNC_1,
starts_with: FUNC_2,
ends_with: FUNC_3,
insensitive_exact: FUNC_4,
insensitive_contains: FUNC_5,
insensitive_starts_with: FUNC_6,
insensitive_ends_with: FUNC_7,
}
VAR_5 = "EXPLAIN FORMAT=JSON"
def FUNC_9(self, VAR_6: int) -> Parameter:
return Parameter("%s")
async def FUNC_10(self, VAR_7: Model, VAR_8: int) -> None:
VAR_10 = self.model._meta.pk
if (
isinstance(VAR_10, (SmallIntField, IntField, BigIntField))
and VAR_10.generated
):
VAR_7.pk = VAR_8
| [
4,
17,
18,
21,
22,
25,
26,
29,
30,
33,
34,
37,
38,
41,
42,
45,
46,
58,
61,
69,
70,
71,
72
] | [
4,
21,
22,
27,
32,
33,
36,
37,
42,
43,
48,
49,
54,
55,
58,
59,
66,
67,
74,
75,
82,
83,
95,
98,
106,
107,
108,
109,
24,
25,
26
] |
1CWE-79
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2019 tribe29 GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
import pytest # type: ignore[import]
import cmk.gui.htmllib as htmllib
from cmk.gui import escaping
def test_htmllib_integration(register_builtin_html):
assert escaping.escape_attribute("") == ""
assert escaping.escape_text("") == ""
@pytest.mark.parametrize("inp,out", [
("\">alert(1)", "">alert(1)"),
(None, ""),
(1, "1"),
(htmllib.HTML("\">alert(1)"), "\">alert(1)"),
(1.1, "1.1"),
("<", "<"),
("'", "'"),
])
def test_escape_attribute(inp, out):
assert escaping.escape_attribute(inp) == out
@pytest.mark.parametrize("inp,out", [
("">alert(1)", "\">alert(1)"),
("<", "<"),
])
def test_unescape_attribute(inp, out):
assert escaping.unescape_attributes(inp) == out
@pytest.mark.parametrize(
"inp,out",
[
("<script>alert(1)</script>", "<script>alert(1)</script>"),
("<h1>abc</h1>", None),
("<h2>abc</h2>", None),
("<b>abc</b>", None),
("<tt>abc</tt>", None),
("<i>abc</i>", None),
("<u>abc</u>", None),
("<br>", None),
("<nobr></nobr>", None),
("<pre></pre>", None),
("<sup></sup>", None),
("<p></p>", None),
("<li></li>", None),
("<ul></ul>", None),
("<ol></ol>", None),
("<a href=\"xyz\">abc</a>", None),
("<a href=\"xyz\" target=\"123\">abc</a>", None),
# Links with target 1st and href 2nd will not be unescaped
("<a target=\"123\" href=\"xyz\">abc</a>",
"<a target="123" href="xyz">abc</a>"),
("blah<a href=\"link0\">aaa</a>blah<a href=\"link1\" target=\"ttt\">bbb</a>", None),
("\"I am not a link\" target=\"still not a link\"",
""I am not a link" target="still not a link""),
# The next test is perverse: it contains the string `target=` inside of an
# <a> tag (which must be unescaped) as well as outside (which must not).
("<a href=\"aaa\">bbb</a>\"not a link\" target=\"really\"<a href=\"ccc\" target=\"ttt\">ddd</a>",
"<a href=\"aaa\">bbb</a>"not a link" target="really"<a href=\"ccc\" target=\"ttt\">ddd</a>"
),
(
"<a href=\"xyz\">abc</a><script>alert(1)</script><a href=\"xyz\">abc</a>",
"<a href=\"xyz\">abc</a><script>alert(1)</script><a href=\"xyz\">abc</a>",
),
(" ", None),
# At the moment also javascript URLs are accepted. This will be refused in the next step
("<a href=\"javascript:alert(1)\">abc</a>", None),
])
def test_escape_text(inp, out):
if out is None:
out = inp
assert escaping.escape_text(inp) == out
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2019 tribe29 GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
import pytest # type: ignore[import]
import cmk.gui.htmllib as htmllib
from cmk.gui import escaping
def test_htmllib_integration(register_builtin_html):
assert escaping.escape_attribute("") == ""
assert escaping.escape_text("") == ""
@pytest.mark.parametrize("inp,out", [
("\">alert(1)", "">alert(1)"),
(None, ""),
(1, "1"),
(htmllib.HTML("\">alert(1)"), "\">alert(1)"),
(1.1, "1.1"),
("<", "<"),
("'", "'"),
])
def test_escape_attribute(inp, out):
assert escaping.escape_attribute(inp) == out
@pytest.mark.parametrize("inp,out", [
("">alert(1)", "\">alert(1)"),
("<", "<"),
])
def test_unescape_attribute(inp, out):
assert escaping.unescape_attributes(inp) == out
@pytest.mark.parametrize(
"inp,out",
[
("<script>alert(1)</script>", "<script>alert(1)</script>"),
("<h1>abc</h1>", None),
("<h2>abc</h2>", None),
("<b>abc</b>", None),
("<tt>abc</tt>", None),
("<i>abc</i>", None),
("<u>abc</u>", None),
("<br>", None),
("<nobr></nobr>", None),
("<pre></pre>", None),
("<sup></sup>", None),
("<p></p>", None),
("<li></li>", None),
("<ul></ul>", None),
("<ol></ol>", None),
("<a href=\"xyz\">abc</a>", None),
("<a href=\"xyz\" target=\"123\">abc</a>", None),
# Links with target 1st and href 2nd will not be unescaped
("<a target=\"123\" href=\"xyz\">abc</a>",
"<a target="123" href="xyz">abc</a>"),
("blah<a href=\"link0\">aaa</a>blah<a href=\"link1\" target=\"ttt\">bbb</a>", None),
("\"I am not a link\" target=\"still not a link\"",
""I am not a link" target="still not a link""),
# The next test is perverse: it contains the string `target=` inside of an
# <a> tag (which must be unescaped) as well as outside (which must not).
("<a href=\"aaa\">bbb</a>\"not a link\" target=\"really\"<a href=\"ccc\" target=\"ttt\">ddd</a>",
"<a href=\"aaa\">bbb</a>"not a link" target="really"<a href=\"ccc\" target=\"ttt\">ddd</a>"
),
(
"<a href=\"xyz\">abc</a><script>alert(1)</script><a href=\"xyz\">abc</a>",
"<a href=\"xyz\">abc</a><script>alert(1)</script><a href=\"xyz\">abc</a>",
),
(" ", None),
# Only http/https are allowed as schemes
("<a href=\"http://checkmk.com/\">abc</a>", None),
("<a href=\"https://checkmk.com/\">abc</a>", None),
("<a href=\"HTTP://CHECKMK.COM/\">abc</a>", None),
("<a href=\"ftp://checkmk.com/\">abc</a>",
"<a href="ftp://checkmk.com/">abc</a>"),
("<a href=\"javascript:alert(1)\">abc</a>",
"<a href="javascript:alert(1)">abc</a>"),
])
def test_escape_text(inp, out):
if out is None:
out = inp
assert escaping.escape_text(inp) == out
| xss | {
"code": [
" (\"<a href=\\\"javascript:alert(1)\\\">abc</a>\", None),"
],
"line_no": [
76
]
} | {
"code": [
" (\"<a href=\\\"http://checkmk.com/\\\">abc</a>\", None),",
" (\"<a href=\\\"https://checkmk.com/\\\">abc</a>\", None),",
" (\"<a href=\\\"HTTP://CHECKMK.COM/\\\">abc</a>\", None),",
" (\"<a href=\\\"ftp://checkmk.com/\\\">abc</a>\",",
" \"<a href="ftp://checkmk.com/">abc</a>\"),",
" (\"<a href=\\\"javascript:alert(1)\\\">abc</a>\","
],
"line_no": [
76,
77,
78,
79,
80,
81
]
} |
import pytest # type: ignore[import]
import cmk.gui.htmllib as htmllib
from cmk.gui import escaping
def FUNC_0(VAR_0):
assert escaping.escape_attribute("") == ""
assert escaping.escape_text("") == ""
@pytest.mark.parametrize("inp,out", [
("\">alert(1)", "">alert(1)"),
(None, ""),
(1, "1"),
(htmllib.HTML("\">alert(1)"), "\">alert(1)"),
(1.1, "1.1"),
("<", "<"),
("'", "'"),
])
def FUNC_1(VAR_1, VAR_2):
assert escaping.escape_attribute(VAR_1) == VAR_2
@pytest.mark.parametrize("inp,out", [
("">alert(1)", "\">alert(1)"),
("<", "<"),
])
def FUNC_2(VAR_1, VAR_2):
assert escaping.unescape_attributes(VAR_1) == VAR_2
@pytest.mark.parametrize(
"inp,out",
[
("<script>alert(1)</script>", "<script>alert(1)</script>"),
("<h1>abc</h1>", None),
("<h2>abc</h2>", None),
("<b>abc</b>", None),
("<tt>abc</tt>", None),
("<i>abc</i>", None),
("<u>abc</u>", None),
("<br>", None),
("<nobr></nobr>", None),
("<pre></pre>", None),
("<sup></sup>", None),
("<p></p>", None),
("<li></li>", None),
("<ul></ul>", None),
("<ol></ol>", None),
("<a href=\"xyz\">abc</a>", None),
("<a href=\"xyz\" target=\"123\">abc</a>", None),
("<a target=\"123\" href=\"xyz\">abc</a>",
"<a target="123" href="xyz">abc</a>"),
("blah<a href=\"link0\">aaa</a>blah<a href=\"link1\" target=\"ttt\">bbb</a>", None),
("\"I am not a link\" target=\"still not a link\"",
""I am not a link" target="still not a link""),
("<a href=\"aaa\">bbb</a>\"not a link\" target=\"really\"<a href=\"ccc\" target=\"ttt\">ddd</a>",
"<a href=\"aaa\">bbb</a>"not a link" target="really"<a href=\"ccc\" target=\"ttt\">ddd</a>"
),
(
"<a href=\"xyz\">abc</a><script>alert(1)</script><a href=\"xyz\">abc</a>",
"<a href=\"xyz\">abc</a><script>alert(1)</script><a href=\"xyz\">abc</a>",
),
(" ", None),
("<a href=\"javascript:alert(1)\">abc</a>", None),
])
def FUNC_3(VAR_1, VAR_2):
if VAR_2 is None:
VAR_2 = VAR_1
assert escaping.escape_text(VAR_1) == VAR_2
|
import pytest # type: ignore[import]
import cmk.gui.htmllib as htmllib
from cmk.gui import escaping
def FUNC_0(VAR_0):
assert escaping.escape_attribute("") == ""
assert escaping.escape_text("") == ""
@pytest.mark.parametrize("inp,out", [
("\">alert(1)", "">alert(1)"),
(None, ""),
(1, "1"),
(htmllib.HTML("\">alert(1)"), "\">alert(1)"),
(1.1, "1.1"),
("<", "<"),
("'", "'"),
])
def FUNC_1(VAR_1, VAR_2):
assert escaping.escape_attribute(VAR_1) == VAR_2
@pytest.mark.parametrize("inp,out", [
("">alert(1)", "\">alert(1)"),
("<", "<"),
])
def FUNC_2(VAR_1, VAR_2):
assert escaping.unescape_attributes(VAR_1) == VAR_2
@pytest.mark.parametrize(
"inp,out",
[
("<script>alert(1)</script>", "<script>alert(1)</script>"),
("<h1>abc</h1>", None),
("<h2>abc</h2>", None),
("<b>abc</b>", None),
("<tt>abc</tt>", None),
("<i>abc</i>", None),
("<u>abc</u>", None),
("<br>", None),
("<nobr></nobr>", None),
("<pre></pre>", None),
("<sup></sup>", None),
("<p></p>", None),
("<li></li>", None),
("<ul></ul>", None),
("<ol></ol>", None),
("<a href=\"xyz\">abc</a>", None),
("<a href=\"xyz\" target=\"123\">abc</a>", None),
("<a target=\"123\" href=\"xyz\">abc</a>",
"<a target="123" href="xyz">abc</a>"),
("blah<a href=\"link0\">aaa</a>blah<a href=\"link1\" target=\"ttt\">bbb</a>", None),
("\"I am not a link\" target=\"still not a link\"",
""I am not a link" target="still not a link""),
("<a href=\"aaa\">bbb</a>\"not a link\" target=\"really\"<a href=\"ccc\" target=\"ttt\">ddd</a>",
"<a href=\"aaa\">bbb</a>"not a link" target="really"<a href=\"ccc\" target=\"ttt\">ddd</a>"
),
(
"<a href=\"xyz\">abc</a><script>alert(1)</script><a href=\"xyz\">abc</a>",
"<a href=\"xyz\">abc</a><script>alert(1)</script><a href=\"xyz\">abc</a>",
),
(" ", None),
("<a href=\"http://checkmk.com/\">abc</a>", None),
("<a href=\"https://checkmk.com/\">abc</a>", None),
("<a href=\"HTTP://CHECKMK.COM/\">abc</a>", None),
("<a href=\"ftp://checkmk.com/\">abc</a>",
"<a href="ftp://checkmk.com/">abc</a>"),
("<a href=\"javascript:alert(1)\">abc</a>",
"<a href="javascript:alert(1)">abc</a>"),
])
def FUNC_3(VAR_1, VAR_2):
if VAR_2 is None:
VAR_2 = VAR_1
assert escaping.escape_text(VAR_1) == VAR_2
| [
1,
2,
3,
4,
5,
6,
8,
11,
12,
16,
17,
29,
30,
37,
38,
59,
65,
66,
75,
82
] | [
1,
2,
3,
4,
5,
6,
8,
11,
12,
16,
17,
29,
30,
37,
38,
59,
65,
66,
75,
88
] |
0CWE-22
| # pylint: disable=R1732
import io
import os
from ..InputSource import InputSource
from ..messages import *
from .main import scriptPath
from .status import splitStatus
class DataFileRequester:
def __init__(self, type=None, fallback=None):
self.type = type
if self.type not in ("readonly", "latest"):
raise Exception(f"Bad value for DataFileRequester.type, got '{type}'.")
# fallback is another requester, used if the main one fails.
self.fallback = fallback
def fetch(self, *segs, **kwargs):
str = kwargs.get("str", False)
okayToFail = kwargs.get("okayToFail", False)
fileType = kwargs.get("type", self.type)
location = self._buildPath(segs=segs, fileType=fileType)
try:
if str:
with open(location, encoding="utf-8") as fh:
return fh.read()
else:
return open(location, encoding="utf-8")
except OSError:
if self.fallback:
try:
return self.fallback.fetch(*segs, str=str, okayToFail=okayToFail)
except OSError:
return self._fail(location, str, okayToFail)
return self._fail(location, str, okayToFail)
def walkFiles(self, *segs, **kwargs):
fileType = kwargs.get("type", self.type)
for _, _, files in os.walk(self._buildPath(segs, fileType=fileType)):
yield from files
def _buildPath(self, segs, fileType=None):
if fileType is None:
fileType = self.type
if fileType == "readonly":
return scriptPath("spec-data", "readonly", *segs)
else:
return scriptPath("spec-data", *segs)
def _fail(self, location, str, okayToFail):
if okayToFail:
if str:
return ""
else:
return io.StringIO("")
raise OSError(f"Couldn't find file '{location}'")
defaultRequester = DataFileRequester(
type="latest", fallback=DataFileRequester(type="readonly")
)
def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True):
# Looks in three or four locations, in order:
# the folder the spec source is in, the group's boilerplate folder, the megagroup's boilerplate folder, and the generic boilerplate folder.
# In each location, it first looks for the file specialized on status, and then for the generic file.
# Filenames must be of the format NAME.include or NAME-STATUS.include
if group is None and doc.md.group is not None:
group = doc.md.group.lower()
if status is None:
if doc.md.status is not None:
status = doc.md.status
elif doc.md.rawStatus is not None:
status = doc.md.rawStatus
megaGroup, status = splitStatus(status)
searchLocally = doc.md.localBoilerplate[name]
def boilerplatePath(*segs):
return scriptPath("boilerplate", *segs)
statusFile = f"{name}-{status}.include"
genericFile = f"{name}.include"
sources = []
if searchLocally:
sources.append(doc.inputSource.relative(statusFile)) # Can be None.
sources.append(doc.inputSource.relative(genericFile))
else:
for f in (statusFile, genericFile):
if doc.inputSource.cheaplyExists(f):
warn(
(
"Found {0} next to the specification without a matching\n"
+ "Local Boilerplate: {1} yes\n"
+ "in the metadata. This include won't be found when building via a URL."
).format(f, name)
)
# We should remove this after giving specs time to react to the warning:
sources.append(doc.inputSource.relative(f))
if group:
sources.append(InputSource(boilerplatePath(group, statusFile)))
sources.append(InputSource(boilerplatePath(group, genericFile)))
if megaGroup:
sources.append(InputSource(boilerplatePath(megaGroup, statusFile)))
sources.append(InputSource(boilerplatePath(megaGroup, genericFile)))
sources.append(InputSource(boilerplatePath(statusFile)))
sources.append(InputSource(boilerplatePath(genericFile)))
# Watch all the possible sources, not just the one that got used, because if
# an earlier one appears, we want to rebuild.
doc.recordDependencies(*sources)
for source in sources:
if source is not None:
try:
return source.read().content
except OSError:
# That input doesn't exist.
pass
else:
if error:
die(
"Couldn't find an appropriate include file for the {0} inclusion, given group='{1}' and status='{2}'.",
name,
group,
status,
)
return ""
| # pylint: disable=R1732
import io
import os
from ..InputSource import InputSource
from ..messages import *
from .main import scriptPath
from .status import splitStatus
class DataFileRequester:
def __init__(self, type=None, fallback=None):
self.type = type
if self.type not in ("readonly", "latest"):
raise Exception(f"Bad value for DataFileRequester.type, got '{type}'.")
# fallback is another requester, used if the main one fails.
self.fallback = fallback
def fetch(self, *segs, **kwargs):
str = kwargs.get("str", False)
okayToFail = kwargs.get("okayToFail", False)
fileType = kwargs.get("type", self.type)
location = self._buildPath(segs=segs, fileType=fileType)
try:
if str:
with open(location, encoding="utf-8") as fh:
return fh.read()
else:
return open(location, encoding="utf-8")
except OSError:
if self.fallback:
try:
return self.fallback.fetch(*segs, str=str, okayToFail=okayToFail)
except OSError:
return self._fail(location, str, okayToFail)
return self._fail(location, str, okayToFail)
def walkFiles(self, *segs, **kwargs):
fileType = kwargs.get("type", self.type)
for _, _, files in os.walk(self._buildPath(segs, fileType=fileType)):
yield from files
def _buildPath(self, segs, fileType=None):
if fileType is None:
fileType = self.type
if fileType == "readonly":
return scriptPath("spec-data", "readonly", *segs)
else:
return scriptPath("spec-data", *segs)
def _fail(self, location, str, okayToFail):
if okayToFail:
if str:
return ""
else:
return io.StringIO("")
raise OSError(f"Couldn't find file '{location}'")
defaultRequester = DataFileRequester(
type="latest", fallback=DataFileRequester(type="readonly")
)
def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True, allowLocal=True):
# Looks in three or four locations, in order:
# the folder the spec source is in, the group's boilerplate folder, the megagroup's boilerplate folder, and the generic boilerplate folder.
# In each location, it first looks for the file specialized on status, and then for the generic file.
# Filenames must be of the format NAME.include or NAME-STATUS.include
if group is None and doc.md.group is not None:
group = doc.md.group.lower()
if status is None:
if doc.md.status is not None:
status = doc.md.status
elif doc.md.rawStatus is not None:
status = doc.md.rawStatus
megaGroup, status = splitStatus(status)
searchLocally = allowLocal and doc.md.localBoilerplate[name]
def boilerplatePath(*segs):
return scriptPath("boilerplate", *segs)
statusFile = f"{name}-{status}.include"
genericFile = f"{name}.include"
sources = []
if searchLocally:
sources.append(doc.inputSource.relative(statusFile)) # Can be None.
sources.append(doc.inputSource.relative(genericFile))
else:
for f in (statusFile, genericFile):
if doc.inputSource.cheaplyExists(f):
warn(
(
"Found {0} next to the specification without a matching\n"
+ "Local Boilerplate: {1} yes\n"
+ "in the metadata. This include won't be found when building via a URL."
).format(f, name)
)
# We should remove this after giving specs time to react to the warning:
sources.append(doc.inputSource.relative(f))
if group:
sources.append(InputSource(boilerplatePath(group, statusFile), chroot=False))
sources.append(InputSource(boilerplatePath(group, genericFile), chroot=False))
if megaGroup:
sources.append(InputSource(boilerplatePath(megaGroup, statusFile), chroot=False))
sources.append(InputSource(boilerplatePath(megaGroup, genericFile), chroot=False))
sources.append(InputSource(boilerplatePath(statusFile), chroot=False))
sources.append(InputSource(boilerplatePath(genericFile), chroot=False))
# Watch all the possible sources, not just the one that got used, because if
# an earlier one appears, we want to rebuild.
doc.recordDependencies(*sources)
for source in sources:
if source is not None:
try:
return source.read().content
except OSError:
# That input doesn't exist.
pass
else:
if error:
die(
"Couldn't find an appropriate include file for the {0} inclusion, given group='{1}' and status='{2}'.",
name,
group,
status,
)
return ""
| path_disclosure | {
"code": [
"def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True):",
" searchLocally = doc.md.localBoilerplate[name]",
" sources.append(InputSource(boilerplatePath(group, statusFile)))",
" sources.append(InputSource(boilerplatePath(group, genericFile)))",
" sources.append(InputSource(boilerplatePath(megaGroup, statusFile)))",
" sources.append(InputSource(boilerplatePath(megaGroup, genericFile)))",
" sources.append(InputSource(boilerplatePath(statusFile)))",
" sources.append(InputSource(boilerplatePath(genericFile)))"
],
"line_no": [
66,
80,
104,
105,
107,
108,
109,
110
]
} | {
"code": [
"def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True, allowLocal=True):",
" searchLocally = allowLocal and doc.md.localBoilerplate[name]",
" sources.append(InputSource(boilerplatePath(group, statusFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(group, genericFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(megaGroup, statusFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(megaGroup, genericFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(statusFile), chroot=False))",
" sources.append(InputSource(boilerplatePath(genericFile), chroot=False))"
],
"line_no": [
66,
80,
104,
105,
107,
108,
109,
110
]
} |
import io
import os
from ..InputSource import InputSource
from ..messages import *
from .main import scriptPath
from .status import splitStatus
class CLASS_0:
def __init__(self, VAR_6=None, VAR_7=None):
self.type = VAR_6
if self.type not in ("readonly", "latest"):
raise Exception(f"Bad value for CLASS_0.type, got '{VAR_6}'.")
self.fallback = VAR_7
def FUNC_1(self, *VAR_8, **VAR_9):
VAR_12 = VAR_9.get("str", False)
VAR_13 = VAR_9.get("okayToFail", False)
VAR_10 = VAR_9.get("type", self.type)
VAR_11 = self._buildPath(VAR_8=segs, VAR_10=fileType)
try:
if VAR_12:
with open(VAR_11, encoding="utf-8") as fh:
return fh.read()
else:
return open(VAR_11, encoding="utf-8")
except OSError:
if self.fallback:
try:
return self.fallback.fetch(*VAR_8, VAR_12=str, VAR_13=okayToFail)
except OSError:
return self._fail(VAR_11, VAR_12, VAR_13)
return self._fail(VAR_11, VAR_12, VAR_13)
def FUNC_2(self, *VAR_8, **VAR_9):
VAR_10 = VAR_9.get("type", self.type)
for _, _, files in os.walk(self._buildPath(VAR_8, VAR_10=fileType)):
yield from files
def FUNC_3(self, VAR_8, VAR_10=None):
if VAR_10 is None:
VAR_10 = self.type
if VAR_10 == "readonly":
return scriptPath("spec-data", "readonly", *VAR_8)
else:
return scriptPath("spec-data", *VAR_8)
def FUNC_4(self, VAR_11, VAR_12, VAR_13):
if VAR_13:
if VAR_12:
return ""
else:
return io.StringIO("")
raise OSError(f"Couldn't find file '{VAR_11}'")
VAR_0 = CLASS_0(
VAR_6="latest", VAR_7=CLASS_0(VAR_6="readonly")
)
def FUNC_0(VAR_1, VAR_2, VAR_3=None, VAR_4=None, VAR_5=True):
if VAR_3 is None and VAR_1.md.group is not None:
VAR_3 = VAR_1.md.group.lower()
if VAR_4 is None:
if VAR_1.md.status is not None:
VAR_4 = VAR_1.md.status
elif VAR_1.md.rawStatus is not None:
VAR_4 = VAR_1.md.rawStatus
VAR_14, VAR_4 = splitStatus(VAR_4)
VAR_15 = VAR_1.md.localBoilerplate[VAR_2]
def FUNC_5(*VAR_8):
return scriptPath("boilerplate", *VAR_8)
VAR_16 = f"{VAR_2}-{VAR_4}.include"
VAR_17 = f"{VAR_2}.include"
VAR_18 = []
if VAR_15:
VAR_18.append(VAR_1.inputSource.relative(VAR_16)) # Can be None.
VAR_18.append(VAR_1.inputSource.relative(VAR_17))
else:
for f in (VAR_16, VAR_17):
if VAR_1.inputSource.cheaplyExists(f):
warn(
(
"Found {0} next to the specification without a matching\n"
+ "Local Boilerplate: {1} yes\n"
+ "in the metadata. This include won't be found when building via a URL."
).format(f, VAR_2)
)
VAR_18.append(VAR_1.inputSource.relative(f))
if VAR_3:
VAR_18.append(InputSource(FUNC_5(VAR_3, VAR_16)))
VAR_18.append(InputSource(FUNC_5(VAR_3, VAR_17)))
if VAR_14:
VAR_18.append(InputSource(FUNC_5(VAR_14, VAR_16)))
VAR_18.append(InputSource(FUNC_5(VAR_14, VAR_17)))
VAR_18.append(InputSource(FUNC_5(VAR_16)))
VAR_18.append(InputSource(FUNC_5(VAR_17)))
VAR_1.recordDependencies(*VAR_18)
for source in VAR_18:
if source is not None:
try:
return source.read().content
except OSError:
pass
else:
if VAR_5:
die(
"Couldn't find an appropriate include file for the {0} inclusion, given VAR_3='{1}' and VAR_4='{2}'.",
VAR_2,
VAR_3,
VAR_4,
)
return ""
|
import io
import os
from ..InputSource import InputSource
from ..messages import *
from .main import scriptPath
from .status import splitStatus
class CLASS_0:
def __init__(self, VAR_7=None, VAR_8=None):
self.type = VAR_7
if self.type not in ("readonly", "latest"):
raise Exception(f"Bad value for CLASS_0.type, got '{VAR_7}'.")
self.fallback = VAR_8
def FUNC_1(self, *VAR_9, **VAR_10):
VAR_13 = VAR_10.get("str", False)
VAR_14 = VAR_10.get("okayToFail", False)
VAR_11 = VAR_10.get("type", self.type)
VAR_12 = self._buildPath(VAR_9=segs, VAR_11=fileType)
try:
if VAR_13:
with open(VAR_12, encoding="utf-8") as fh:
return fh.read()
else:
return open(VAR_12, encoding="utf-8")
except OSError:
if self.fallback:
try:
return self.fallback.fetch(*VAR_9, VAR_13=str, VAR_14=okayToFail)
except OSError:
return self._fail(VAR_12, VAR_13, VAR_14)
return self._fail(VAR_12, VAR_13, VAR_14)
def FUNC_2(self, *VAR_9, **VAR_10):
VAR_11 = VAR_10.get("type", self.type)
for _, _, files in os.walk(self._buildPath(VAR_9, VAR_11=fileType)):
yield from files
def FUNC_3(self, VAR_9, VAR_11=None):
if VAR_11 is None:
VAR_11 = self.type
if VAR_11 == "readonly":
return scriptPath("spec-data", "readonly", *VAR_9)
else:
return scriptPath("spec-data", *VAR_9)
def FUNC_4(self, VAR_12, VAR_13, VAR_14):
if VAR_14:
if VAR_13:
return ""
else:
return io.StringIO("")
raise OSError(f"Couldn't find file '{VAR_12}'")
VAR_0 = CLASS_0(
VAR_7="latest", VAR_8=CLASS_0(VAR_7="readonly")
)
def FUNC_0(VAR_1, VAR_2, VAR_3=None, VAR_4=None, VAR_5=True, VAR_6=True):
if VAR_3 is None and VAR_1.md.group is not None:
VAR_3 = VAR_1.md.group.lower()
if VAR_4 is None:
if VAR_1.md.status is not None:
VAR_4 = VAR_1.md.status
elif VAR_1.md.rawStatus is not None:
VAR_4 = VAR_1.md.rawStatus
VAR_15, VAR_4 = splitStatus(VAR_4)
VAR_16 = VAR_6 and VAR_1.md.localBoilerplate[VAR_2]
def FUNC_5(*VAR_9):
return scriptPath("boilerplate", *VAR_9)
VAR_17 = f"{VAR_2}-{VAR_4}.include"
VAR_18 = f"{VAR_2}.include"
VAR_19 = []
if VAR_16:
VAR_19.append(VAR_1.inputSource.relative(VAR_17)) # Can be None.
VAR_19.append(VAR_1.inputSource.relative(VAR_18))
else:
for f in (VAR_17, VAR_18):
if VAR_1.inputSource.cheaplyExists(f):
warn(
(
"Found {0} next to the specification without a matching\n"
+ "Local Boilerplate: {1} yes\n"
+ "in the metadata. This include won't be found when building via a URL."
).format(f, VAR_2)
)
VAR_19.append(VAR_1.inputSource.relative(f))
if VAR_3:
VAR_19.append(InputSource(FUNC_5(VAR_3, VAR_17), chroot=False))
VAR_19.append(InputSource(FUNC_5(VAR_3, VAR_18), chroot=False))
if VAR_15:
VAR_19.append(InputSource(FUNC_5(VAR_15, VAR_17), chroot=False))
VAR_19.append(InputSource(FUNC_5(VAR_15, VAR_18), chroot=False))
VAR_19.append(InputSource(FUNC_5(VAR_17), chroot=False))
VAR_19.append(InputSource(FUNC_5(VAR_18), chroot=False))
VAR_1.recordDependencies(*VAR_19)
for source in VAR_19:
if source is not None:
try:
return source.read().content
except OSError:
pass
else:
if VAR_5:
die(
"Couldn't find an appropriate include file for the {0} inclusion, given VAR_3='{1}' and VAR_4='{2}'.",
VAR_2,
VAR_3,
VAR_4,
)
return ""
| [
1,
2,
5,
10,
11,
17,
19,
38,
43,
51,
59,
60,
64,
65,
67,
68,
69,
70,
79,
81,
84,
101,
111,
112,
113,
115,
121,
132
] | [
1,
2,
5,
10,
11,
17,
19,
38,
43,
51,
59,
60,
64,
65,
67,
68,
69,
70,
79,
81,
84,
101,
111,
112,
113,
115,
121,
132
] |