{"nwo":"BelaPlatform\/Bela","sha":"b6491178b859eb8977f494eb9ba4806920eea4b0","path":"scripts\/hvresources\/uploader.py","language":"python","identifier":"__zip_dir","parameters":"(in_dir, zip_path, file_filter=None)","argument_list":"","return_statement":"return zip_path","docstring":"Recursively zip an entire directory with an optional file filter","docstring_summary":"Recursively zip an entire directory with an optional file filter","docstring_tokens":["Recursively","zip","an","entire","directory","with","an","optional","file","filter"],"function":"def __zip_dir(in_dir, zip_path, file_filter=None):\n \"\"\" Recursively zip an entire directory with an optional file filter\n \"\"\"\n zf = zipfile.ZipFile(zip_path, mode=\"w\", compression=zipfile.ZIP_DEFLATED)\n for subdir, dirs, files in os.walk(in_dir):\n for f in files:\n if (file_filter is None) or (f.lower().split(\".\")[-1] in file_filter):\n zf.write(\n filename=os.path.join(subdir,f),\n arcname=os.path.relpath(os.path.join(subdir,f), start=in_dir))\n return zip_path","function_tokens":["def","__zip_dir","(","in_dir",",","zip_path",",","file_filter","=","None",")",":","zf","=","zipfile",".","ZipFile","(","zip_path",",","mode","=","\"w\"",",","compression","=","zipfile",".","ZIP_DEFLATED",")","for","subdir",",","dirs",",","files","in","os",".","walk","(","in_dir",")",":","for","f","in","files",":","if","(","file_filter","is","None",")","or","(","f",".","lower","(",")",".","split","(","\".\"",")","[","-","1","]","in","file_filter",")",":","zf",".","write","(","filename","=","os",".","path",".","join","(","subdir",",","f",")",",","arcname","=","os",".","path",".","relpath","(","os",".","path",".","join","(","subdir",",","f",")",",","start","=","in_dir",")",")","return","zip_path"],"url":"https:\/\/github.com\/BelaPlatform\/Bela\/blob\/b6491178b859eb8977f494eb9ba4806920eea4b0\/scripts\/hvresources\/uploader.py#L92-L102"}
{"nwo":"BelaPlatform\/Bela","sha":"b6491178b859eb8977f494eb9ba4806920eea4b0","path":"scripts\/hvresources\/uploader.py","language":"python","identifier":"__unzip","parameters":"(zip_path, target_dir)","argument_list":"","return_statement":"","docstring":"Unzip a file to a given directory. All destination files are overwritten.","docstring_summary":"Unzip a file to a given directory. All destination files are overwritten.","docstring_tokens":["Unzip","a","file","to","a","given","directory",".","All","destination","files","are","overwritten","."],"function":"def __unzip(zip_path, target_dir):\n \"\"\" Unzip a file to a given directory. All destination files are overwritten.\n \"\"\"\n zipfile.ZipFile(zip_path).extractall(target_dir)","function_tokens":["def","__unzip","(","zip_path",",","target_dir",")",":","zipfile",".","ZipFile","(","zip_path",")",".","extractall","(","target_dir",")"],"url":"https:\/\/github.com\/BelaPlatform\/Bela\/blob\/b6491178b859eb8977f494eb9ba4806920eea4b0\/scripts\/hvresources\/uploader.py#L104-L107"}
{"nwo":"BelaPlatform\/Bela","sha":"b6491178b859eb8977f494eb9ba4806920eea4b0","path":"scripts\/hvresources\/uploader.py","language":"python","identifier":"__get_file_url_stub_for_generator","parameters":"(json_api, g)","argument_list":"","return_statement":"return None","docstring":"Returns the file link for a specific generator.\n Returns None if no link could be found.","docstring_summary":"Returns the file link for a specific generator.\n Returns None if no link could be found.","docstring_tokens":["Returns","the","file","link","for","a","specific","generator",".","Returns","None","if","no","link","could","be","found","."],"function":"def __get_file_url_stub_for_generator(json_api, g):\n \"\"\" Returns the file link for a specific generator.\n Returns None if no link could be found.\n \"\"\"\n for i in json_api[\"included\"]:\n if (i[\"type\"] == \"file\") and (g == i[\"data\"][\"buildId\"]):\n return i[\"links\"][\"self\"]\n return None","function_tokens":["def","__get_file_url_stub_for_generator","(","json_api",",","g",")",":","for","i","in","json_api","[","\"included\"","]",":","if","(","i","[","\"type\"","]","==","\"file\"",")","and","(","g","==","i","[","\"data\"","]","[","\"buildId\"","]",")",":","return","i","[","\"links\"","]","[","\"self\"","]","return","None"],"url":"https:\/\/github.com\/BelaPlatform\/Bela\/blob\/b6491178b859eb8977f494eb9ba4806920eea4b0\/scripts\/hvresources\/uploader.py#L109-L116"}
{"nwo":"BelaPlatform\/Bela","sha":"b6491178b859eb8977f494eb9ba4806920eea4b0","path":"scripts\/hvresources\/uploader.py","language":"python","identifier":"upload","parameters":"(input_dir, output_dirs=None, name=None, owner=None, generators=None, b=False, y=False, release=None, release_override=False, domain=None, verbose=False, token=None, clear_token=False, service_token=None, force_new_patch=False, archive_only=False)","argument_list":"","return_statement":"return exit_code, reply_json","docstring":"Upload a directory to the Heavy Cloud Service.\n\n Parameters\n ----------\n input_dir : str\n Directory containing _main.pd file.\n\n output_dirs : list, optional\n List of directories where the output should be placed. Usually the output directory list has only one element.\n If no argument is given, the input directory will be used.\n\n name : str, optional\n The name of the patch.\n If no argument is given, the name \"heavy\" is used.\n\n owner : str, optional\n The name of the owner of the patch. Usually this is an organisation.\n If no argument is given, the submitting user name is used.\n\n generators : list, optional\n A list of generators e.g. 'c', 'unity', or 'vst2-x86'\n\n b : bool, optional\n If True, puts the results of each generator into its own directory.\n False by default in which case all files are put into the same directory.\n\n y : bool, optional\n If True, extract only generated C files, static files are deleted. False by default.\n\n release : str, optional\n The name of the release to use for compiling.\n\n release_override : bool, optional\n Disable the validity check for a requested release. Forces sending a\n release request to the server.\n\n verbose : bool, optional\n False by default.\n\n token : str, optional\n The token used to identify the user to Heavy Cloud Service.\n By default the stored token will be used.\n\n clear_token : bool, optional\n Clears and ignores any existing stored tokens. Requests a new one from the command line.\n\n service_token : str, optional\n Pass an optional service token to be used instead of the default heavy_uploader.\n\n force_new_patch : bool, optional\n Indicate that a new patch should be created with the given name, if it does not yet exist.\n\n archive_only : bool, optional\n Only retrieve the archive from the server and place it in the destination folder without unzipping it.","docstring_summary":"Upload a directory to the Heavy Cloud Service.","docstring_tokens":["Upload","a","directory","to","the","Heavy","Cloud","Service","."],"function":"def upload(input_dir, output_dirs=None, name=None, owner=None, generators=None, b=False, y=False, release=None, release_override=False, domain=None, verbose=False, token=None, clear_token=False, service_token=None, force_new_patch=False, archive_only=False):\n \"\"\" Upload a directory to the Heavy Cloud Service.\n\n Parameters\n ----------\n input_dir : str\n Directory containing _main.pd file.\n\n output_dirs : list, optional\n List of directories where the output should be placed. Usually the output directory list has only one element.\n If no argument is given, the input directory will be used.\n\n name : str, optional\n The name of the patch.\n If no argument is given, the name \"heavy\" is used.\n\n owner : str, optional\n The name of the owner of the patch. Usually this is an organisation.\n If no argument is given, the submitting user name is used.\n\n generators : list, optional\n A list of generators e.g. 
'c', 'unity', or 'vst2-x86'\n\n b : bool, optional\n If True, puts the results of each generator into its own directory.\n False by default in which case all files are put into the same directory.\n\n y : bool, optional\n If True, extract only generated C files, static files are deleted. False by default.\n\n release : str, optional\n The name of the release to use for compiling.\n\n release_override : bool, optional\n Disable the validity check for a requested release. Forces sending a\n release request to the server.\n\n verbose : bool, optional\n False by default.\n\n token : str, optional\n The token used to identify the user to Heavy Cloud Service.\n By default the stored token will be used.\n\n clear_token : bool, optional\n Clears and ignores any existing stored tokens. Requests a new one from the command line.\n\n service_token : str, optional\n Pass an optional service token to be used instead of the default heavy_uploader.\n\n force_new_patch : bool, optional\n Indicate that a new patch should be created with the given name, if it does not yet exist.\n\n archive_only : bool, optional\n Only retrieve the archive from the server and place it in the destination folder without unzipping it.\n \"\"\"\n # https:\/\/github.com\/numpy\/numpy\/blob\/master\/doc\/HOWTO_DOCUMENT.rst.txt\n\n try:\n # set default values\n name = name or \"heavy\"\n domain = domain or \"https:\/\/enzienaudio.com\"\n exit_code = ErrorCodes.CODE_OK\n reply_json = {}\n temp_dir = None\n post_data = {}\n\n # token should be stored in ~\/.heavy\/token\n token_path = os.path.expanduser(os.path.join(\"~\/\", \".heavy\", \"token\"))\n\n if token is None:\n if os.path.exists(token_path):\n if clear_token:\n os.remove(token_path)\n else:\n with open(token_path, \"r\") as f:\n token = f.read().strip()\n\n if token is None:\n print \"Please provide a user token from enzienaudio.com. \" \\\n \"Create or copy one from https:\/\/enzienaudio.com\/getmytokens\/.\"\n token = getpass.getpass(\"Enter user token: \")\n token = token.strip()\n\n # write token to file\n if not os.path.exists(os.path.dirname(token_path)):\n # ensure that the .heavy directory exists\n os.makedirs(os.path.dirname(token_path))\n with open(token_path, \"w\") as f:\n f.write(token)\n os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR) # force rw------- permissions on the file\n\n tick = time.time()\n\n # check the validity of the token\n try:\n # check the valifity of the token\n payload = json.loads(base64.urlsafe_b64decode(token.split(\".\")[1]))\n payload[\"startDate\"] = datetime.datetime.strptime(payload[\"startDate\"], \"%Y-%m-%dT%H:%M:%S.%f\")\n\n # ensure that the token is valid\n now = datetime.datetime.utcnow()\n assert payload[\"startDate\"] <= now\n\n if owner is None:\n # if an owner is not supplied, default to the user name in the token\n owner = payload[\"name\"]\n except Exception as e:\n print \"The user token is invalid. 
Generate a new one at https:\/\/enzienaudio.com\/getmytokens\/.\"\n os.remove(token_path)\n exit_code = ErrorCodes.CODE_INVALID_TOKEN\n raise e\n\n # if there is a user-supplied service token, do a basic validity check\n if service_token:\n try:\n # check the valifity of the token\n payload = json.loads(base64.urlsafe_b64decode(token.split(\".\")[1]))\n payload[\"startDate\"] = datetime.datetime.strptime(payload[\"startDate\"], \"%Y-%m-%dT%H:%M:%S.%f\")\n\n # ensure that the token is valid\n now = datetime.datetime.utcnow()\n assert payload[\"startDate\"] <= now\n\n assert \"service\" in payload, \"'service' field required in service token payload.\"\n except Exception as e:\n print \"The supplied service token is invalid. A default token will be used.\"\n service_token = __HV_UPLOADER_SERVICE_TOKEN\n else:\n service_token = __HV_UPLOADER_SERVICE_TOKEN\n\n # create the session to pool all requests\n s = requests.Session()\n\n # parse the optional release argument\n if release:\n if not release_override:\n # check the validity of the current release\n releases_json = s.get(urlparse.urljoin(domain, \"\/a\/releases\/\")).json()\n if release in releases_json:\n today = datetime.datetime.now()\n valid_until = datetime.datetime.strptime(releases_json[release][\"validUntil\"], \"%Y-%m-%d\")\n if today > valid_until:\n print \"{0}Warning:{1} The release \\\"{2}\\\" expired on {3}. It may be removed at any time!\".format(\n Colours.yellow, Colours.end,\n release,\n releases_json[release][\"validUntil\"])\n elif (valid_until - today) <= datetime.timedelta(weeks=4):\n print \"{0}Warning:{1} The release \\\"{2}\\\" will expire soon on {3}.\".format(\n Colours.yellow, Colours.end,\n release,\n releases_json[release][\"validUntil\"])\n else:\n print \"{0}Error:{1} The release \\\"{2}\\\" is not available. Available releases are:\".format(\n Colours.red, Colours.end,\n release)\n for k,v in releases_json.items():\n print \"* {0} ({1})\".format(\n k,\n v[\"releaseDate\"])\n raise UploaderException(ErrorCodes.CODE_RELEASE_NOT_AVAILABLE)\n\n post_data[\"release\"] = release\n\n # make a temporary directory\n temp_dir = tempfile.mkdtemp(prefix=\"lroyal-\")\n\n # zip up the pd directory into the temporary directory\n if not os.path.exists(os.path.join(input_dir, \"_main.pd\")):\n raise UploaderException(\n ErrorCodes.CODE_MAIN_NOT_FOUND,\n \"Root Pd directory does not contain a file named _main.pd.\")\n zip_path = __zip_dir(\n input_dir,\n os.path.join(temp_dir, \"archive.zip\"),\n file_filter={\"pd\"})\n if os.stat(zip_path).st_size > __HV_MAX_UPLOAD_SIZE:\n raise UploaderException(\n ErrorCodes.CODE_UPLOAD_ASSET_TOO_LARGE,\n \"The target directory, zipped, is {0} bytes. The maximum upload size of 1MB.\".format(\n os.stat(zip_path).st_size))\n\n # the outputs to generate\n generators = list({s.lower() for s in set(generators or [])} & __SUPPORTED_GENERATOR_SET)\n\n # check if the patch exists already. Ask to create it if it doesn't exist\n r = s.get(\n urlparse.urljoin(domain, \"\/a\/patches\/{0}\/{1}\/\".format(owner, name)),\n headers={\n \"Accept\": \"application\/json\",\n \"Authorization\": \"Bearer \" + token,\n \"X-Heavy-Service-Token\": service_token\n })\n r.raise_for_status()\n reply_json = r.json()\n if \"errors\" in reply_json:\n if reply_json[\"errors\"][0][\"status\"] == \"404\":\n # the patch does not exist\n if force_new_patch:\n create_new_patch = True\n else:\n create_new_patch = raw_input(\"A patch called \\\"{0}\\\" does not exist for owner \\\"{1}\\\". Create it? 
(y\/n):\".format(name, owner))\n create_new_patch = (create_new_patch == \"y\")\n if create_new_patch:\n r = s.post(\n urlparse.urljoin(domain, \"\/a\/patches\/\"),\n data={\"owner_name\":owner, \"name\":name, \"public\":\"true\"},\n headers={\n \"Accept\": \"application\/json\",\n \"Authorization\": \"Bearer \" + token,\n \"X-Heavy-Service-Token\": service_token\n })\n r.raise_for_status()\n reply_json = r.json()\n if \"errors\" in reply_json:\n raise UploaderException(\n ErrorCodes.CODE_NEW_PATCH_FAIL,\n reply_json[\"errors\"][0][\"detail\"])\n else:\n pass # no errors? everything is cool! Proceed.\n else:\n UploaderException(\n ErrorCodes.CODE_NEW_PATCH_FAIL,\n \"A patch called \\\"{0}\\\" does not exist for owner \\\"{1}\\\"\".format(owner, name))\n else:\n raise UploaderException(\n ErrorCodes.CODE_NEW_PATCH_FAIL,\n reply_json[\"errors\"][0][\"detail\"])\n else:\n pass # the patch exists, move on\n\n # upload the job, get the response back\n r = s.post(\n urlparse.urljoin(domain, \"\/a\/patches\/{0}\/{1}\/jobs\/\".format(owner, name)),\n data=post_data,\n headers={\n \"Accept\": \"application\/json\",\n \"Authorization\": \"Bearer \" + token,\n \"X-Heavy-Service-Token\": service_token\n },\n timeout=None, # some builds can take a very long time\n files={\"file\": (os.path.basename(zip_path), open(zip_path, \"rb\"), \"application\/zip\")})\n r.raise_for_status()\n\n # decode the JSON API response (See below for an example response)\n reply_json = r.json()\n if verbose:\n print json.dumps(reply_json, sort_keys=True, indent=2, separators=(\",\", \": \"))\n\n # print any warnings\n for i,x in enumerate(reply_json.get(\"warnings\",[])):\n print \"{3}) {0}Warning:{1} {2}\".format(\n Colours.yellow, Colours.end, x[\"detail\"], i+1)\n\n # check for errors\n if len(reply_json.get(\"errors\",[])) > 0:\n for i,x in enumerate(reply_json[\"errors\"]):\n print \"{3}) {0}Error:{1} {2}\".format(\n Colours.red, Colours.end, x[\"detail\"], i+1)\n raise UploaderException(ErrorCodes.CODE_HEAVY_COMPILE_ERRORS)\n\n print \"Job URL:\", urlparse.urljoin(domain, reply_json[\"data\"][\"links\"][\"html\"])\n print \"Heavy release:\", reply_json[\"data\"][\"attributes\"][\"release\"]\n\n if len(generators) > 0:\n print \"Downloaded files placed in:\"\n\n # retrieve all requested files\n for i,g in enumerate(generators):\n file_url = urlparse.urljoin(\n domain,\n \"\/\".join([\n reply_json[\"data\"][\"links\"][\"html\"],\n g.replace(\"-\", \"\/\"),\n \"archive.zip\"\n ])\n )\n if file_url and (len(output_dirs) > i or b):\n r = s.get(\n file_url,\n headers={\n \"Authorization\": \"Bearer \" + token,\n \"X-Heavy-Service-Token\": service_token\n },\n timeout=None # some builds can take a very long time\n )\n r.raise_for_status()\n\n # prepare the target directory for the files\n if b:\n target_dir = os.path.join(os.path.abspath(os.path.expanduser(output_dirs[0])), g)\n else:\n target_dir = os.path.abspath(os.path.expanduser(output_dirs[i]))\n if not os.path.exists(target_dir):\n os.makedirs(target_dir) # ensure that the output directory exists\n\n # write the reply to file to the final destination if archive_only, else temporary\n c_zip_path = os.path.join(target_dir if archive_only else temp_dir, \"archive.{0}.zip\".format(g))\n with open(c_zip_path, \"wb\") as f:\n f.write(r.content)\n\n if not archive_only:\n # unzip the files to where they belong\n __unzip(c_zip_path, target_dir)\n\n if g == \"c-src\" and y:\n keep_files = (\"_{0}.h\".format(name), \"_{0}.hpp\".format(name), \"_{0}.cpp\".format(name))\n for f in 
os.listdir(target_dir):\n if not f.endswith(keep_files):\n os.remove(os.path.join(target_dir, f))\n\n print \" * {0}: {1}\".format(g, target_dir)\n else:\n print \" * {0}Warning:{1} {2} files could not be retrieved.\".format(\n Colours.yellow, Colours.end,\n g)\n\n print \"Total request time: {0}ms\".format(int(1000.0*(time.time()-tick)))\n\n except UploaderException as e:\n exit_code = e.code\n if e.message:\n print \"{0}Error:{1} {2}\".format(Colours.red, Colours.end, e.message)\n except requests.ConnectionError as e:\n print \"{0}Error:{1} Could not connect to server. Is the server down? Is the internet down?\\n{2}\".format(Colours.red, Colours.end, e)\n exit_code = ErrorCodes.CODE_CONNECTION_ERROR\n except requests.Timeout as e:\n print \"{0}Error:{1} Connection to server timed out. The server might be overloaded. Try again later?\\n{2}\".format(Colours.red, Colours.end, e)\n exit_code = ErrorCodes.CODE_CONNECTION_TIMEOUT\n except requests.HTTPError as e:\n if e.response.status_code == requests.status_codes.codes.unauthorized:\n print \"{0}Error:{1} Unknown username or password.\".format(Colours.red, Colours.end)\n else:\n print \"{0}Error:{1} An HTTP error has occurred with URL {2}\\n{3}\".format(Colours.red, Colours.end, e.request.path_url, e)\n exit_code = ErrorCodes.CODE_CONNECTION_400_500\n except Exception as e:\n # a generic catch for any other exception\n exit_code = exit_code if exit_code != ErrorCodes.CODE_OK else ErrorCodes.CODE_EXCEPTION\n print \"{0}Error:{1} ({2}) {3}\".format(Colours.red, Colours.end, e.__class__, e)\n print \"Getting a weird error? Get the latest version with 'pip install hv-uploader -U', or check for issues at https:\/\/github.com\/enzienaudio\/heavy\/issues.\"\n finally:\n if temp_dir:\n shutil.rmtree(temp_dir) # delete the temporary directory no matter what\n\n return exit_code, reply_json","function_tokens":["def","upload","(","input_dir",",","output_dirs","=","None",",","name","=","None",",","owner","=","None",",","generators","=","None",",","b","=","False",",","y","=","False",",","release","=","None",",","release_override","=","False",",","domain","=","None",",","verbose","=","False",",","token","=","None",",","clear_token","=","False",",","service_token","=","None",",","force_new_patch","=","False",",","archive_only","=","False",")",":","# https:\/\/github.com\/numpy\/numpy\/blob\/master\/doc\/HOWTO_DOCUMENT.rst.txt","try",":","# set default values","name","=","name","or","\"heavy\"","domain","=","domain","or","\"https:\/\/enzienaudio.com\"","exit_code","=","ErrorCodes",".","CODE_OK","reply_json","=","{","}","temp_dir","=","None","post_data","=","{","}","# token should be stored in ~\/.heavy\/token","token_path","=","os",".","path",".","expanduser","(","os",".","path",".","join","(","\"~\/\"",",","\".heavy\"",",","\"token\"",")",")","if","token","is","None",":","if","os",".","path",".","exists","(","token_path",")",":","if","clear_token",":","os",".","remove","(","token_path",")","else",":","with","open","(","token_path",",","\"r\"",")","as","f",":","token","=","f",".","read","(",")",".","strip","(",")","if","token","is","None",":","print","\"Please provide a user token from enzienaudio.com. 
\"","\"Create or copy one from https:\/\/enzienaudio.com\/getmytokens\/.\"","token","=","getpass",".","getpass","(","\"Enter user token: \"",")","token","=","token",".","strip","(",")","# write token to file","if","not","os",".","path",".","exists","(","os",".","path",".","dirname","(","token_path",")",")",":","# ensure that the .heavy directory exists","os",".","makedirs","(","os",".","path",".","dirname","(","token_path",")",")","with","open","(","token_path",",","\"w\"",")","as","f",":","f",".","write","(","token",")","os",".","chmod","(","token_path",",","stat",".","S_IRUSR","|","stat",".","S_IWUSR",")","# force rw------- permissions on the file","tick","=","time",".","time","(",")","# check the validity of the token","try",":","# check the valifity of the token","payload","=","json",".","loads","(","base64",".","urlsafe_b64decode","(","token",".","split","(","\".\"",")","[","1","]",")",")","payload","[","\"startDate\"","]","=","datetime",".","datetime",".","strptime","(","payload","[","\"startDate\"","]",",","\"%Y-%m-%dT%H:%M:%S.%f\"",")","# ensure that the token is valid","now","=","datetime",".","datetime",".","utcnow","(",")","assert","payload","[","\"startDate\"","]","<=","now","if","owner","is","None",":","# if an owner is not supplied, default to the user name in the token","owner","=","payload","[","\"name\"","]","except","Exception","as","e",":","print","\"The user token is invalid. Generate a new one at https:\/\/enzienaudio.com\/getmytokens\/.\"","os",".","remove","(","token_path",")","exit_code","=","ErrorCodes",".","CODE_INVALID_TOKEN","raise","e","# if there is a user-supplied service token, do a basic validity check","if","service_token",":","try",":","# check the valifity of the token","payload","=","json",".","loads","(","base64",".","urlsafe_b64decode","(","token",".","split","(","\".\"",")","[","1","]",")",")","payload","[","\"startDate\"","]","=","datetime",".","datetime",".","strptime","(","payload","[","\"startDate\"","]",",","\"%Y-%m-%dT%H:%M:%S.%f\"",")","# ensure that the token is valid","now","=","datetime",".","datetime",".","utcnow","(",")","assert","payload","[","\"startDate\"","]","<=","now","assert","\"service\"","in","payload",",","\"'service' field required in service token payload.\"","except","Exception","as","e",":","print","\"The supplied service token is invalid. A default token will be used.\"","service_token","=","__HV_UPLOADER_SERVICE_TOKEN","else",":","service_token","=","__HV_UPLOADER_SERVICE_TOKEN","# create the session to pool all requests","s","=","requests",".","Session","(",")","# parse the optional release argument","if","release",":","if","not","release_override",":","# check the validity of the current release","releases_json","=","s",".","get","(","urlparse",".","urljoin","(","domain",",","\"\/a\/releases\/\"",")",")",".","json","(",")","if","release","in","releases_json",":","today","=","datetime",".","datetime",".","now","(",")","valid_until","=","datetime",".","datetime",".","strptime","(","releases_json","[","release","]","[","\"validUntil\"","]",",","\"%Y-%m-%d\"",")","if","today",">","valid_until",":","print","\"{0}Warning:{1} The release \\\"{2}\\\" expired on {3}. 
It may be removed at any time!\"",".","format","(","Colours",".","yellow",",","Colours",".","end",",","release",",","releases_json","[","release","]","[","\"validUntil\"","]",")","elif","(","valid_until","-","today",")","<=","datetime",".","timedelta","(","weeks","=","4",")",":","print","\"{0}Warning:{1} The release \\\"{2}\\\" will expire soon on {3}.\"",".","format","(","Colours",".","yellow",",","Colours",".","end",",","release",",","releases_json","[","release","]","[","\"validUntil\"","]",")","else",":","print","\"{0}Error:{1} The release \\\"{2}\\\" is not available. Available releases are:\"",".","format","(","Colours",".","red",",","Colours",".","end",",","release",")","for","k",",","v","in","releases_json",".","items","(",")",":","print","\"* {0} ({1})\"",".","format","(","k",",","v","[","\"releaseDate\"","]",")","raise","UploaderException","(","ErrorCodes",".","CODE_RELEASE_NOT_AVAILABLE",")","post_data","[","\"release\"","]","=","release","# make a temporary directory","temp_dir","=","tempfile",".","mkdtemp","(","prefix","=","\"lroyal-\"",")","# zip up the pd directory into the temporary directory","if","not","os",".","path",".","exists","(","os",".","path",".","join","(","input_dir",",","\"_main.pd\"",")",")",":","raise","UploaderException","(","ErrorCodes",".","CODE_MAIN_NOT_FOUND",",","\"Root Pd directory does not contain a file named _main.pd.\"",")","zip_path","=","__zip_dir","(","input_dir",",","os",".","path",".","join","(","temp_dir",",","\"archive.zip\"",")",",","file_filter","=","{","\"pd\"","}",")","if","os",".","stat","(","zip_path",")",".","st_size",">","__HV_MAX_UPLOAD_SIZE",":","raise","UploaderException","(","ErrorCodes",".","CODE_UPLOAD_ASSET_TOO_LARGE",",","\"The target directory, zipped, is {0} bytes. The maximum upload size of 1MB.\"",".","format","(","os",".","stat","(","zip_path",")",".","st_size",")",")","# the outputs to generate","generators","=","list","(","{","s",".","lower","(",")","for","s","in","set","(","generators","or","[","]",")","}","&","__SUPPORTED_GENERATOR_SET",")","# check if the patch exists already. Ask to create it if it doesn't exist","r","=","s",".","get","(","urlparse",".","urljoin","(","domain",",","\"\/a\/patches\/{0}\/{1}\/\"",".","format","(","owner",",","name",")",")",",","headers","=","{","\"Accept\"",":","\"application\/json\"",",","\"Authorization\"",":","\"Bearer \"","+","token",",","\"X-Heavy-Service-Token\"",":","service_token","}",")","r",".","raise_for_status","(",")","reply_json","=","r",".","json","(",")","if","\"errors\"","in","reply_json",":","if","reply_json","[","\"errors\"","]","[","0","]","[","\"status\"","]","==","\"404\"",":","# the patch does not exist","if","force_new_patch",":","create_new_patch","=","True","else",":","create_new_patch","=","raw_input","(","\"A patch called \\\"{0}\\\" does not exist for owner \\\"{1}\\\". Create it? 
(y\/n):\"",".","format","(","name",",","owner",")",")","create_new_patch","=","(","create_new_patch","==","\"y\"",")","if","create_new_patch",":","r","=","s",".","post","(","urlparse",".","urljoin","(","domain",",","\"\/a\/patches\/\"",")",",","data","=","{","\"owner_name\"",":","owner",",","\"name\"",":","name",",","\"public\"",":","\"true\"","}",",","headers","=","{","\"Accept\"",":","\"application\/json\"",",","\"Authorization\"",":","\"Bearer \"","+","token",",","\"X-Heavy-Service-Token\"",":","service_token","}",")","r",".","raise_for_status","(",")","reply_json","=","r",".","json","(",")","if","\"errors\"","in","reply_json",":","raise","UploaderException","(","ErrorCodes",".","CODE_NEW_PATCH_FAIL",",","reply_json","[","\"errors\"","]","[","0","]","[","\"detail\"","]",")","else",":","pass","# no errors? everything is cool! Proceed.","else",":","UploaderException","(","ErrorCodes",".","CODE_NEW_PATCH_FAIL",",","\"A patch called \\\"{0}\\\" does not exist for owner \\\"{1}\\\"\"",".","format","(","owner",",","name",")",")","else",":","raise","UploaderException","(","ErrorCodes",".","CODE_NEW_PATCH_FAIL",",","reply_json","[","\"errors\"","]","[","0","]","[","\"detail\"","]",")","else",":","pass","# the patch exists, move on","# upload the job, get the response back","r","=","s",".","post","(","urlparse",".","urljoin","(","domain",",","\"\/a\/patches\/{0}\/{1}\/jobs\/\"",".","format","(","owner",",","name",")",")",",","data","=","post_data",",","headers","=","{","\"Accept\"",":","\"application\/json\"",",","\"Authorization\"",":","\"Bearer \"","+","token",",","\"X-Heavy-Service-Token\"",":","service_token","}",",","timeout","=","None",",","# some builds can take a very long time","files","=","{","\"file\"",":","(","os",".","path",".","basename","(","zip_path",")",",","open","(","zip_path",",","\"rb\"",")",",","\"application\/zip\"",")","}",")","r",".","raise_for_status","(",")","# decode the JSON API response (See below for an example response)","reply_json","=","r",".","json","(",")","if","verbose",":","print","json",".","dumps","(","reply_json",",","sort_keys","=","True",",","indent","=","2",",","separators","=","(","\",\"",",","\": \"",")",")","# print any warnings","for","i",",","x","in","enumerate","(","reply_json",".","get","(","\"warnings\"",",","[","]",")",")",":","print","\"{3}) {0}Warning:{1} {2}\"",".","format","(","Colours",".","yellow",",","Colours",".","end",",","x","[","\"detail\"","]",",","i","+","1",")","# check for errors","if","len","(","reply_json",".","get","(","\"errors\"",",","[","]",")",")",">","0",":","for","i",",","x","in","enumerate","(","reply_json","[","\"errors\"","]",")",":","print","\"{3}) {0}Error:{1} {2}\"",".","format","(","Colours",".","red",",","Colours",".","end",",","x","[","\"detail\"","]",",","i","+","1",")","raise","UploaderException","(","ErrorCodes",".","CODE_HEAVY_COMPILE_ERRORS",")","print","\"Job URL:\"",",","urlparse",".","urljoin","(","domain",",","reply_json","[","\"data\"","]","[","\"links\"","]","[","\"html\"","]",")","print","\"Heavy release:\"",",","reply_json","[","\"data\"","]","[","\"attributes\"","]","[","\"release\"","]","if","len","(","generators",")",">","0",":","print","\"Downloaded files placed in:\"","# retrieve all requested 
files","for","i",",","g","in","enumerate","(","generators",")",":","file_url","=","urlparse",".","urljoin","(","domain",",","\"\/\"",".","join","(","[","reply_json","[","\"data\"","]","[","\"links\"","]","[","\"html\"","]",",","g",".","replace","(","\"-\"",",","\"\/\"",")",",","\"archive.zip\"","]",")",")","if","file_url","and","(","len","(","output_dirs",")",">","i","or","b",")",":","r","=","s",".","get","(","file_url",",","headers","=","{","\"Authorization\"",":","\"Bearer \"","+","token",",","\"X-Heavy-Service-Token\"",":","service_token","}",",","timeout","=","None","# some builds can take a very long time",")","r",".","raise_for_status","(",")","# prepare the target directory for the files","if","b",":","target_dir","=","os",".","path",".","join","(","os",".","path",".","abspath","(","os",".","path",".","expanduser","(","output_dirs","[","0","]",")",")",",","g",")","else",":","target_dir","=","os",".","path",".","abspath","(","os",".","path",".","expanduser","(","output_dirs","[","i","]",")",")","if","not","os",".","path",".","exists","(","target_dir",")",":","os",".","makedirs","(","target_dir",")","# ensure that the output directory exists","# write the reply to file to the final destination if archive_only, else temporary","c_zip_path","=","os",".","path",".","join","(","target_dir","if","archive_only","else","temp_dir",",","\"archive.{0}.zip\"",".","format","(","g",")",")","with","open","(","c_zip_path",",","\"wb\"",")","as","f",":","f",".","write","(","r",".","content",")","if","not","archive_only",":","# unzip the files to where they belong","__unzip","(","c_zip_path",",","target_dir",")","if","g","==","\"c-src\"","and","y",":","keep_files","=","(","\"_{0}.h\"",".","format","(","name",")",",","\"_{0}.hpp\"",".","format","(","name",")",",","\"_{0}.cpp\"",".","format","(","name",")",")","for","f","in","os",".","listdir","(","target_dir",")",":","if","not","f",".","endswith","(","keep_files",")",":","os",".","remove","(","os",".","path",".","join","(","target_dir",",","f",")",")","print","\" * {0}: {1}\"",".","format","(","g",",","target_dir",")","else",":","print","\" * {0}Warning:{1} {2} files could not be retrieved.\"",".","format","(","Colours",".","yellow",",","Colours",".","end",",","g",")","print","\"Total request time: {0}ms\"",".","format","(","int","(","1000.0","*","(","time",".","time","(",")","-","tick",")",")",")","except","UploaderException","as","e",":","exit_code","=","e",".","code","if","e",".","message",":","print","\"{0}Error:{1} {2}\"",".","format","(","Colours",".","red",",","Colours",".","end",",","e",".","message",")","except","requests",".","ConnectionError","as","e",":","print","\"{0}Error:{1} Could not connect to server. Is the server down? Is the internet down?\\n{2}\"",".","format","(","Colours",".","red",",","Colours",".","end",",","e",")","exit_code","=","ErrorCodes",".","CODE_CONNECTION_ERROR","except","requests",".","Timeout","as","e",":","print","\"{0}Error:{1} Connection to server timed out. The server might be overloaded. 
Try again later?\\n{2}\"",".","format","(","Colours",".","red",",","Colours",".","end",",","e",")","exit_code","=","ErrorCodes",".","CODE_CONNECTION_TIMEOUT","except","requests",".","HTTPError","as","e",":","if","e",".","response",".","status_code","==","requests",".","status_codes",".","codes",".","unauthorized",":","print","\"{0}Error:{1} Unknown username or password.\"",".","format","(","Colours",".","red",",","Colours",".","end",")","else",":","print","\"{0}Error:{1} An HTTP error has occurred with URL {2}\\n{3}\"",".","format","(","Colours",".","red",",","Colours",".","end",",","e",".","request",".","path_url",",","e",")","exit_code","=","ErrorCodes",".","CODE_CONNECTION_400_500","except","Exception","as","e",":","# a generic catch for any other exception","exit_code","=","exit_code","if","exit_code","!=","ErrorCodes",".","CODE_OK","else","ErrorCodes",".","CODE_EXCEPTION","print","\"{0}Error:{1} ({2}) {3}\"",".","format","(","Colours",".","red",",","Colours",".","end",",","e",".","__class__",",","e",")","print","\"Getting a weird error? Get the latest version with 'pip install hv-uploader -U', or check for issues at https:\/\/github.com\/enzienaudio\/heavy\/issues.\"","finally",":","if","temp_dir",":","shutil",".","rmtree","(","temp_dir",")","# delete the temporary directory no matter what","return","exit_code",",","reply_json"],"url":"https:\/\/github.com\/BelaPlatform\/Bela\/blob\/b6491178b859eb8977f494eb9ba4806920eea4b0\/scripts\/hvresources\/uploader.py#L120-L464"}
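
The upload record above is the script's main entry point: it resolves a user token stored in ~/.heavy/token, optionally checks the requested release against /a/releases/, zips the folder containing _main.pd, posts a compile job to the Heavy Cloud Service at enzienaudio.com, then downloads and unpacks one archive per requested generator, returning (exit_code, reply_json). A hedged call-site sketch under these assumptions: the code runs on the Python 2 interpreter the script targets, uploader.py is importable from scripts/hvresources, a valid user token is already stored, and the patch name and paths are invented for illustration (the "c-src" generator identifier is taken from the record itself).

import sys

sys.path.append("scripts/hvresources")  # location of uploader.py in the Bela repo
import uploader  # the module documented by the records above (Python 2)

# Hypothetical job: compile the patch in ./my_patch (which must contain _main.pd)
# to C sources and unpack them into ./build/heavy, keeping only the generated
# C/C++ files (y=True triggers the "c-src" clean-up branch shown in the record).
exit_code, reply = uploader.upload(
    input_dir="my_patch",
    output_dirs=["build/heavy"],
    name="my_patch",            # defaults to "heavy" if omitted
    generators=["c-src"],
    y=True,
    verbose=True)

# ErrorCodes is a module-level name referenced by upload() itself.
assert exit_code == uploader.ErrorCodes.CODE_OK, reply.get("errors")
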