{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"setup.py","language":"python","identifier":"unittest_suite","parameters":"()","argument_list":"","return_statement":"return test_suite","docstring":"Get test suite (Python unit tests only).","docstring_summary":"Get test suite (Python unit tests only).","docstring_tokens":["Get","test","suite","(","Python","unit","tests","only",")","."],"function":"def unittest_suite():\n \"\"\"Get test suite (Python unit tests only).\"\"\"\n test_loader = unittest.TestLoader()\n test_suite = test_loader.discover('test\/unit', pattern='test_*.py')\n return test_suite","function_tokens":["def","unittest_suite","(",")",":","test_loader","=","unittest",".","TestLoader","(",")","test_suite","=","test_loader",".","discover","(","'test\/unit'",",","pattern","=","'test_*.py'",")","return","test_suite"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/setup.py#L44-L48"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"setup.py","language":"python","identifier":"get_dsub_version","parameters":"()","argument_list":"","return_statement":"","docstring":"Get the dsub version out of the _dsub_version.py source file.\n\n Setup.py should not import dsub version from dsub directly since ambiguity in\n import order could lead to an old version of dsub setting the version number.\n Parsing the file directly is simpler than using import tools (whose interface\n varies between python 2.7, 3.4, and 3.5).\n\n Returns:\n string of dsub version.\n\n Raises:\n ValueError: if the version is not found.","docstring_summary":"Get the dsub version out of the _dsub_version.py source file.","docstring_tokens":["Get","the","dsub","version","out","of","the","_dsub_version",".","py","source","file","."],"function":"def get_dsub_version():\n \"\"\"Get the dsub version out of the _dsub_version.py source file.\n\n Setup.py should not import dsub version from dsub directly since ambiguity in\n import order could lead to an old version of dsub setting the version number.\n Parsing the file directly is simpler than using import tools (whose interface\n varies between python 2.7, 3.4, and 3.5).\n\n Returns:\n string of dsub version.\n\n Raises:\n ValueError: if the version is not found.\n \"\"\"\n filename = os.path.join(os.path.dirname(__file__), 'dsub\/_dsub_version.py')\n with open(filename, 'r') as versionfile:\n for line in versionfile:\n if line.startswith('DSUB_VERSION ='):\n # Get the version then strip whitespace and quote characters.\n version = line.partition('=')[2]\n return version.strip().strip('\\'\"')\n raise ValueError('Could not find version.')","function_tokens":["def","get_dsub_version","(",")",":","filename","=","os",".","path",".","join","(","os",".","path",".","dirname","(","__file__",")",",","'dsub\/_dsub_version.py'",")","with","open","(","filename",",","'r'",")","as","versionfile",":","for","line","in","versionfile",":","if","line",".","startswith","(","'DSUB_VERSION ='",")",":","# Get the version then strip whitespace and quote characters.","version","=","line",".","partition","(","'='",")","[","2","]","return","version",".","strip","(",")",".","strip","(","'\\'\"'",")","raise","ValueError","(","'Could not find version.'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/setup.py#L51-L72"} 
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"setup.py","language":"python","identifier":"get_readme_contents","parameters":"()","argument_list":"","return_statement":"","docstring":"Get the README.md contents.","docstring_summary":"Get the README.md contents.","docstring_tokens":["Get","the","README",".","md","contents","."],"function":"def get_readme_contents():\n \"\"\"Get the README.md contents.\"\"\"\n with open('README.md', 'r') as f:\n return f.read()","function_tokens":["def","get_readme_contents","(",")",":","with","open","(","'README.md'",",","'r'",")","as","f",":","return","f",".","read","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/setup.py#L75-L78"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"replace_print","parameters":"(fileobj=sys.stderr)","argument_list":"","return_statement":"","docstring":"Sys.out replacer, by default with stderr.\n\n Use it like this:\n with replace_print_with(fileobj):\n print \"hello\" # writes to the file\n print \"done\" # prints to stdout\n\n Args:\n fileobj: a file object to replace stdout.\n\n Yields:\n The printer.","docstring_summary":"Sys.out replacer, by default with stderr.","docstring_tokens":["Sys",".","out","replacer","by","default","with","stderr","."],"function":"def replace_print(fileobj=sys.stderr):\n \"\"\"Sys.out replacer, by default with stderr.\n\n Use it like this:\n with replace_print_with(fileobj):\n print \"hello\" # writes to the file\n print \"done\" # prints to stdout\n\n Args:\n fileobj: a file object to replace stdout.\n\n Yields:\n The printer.\n \"\"\"\n printer = _Printer(fileobj)\n\n previous_stdout = sys.stdout\n sys.stdout = printer\n try:\n yield printer\n finally:\n sys.stdout = previous_stdout","function_tokens":["def","replace_print","(","fileobj","=","sys",".","stderr",")",":","printer","=","_Printer","(","fileobj",")","previous_stdout","=","sys",".","stdout","sys",".","stdout","=","printer","try",":","yield","printer","finally",":","sys",".","stdout","=","previous_stdout"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/dsub_util.py#L63-L84"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"print_error","parameters":"(msg)","argument_list":"","return_statement":"","docstring":"Utility routine to emit messages to stderr.","docstring_summary":"Utility routine to emit messages to stderr.","docstring_tokens":["Utility","routine","to","emit","messages","to","stderr","."],"function":"def print_error(msg):\n \"\"\"Utility routine to emit messages to stderr.\"\"\"\n print(msg, file=sys.stderr)","function_tokens":["def","print_error","(","msg",")",":","print","(","msg",",","file","=","sys",".","stderr",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/dsub_util.py#L87-L89"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"get_os_user","parameters":"()","argument_list":"","return_statement":"return pwd.getpwuid(os.getuid())[0]","docstring":"Returns the current OS user, this may be different from the dsub user.","docstring_summary":"Returns the current OS user, this may be different from the dsub 
user.","docstring_tokens":["Returns","the","current","OS","user","this","may","be","different","from","the","dsub","user","."],"function":"def get_os_user():\n \"\"\"Returns the current OS user, this may be different from the dsub user.\"\"\"\n return pwd.getpwuid(os.getuid())[0]","function_tokens":["def","get_os_user","(",")",":","return","pwd",".","getpwuid","(","os",".","getuid","(",")",")","[","0","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/dsub_util.py#L92-L94"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"tasks_to_job_ids","parameters":"(task_list)","argument_list":"","return_statement":"return set([t.get_field('job-id') for t in task_list])","docstring":"Returns the set of job IDs for the given tasks.","docstring_summary":"Returns the set of job IDs for the given tasks.","docstring_tokens":["Returns","the","set","of","job","IDs","for","the","given","tasks","."],"function":"def tasks_to_job_ids(task_list):\n \"\"\"Returns the set of job IDs for the given tasks.\"\"\"\n return set([t.get_field('job-id') for t in task_list])","function_tokens":["def","tasks_to_job_ids","(","task_list",")",":","return","set","(","[","t",".","get_field","(","'job-id'",")","for","t","in","task_list","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/dsub_util.py#L97-L99"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"compact_interval_string","parameters":"(value_list)","argument_list":"","return_statement":"return ','.join([\n '{}-{}'.format(pair[0], pair[1]) if pair[0] != pair[1] else str(pair[0])\n for pair in interval_list\n ])","docstring":"Compact a list of integers into a comma-separated string of intervals.\n\n Args:\n value_list: A list of sortable integers such as a list of numbers\n\n Returns:\n A compact string representation, such as \"1-5,8,12-15\"","docstring_summary":"Compact a list of integers into a comma-separated string of intervals.","docstring_tokens":["Compact","a","list","of","integers","into","a","comma","-","separated","string","of","intervals","."],"function":"def compact_interval_string(value_list):\n \"\"\"Compact a list of integers into a comma-separated string of intervals.\n\n Args:\n value_list: A list of sortable integers such as a list of numbers\n\n Returns:\n A compact string representation, such as \"1-5,8,12-15\"\n \"\"\"\n\n if not value_list:\n return ''\n\n value_list.sort()\n\n # Start by simply building up a list of separate contiguous intervals\n interval_list = []\n curr = []\n for val in value_list:\n if curr and (val > curr[-1] + 1):\n interval_list.append((curr[0], curr[-1]))\n curr = [val]\n else:\n curr.append(val)\n\n if curr:\n interval_list.append((curr[0], curr[-1]))\n\n # For each interval collapse it down to \"first, last\" or just \"first\" if\n # if first == last.\n return ','.join([\n '{}-{}'.format(pair[0], pair[1]) if pair[0] != pair[1] else str(pair[0])\n for pair in interval_list\n ])","function_tokens":["def","compact_interval_string","(","value_list",")",":","if","not","value_list",":","return","''","value_list",".","sort","(",")","# Start by simply building up a list of separate contiguous 
intervals","interval_list","=","[","]","curr","=","[","]","for","val","in","value_list",":","if","curr","and","(","val",">","curr","[","-","1","]","+","1",")",":","interval_list",".","append","(","(","curr","[","0","]",",","curr","[","-","1","]",")",")","curr","=","[","val","]","else",":","curr",".","append","(","val",")","if","curr",":","interval_list",".","append","(","(","curr","[","0","]",",","curr","[","-","1","]",")",")","# For each interval collapse it down to \"first, last\" or just \"first\" if","# if first == last.","return","','",".","join","(","[","'{}-{}'",".","format","(","pair","[","0","]",",","pair","[","1","]",")","if","pair","[","0","]","!=","pair","[","1","]","else","str","(","pair","[","0","]",")","for","pair","in","interval_list","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/dsub_util.py#L102-L135"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"get_storage_service","parameters":"(credentials)","argument_list":"","return_statement":"return googleapiclient.discovery.build(\n 'storage', 'v1', credentials=credentials, cache_discovery=False)","docstring":"Get a storage client using the provided credentials or defaults.","docstring_summary":"Get a storage client using the provided credentials or defaults.","docstring_tokens":["Get","a","storage","client","using","the","provided","credentials","or","defaults","."],"function":"def get_storage_service(credentials):\n \"\"\"Get a storage client using the provided credentials or defaults.\"\"\"\n # dsub is not a server application, so it is ok to filter this warning.\n warnings.filterwarnings(\n 'ignore', 'Your application has authenticated using end user credentials')\n if credentials is None:\n credentials, _ = google.auth.default()\n # Set cache_discovery to False because we use google-auth\n # See https:\/\/github.com\/googleapis\/google-api-python-client\/issues\/299\n return googleapiclient.discovery.build(\n 'storage', 'v1', credentials=credentials, cache_discovery=False)","function_tokens":["def","get_storage_service","(","credentials",")",":","# dsub is not a server application, so it is ok to filter this warning.","warnings",".","filterwarnings","(","'ignore'",",","'Your application has authenticated using end user credentials'",")","if","credentials","is","None",":","credentials",",","_","=","google",".","auth",".","default","(",")","# Set cache_discovery to False because we use google-auth","# See https:\/\/github.com\/googleapis\/google-api-python-client\/issues\/299","return","googleapiclient",".","discovery",".","build","(","'storage'",",","'v1'",",","credentials","=","credentials",",","cache_discovery","=","False",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/dsub_util.py#L138-L148"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"_downloader_next_chunk","parameters":"(downloader)","argument_list":"","return_statement":"return downloader.next_chunk()","docstring":"Downloads the next chunk.","docstring_summary":"Downloads the next chunk.","docstring_tokens":["Downloads","the","next","chunk","."],"function":"def _downloader_next_chunk(downloader):\n \"\"\"Downloads the next chunk.\"\"\"\n return 

# dsub_util.py#L165-L167
def _downloader_next_chunk(downloader):
  """Downloads the next chunk."""
  return downloader.next_chunk()


# dsub_util.py#L170-L195
def _load_file_from_gcs(gcs_file_path, credentials=None):
  """Load content from a text file in GCS.

  Args:
    gcs_file_path: The target file path; should have the 'gs://' prefix.
    credentials: Optional credential to be used to load the file from gcs.

  Returns:
    The content of the text file as a string.
  """
  gcs_service = get_storage_service(credentials)

  bucket_name, object_name = gcs_file_path[len('gs://'):].split('/', 1)
  request = gcs_service.objects().get_media(
      bucket=bucket_name, object=object_name)

  file_handle = io.BytesIO()
  downloader = googleapiclient.http.MediaIoBaseDownload(
      file_handle, request, chunksize=1024 * 1024)
  done = False
  while not done:
    _, done = _downloader_next_chunk(downloader)
  filevalue = file_handle.getvalue()
  if not isinstance(filevalue, str):
    filevalue = filevalue.decode()
  return filevalue
gcs.","docstring_tokens":["Load","a","file","from","either","local","or","gcs","."],"function":"def load_file(file_path, credentials=None):\n \"\"\"Load a file from either local or gcs.\n\n Args:\n file_path: The target file path, which should have the prefix 'gs:\/\/' if\n to be loaded from gcs.\n credentials: Optional credential to be used to load the file from gcs.\n\n Returns:\n The contents of the file as a string.\n \"\"\"\n if file_path.startswith('gs:\/\/'):\n return _load_file_from_gcs(file_path, credentials)\n else:\n with open(file_path, 'r') as f:\n return f.read()","function_tokens":["def","load_file","(","file_path",",","credentials","=","None",")",":","if","file_path",".","startswith","(","'gs:\/\/'",")",":","return","_load_file_from_gcs","(","file_path",",","credentials",")","else",":","with","open","(","file_path",",","'r'",")","as","f",":","return","f",".","read","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/dsub_util.py#L198-L213"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"_file_exists_in_gcs","parameters":"(gcs_file_path, credentials=None, storage_service=None)","argument_list":"","return_statement":"","docstring":"Check whether the file exists, in GCS.\n\n Args:\n gcs_file_path: The target file path; should have the 'gs:\/\/' prefix.\n credentials: Optional credential to be used to load the file from gcs.\n storage_service: GCS API service object.\n\n Returns:\n True if the file's there.","docstring_summary":"Check whether the file exists, in GCS.","docstring_tokens":["Check","whether","the","file","exists","in","GCS","."],"function":"def _file_exists_in_gcs(gcs_file_path, credentials=None, storage_service=None):\n \"\"\"Check whether the file exists, in GCS.\n\n Args:\n gcs_file_path: The target file path; should have the 'gs:\/\/' prefix.\n credentials: Optional credential to be used to load the file from gcs.\n storage_service: GCS API service object.\n\n Returns:\n True if the file's there.\n \"\"\"\n if storage_service is None:\n storage_service = get_storage_service(credentials)\n\n bucket_name, object_name = gcs_file_path[len('gs:\/\/'):].split('\/', 1)\n request = storage_service.objects().get(\n bucket=bucket_name, object=object_name, projection='noAcl')\n try:\n request.execute()\n return True\n except googleapiclient.errors.HttpError:\n return False","function_tokens":["def","_file_exists_in_gcs","(","gcs_file_path",",","credentials","=","None",",","storage_service","=","None",")",":","if","storage_service","is","None",":","storage_service","=","get_storage_service","(","credentials",")","bucket_name",",","object_name","=","gcs_file_path","[","len","(","'gs:\/\/'",")",":","]",".","split","(","'\/'",",","1",")","request","=","storage_service",".","objects","(",")",".","get","(","bucket","=","bucket_name",",","object","=","object_name",",","projection","=","'noAcl'",")","try",":","request",".","execute","(",")","return","True","except","googleapiclient",".","errors",".","HttpError",":","return","False"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/dsub_util.py#L230-L251"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/dsub_util.py","language":"python","identifier":"_prefix_exists_in_gcs","parameters":"(gcs_prefix, credentials=None, 

# dsub_util.py#L268-L293
def _prefix_exists_in_gcs(gcs_prefix, credentials=None, storage_service=None):
  """Check whether there is a GCS object whose name starts with the prefix.

  Since GCS doesn't actually have folders, this is how we check instead.

  Args:
    gcs_prefix: The path; should start with 'gs://'.
    credentials: Optional credential to be used to load the file from gcs.
    storage_service: GCS API service object.

  Returns:
    True if the prefix matches at least one object in GCS.

  Raises:
    googleapiclient.errors.HttpError: if it can't talk to the server
  """
  if storage_service is None:
    storage_service = get_storage_service(credentials)

  bucket_name, prefix = gcs_prefix[len('gs://'):].split('/', 1)
  # documentation in
  # https://cloud.google.com/storage/docs/json_api/v1/objects/list
  request = storage_service.objects().list(
      bucket=bucket_name, prefix=prefix, maxResults=1)
  response = request.execute()
  return response.get('items', None)

# dsub_util.py#L318-L361
def simple_pattern_exists_in_gcs(file_pattern,
                                 credentials=None,
                                 storage_service=None):
  """True iff an object exists matching the input GCS pattern.

  The GCS pattern must be a full object reference or a "simple pattern" that
  conforms to the dsub input and output parameter restrictions:

    * No support for **, ? wildcards or [] character ranges
    * Wildcards may only appear in the file name

  Args:
    file_pattern: eg. 'gs://foo/ba*'
    credentials: Optional credential to be used to load the file from gcs.
    storage_service: GCS API service object.

  Raises:
    ValueError: if file_pattern breaks the rules.

  Returns:
    True iff a file exists that matches that pattern.
  """
  if '*' not in file_pattern:
    return _file_exists_in_gcs(file_pattern, credentials, storage_service)
  if not file_pattern.startswith('gs://'):
    raise ValueError('file name must start with gs://')

  if storage_service is None:
    storage_service = get_storage_service(credentials)

  bucket_name, prefix = file_pattern[len('gs://'):].split('/', 1)
  if '*' in bucket_name:
    raise ValueError('Wildcards may not appear in the bucket name')
  # There is a '*' in prefix because we checked there's one in file_pattern
  # and there isn't one in bucket_name. Hence it must be in prefix.
  assert '*' in prefix
  prefix_no_wildcard = prefix[:prefix.index('*')]
  request = storage_service.objects().list(
      bucket=bucket_name, prefix=prefix_no_wildcard)
  response = request.execute()
  if 'items' not in response:
    return False
  items_list = [i['name'] for i in response['items']]
  return any(fnmatch.fnmatch(i, prefix) for i in items_list)
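
# Usage sketch (not part of dsub): which patterns the "simple pattern" rules
# accept. Bucket and object names are invented examples.
def _demo_simple_pattern_exists_in_gcs():
  # A full object reference (no wildcard) is checked directly.
  simple_pattern_exists_in_gcs('gs://my-bucket/outputs/result.txt')
  # A wildcard in the file name is allowed.
  simple_pattern_exists_in_gcs('gs://my-bucket/outputs/result-*.txt')
  # A wildcard in the bucket name raises ValueError.
  try:
    simple_pattern_exists_in_gcs('gs://my-*/outputs/result.txt')
  except ValueError:
    pass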

# dsub_util.py#L364-L381
def outputs_are_present(outputs, storage_service=None):
  """True if each output contains at least one file or no output specified."""
  # outputs are OutputFileParam (see param_util.py)

  # If outputs contain a pattern, then there is no way for `dsub` to verify
  # that *all* output is present. The best that `dsub` can do is to verify
  # that *some* output was created for each such parameter.
  for o in outputs:
    if not o.value:
      continue
    if o.recursive:
      if not folder_exists(o.value, storage_service=storage_service):
        return False
    else:
      if not simple_pattern_exists_in_gcs(
          o.value, storage_service=storage_service):
        return False
  return True


# ==============================================================================
# dsub/lib/dsub_errors.py
# https://github.com/DataBiosphere/dsub/blob/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0/dsub/lib/dsub_errors.py
# ==============================================================================


# dsub_errors.py#L21-L33 (the record captures only __init__; the enclosing
# `class JobError(Exception)` line is reconstructed).
class JobError(Exception):

  def __init__(self, message, error_list, launched_job):
    """Create a JobError to indicate something went wrong.

    Args:
      message: user-friendly message
      error_list: what went wrong
      launched_job: set if the job launched, but errors occurred while
        "--wait"ing on the tasks.
    """
    super(JobError, self).__init__(message)
    self.message = message
    self.error_list = error_list
    self.launched_job = launched_job
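
# Usage sketch (not part of dsub): raising and surfacing a JobError. The
# message, errors, and job payload are invented examples.
def _demo_job_error():
  try:
    raise JobError(
        message='2 of 10 tasks failed',
        error_list=['task-1 failed', 'task-7 failed'],
        launched_job={'job-id': 'my-job--user--220101-000000-00'})
  except JobError as e:
    print_error('%s: %s' % (e.message, '; '.join(e.error_list)))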

# ==============================================================================
# dsub/lib/retry_util.py
# https://github.com/DataBiosphere/dsub/blob/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0/dsub/lib/retry_util.py
# ==============================================================================

import datetime
import http.client
import socket
import ssl
import sys

import google.auth
import googleapiclient.errors
import httplib2
import tenacity

# MAX_API_ATTEMPTS, MAX_AUTH_ATTEMPTS, TRANSIENT_HTTP_ERROR_CODES,
# TRANSIENT_SOCKET_ERROR_CODES, and HTTP_AUTH_ERROR_CODES are module-level
# constants defined elsewhere in this file.


# retry_util.py#L53-L55
def _print_error(msg):
  """Utility routine to emit messages to stderr."""
  print(msg, file=sys.stderr)


# retry_util.py#L58-L70
def _print_retry_error(attempt_number, max_attempts, exception):
  """Prints an error message if appropriate."""
  now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
  try:
    status_code = exception.resp.status
  except AttributeError:
    status_code = ''

  if attempt_number % 5 == 0:
    _print_error('{}: Caught exception {} {}'.format(
        now, get_exception_type_string(exception), status_code))
    _print_error('{}: This request is being retried (attempt {} of {}).'.format(
        now, attempt_number, max_attempts))


# retry_util.py#L73-L86. The string literals below contained angle brackets
# and were stripped during extraction; they are reconstructed here.
def get_exception_type_string(exception):
  """Returns the full path of the exception."""
  exception_type_string = str(type(exception))

  # This is expected to look something like
  # "<class 'socket.timeout'>"
  if (exception_type_string.startswith("<class '") and
      exception_type_string.endswith("'>")):
    # Slice off the <class '...'> parts
    return exception_type_string[len("<class '"):-len("'>")]
  else:
    # If the exception type looks different than expected,
    # just print out the whole type.
    return exception_type_string
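
# Usage sketch (not part of dsub): a ValueError's type reprs as
# "<class 'ValueError'>", so the helper returns just the dotted name.
def _demo_get_exception_type_string():
  assert get_exception_type_string(ValueError('boom')) == 'ValueError'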

# retry_util.py#L89-L142
def retry_api_check(retry_state: tenacity.RetryCallState) -> bool:
  """Return True if we should retry, False otherwise.

  Args:
    retry_state: A retry state including exception to test for transience.

  Returns:
    True if we should retry. False otherwise.
  """
  exception = retry_state.outcome.exception()
  attempt_number = retry_state.attempt_number
  now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')

  if isinstance(exception, googleapiclient.errors.HttpError):
    if exception.resp.status in TRANSIENT_HTTP_ERROR_CODES:
      _print_retry_error(attempt_number, MAX_API_ATTEMPTS, exception)
      return True

  if isinstance(exception, socket.error):
    if exception.errno in TRANSIENT_SOCKET_ERROR_CODES:
      _print_retry_error(attempt_number, MAX_API_ATTEMPTS, exception)
      return True

  if isinstance(exception, socket.timeout):
    _print_retry_error(attempt_number, MAX_API_ATTEMPTS, exception)
    return True

  if isinstance(exception, google.auth.exceptions.RefreshError):
    _print_retry_error(attempt_number, MAX_API_ATTEMPTS, exception)
    return True

  # For a given installation, this could be a permanent error, but has only
  # been observed as transient.
  if isinstance(exception, ssl.SSLError):
    _print_retry_error(attempt_number, MAX_API_ATTEMPTS, exception)
    return True

  # This has been observed as a transient error:
  # ServerNotFoundError: Unable to find the server at genomics.googleapis.com
  if isinstance(exception, httplib2.ServerNotFoundError):
    _print_retry_error(attempt_number, MAX_API_ATTEMPTS, exception)
    return True

  # Observed to be thrown transiently from auth libraries which use httplib2
  if isinstance(exception, http.client.ResponseNotReady):
    _print_retry_error(attempt_number, MAX_API_ATTEMPTS, exception)
    return True

  if not exception and attempt_number > 5:
    _print_error('{}: Retry SUCCEEDED'.format(now))

  return False

# retry_util.py#L145-L169
def retry_auth_check(retry_state: tenacity.RetryCallState) -> bool:
  """Specific check for auth error codes.

  Return True if we should retry, False otherwise.

  Args:
    retry_state: A retry state including exception to test for transience.

  Returns:
    True if we should retry. False otherwise.
  """
  exception = retry_state.outcome.exception()
  attempt_number = retry_state.attempt_number
  now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')

  if isinstance(exception, googleapiclient.errors.HttpError):
    if exception.resp.status in HTTP_AUTH_ERROR_CODES:
      _print_retry_error(attempt_number, MAX_AUTH_ATTEMPTS, exception)
      return True

  if not exception and attempt_number > 4:
    _print_error('{}: Retry SUCCEEDED'.format(now))

  return False


# retry_util.py#L172-L186
def on_give_up(retry_state: tenacity.RetryCallState) -> None:
  """Called after all retries failed.

  Simply outputs a message and re-raises.

  Args:
    retry_state: info about current retry invocation.

  Returns:
    None.
  """
  exception = retry_state.outcome.exception()
  attempt_number = retry_state.attempt_number
  _print_error('Giving up after {} attempts'.format(attempt_number))
  raise exception
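
# Wiring sketch (an assumption, not dsub's exact configuration): tenacity
# accepts a retry_state -> bool callable for `retry`, so the predicates above
# can drive a retry decorator roughly like this.
def _demo_retry_decorator(api_request):
  @tenacity.retry(
      stop=tenacity.stop_after_attempt(MAX_API_ATTEMPTS),
      retry=retry_api_check,
      wait=tenacity.wait_exponential(multiplier=0.5, max=64),
      retry_error_callback=on_give_up)
  def _execute():
    # e.g. a googleapiclient request.execute() call
    return api_request.execute()
  return _execute()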

# ==============================================================================
# dsub/lib/providers_util.py
# https://github.com/DataBiosphere/dsub/blob/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0/dsub/lib/providers_util.py
# ==============================================================================

import os
import textwrap

from . import job_model

# DATA_MOUNT_POINT, DSUB_VERSION, and _LOCALIZE_COMMAND_MAP are module-level
# names defined or imported elsewhere in this file.


# providers_util.py#L62-L72
def get_file_environment_variables(file_params):
  """Return a dictionary of environment variables for the user container."""
  env = {}
  for param in file_params:
    # We have no cases where the environment variables provided to user
    # scripts have a trailing slash, so be sure to always strip it.
    # The case that this is specifically handling is --input-recursive and
    # --output-recursive variables, which are directory values.
    env[param.name] = os.path.join(
        DATA_MOUNT_POINT, param.docker_path.rstrip('/')) if param.value else ''
  return env


# providers_util.py#L75-L95
def build_recursive_localize_env(destination, inputs):
  """Return a multi-line string with export statements for the variables.

  Arguments:
    destination: Folder where the data will be put.
      For example /mnt/data
    inputs: a list of InputFileParam

  Returns:
    a multi-line string with a shell script that sets environment variables
    corresponding to the inputs.
  """
  # pylint: disable=g-complex-comprehension
  export_input_dirs = '\n'.join([
      'export {0}={1}/{2}'.format(var.name, destination.rstrip('/'),
                                  var.docker_path.rstrip('/'))
      for var in inputs
      if var.recursive and var.docker_path
  ])
  # pylint: enable=g-complex-comprehension
  return export_input_dirs
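
# Usage sketch (not part of dsub): a stand-in object with the attributes the
# builder reads (name, docker_path, recursive); dsub's real InputFileParam
# lives in the job model.
def _demo_build_recursive_localize_env():
  import collections
  FakeParam = collections.namedtuple('FakeParam',
                                     ['name', 'docker_path', 'recursive'])
  inputs = [FakeParam('INPUT_DIR', 'input/data/', True)]
  assert (build_recursive_localize_env('/mnt/data', inputs) ==
          'export INPUT_DIR=/mnt/data/input/data')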
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/providers_util.py","language":"python","identifier":"build_recursive_localize_command","parameters":"(destination, inputs, file_provider)","argument_list":"","return_statement":"return copy_input_dirs","docstring":"Return a multi-line string with a shell script to copy recursively.\n\n Arguments:\n destination: Folder where to put the data.\n For example \/mnt\/data\n inputs: a list of InputFileParam\n file_provider: file provider string used to filter the output params; the\n returned command will only apply outputs whose file provider\n matches this file filter.\n\n Returns:\n a multi-line string with a shell script that copies the inputs\n recursively from GCS.","docstring_summary":"Return a multi-line string with a shell script to copy recursively.","docstring_tokens":["Return","a","multi","-","line","string","with","a","shell","script","to","copy","recursively","."],"function":"def build_recursive_localize_command(destination, inputs, file_provider):\n \"\"\"Return a multi-line string with a shell script to copy recursively.\n\n Arguments:\n destination: Folder where to put the data.\n For example \/mnt\/data\n inputs: a list of InputFileParam\n file_provider: file provider string used to filter the output params; the\n returned command will only apply outputs whose file provider\n matches this file filter.\n\n Returns:\n a multi-line string with a shell script that copies the inputs\n recursively from GCS.\n \"\"\"\n command = _LOCALIZE_COMMAND_MAP[file_provider]\n filtered_inputs = [\n var for var in inputs\n if var.recursive and var.file_provider == file_provider\n ]\n # pylint: disable=g-complex-comprehension\n copy_input_dirs = '\\n'.join([\n textwrap.dedent(\"\"\"\n mkdir -p {data_mount}\/{docker_path}\n for ((i = 0; i < 3; i++)); do\n if {command} {source_uri} {data_mount}\/{docker_path}; then\n break\n elif ((i == 2)); then\n 1>&2 echo \"Recursive localization failed.\"\n exit 1\n fi\n done\n chmod -R o+r {data_mount}\/{docker_path}\n \"\"\").format(\n command=command,\n source_uri=var.uri,\n data_mount=destination.rstrip('\/'),\n docker_path=var.docker_path) for var in filtered_inputs\n ])\n # pylint: enable=g-complex-comprehension\n return copy_input_dirs","function_tokens":["def","build_recursive_localize_command","(","destination",",","inputs",",","file_provider",")",":","command","=","_LOCALIZE_COMMAND_MAP","[","file_provider","]","filtered_inputs","=","[","var","for","var","in","inputs","if","var",".","recursive","and","var",".","file_provider","==","file_provider","]","# pylint: disable=g-complex-comprehension","copy_input_dirs","=","'\\n'",".","join","(","[","textwrap",".","dedent","(","\"\"\"\n mkdir -p {data_mount}\/{docker_path}\n for ((i = 0; i < 3; i++)); do\n if {command} {source_uri} {data_mount}\/{docker_path}; then\n break\n elif ((i == 2)); then\n 1>&2 echo \"Recursive localization failed.\"\n exit 1\n fi\n done\n chmod -R o+r {data_mount}\/{docker_path}\n \"\"\"",")",".","format","(","command","=","command",",","source_uri","=","var",".","uri",",","data_mount","=","destination",".","rstrip","(","'\/'",")",",","docker_path","=","var",".","docker_path",")","for","var","in","filtered_inputs","]",")","# pylint: enable=g-complex-comprehension","return","copy_input_dirs"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/providers_util.py#L98-L138"} 
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/providers_util.py","language":"python","identifier":"build_recursive_gcs_delocalize_env","parameters":"(source, outputs)","argument_list":"","return_statement":"return '\\n'.join([\n 'export {0}={1}\/{2}'.format(var.name,\n source.rstrip('\/'),\n var.docker_path.rstrip('\/'))\n for var in filtered_outs\n ])","docstring":"Return a multi-line string with export statements for the variables.\n\n Arguments:\n source: Folder with the data.\n For example \/mnt\/data\n outputs: a list of OutputFileParam\n\n Returns:\n a multi-line string with a shell script that sets environment variables\n corresponding to the outputs.","docstring_summary":"Return a multi-line string with export statements for the variables.","docstring_tokens":["Return","a","multi","-","line","string","with","export","statements","for","the","variables","."],"function":"def build_recursive_gcs_delocalize_env(source, outputs):\n \"\"\"Return a multi-line string with export statements for the variables.\n\n Arguments:\n source: Folder with the data.\n For example \/mnt\/data\n outputs: a list of OutputFileParam\n\n Returns:\n a multi-line string with a shell script that sets environment variables\n corresponding to the outputs.\n \"\"\"\n filtered_outs = [\n var for var in outputs\n if var.recursive and var.file_provider == job_model.P_GCS\n ]\n # pylint: disable=g-complex-comprehension\n return '\\n'.join([\n 'export {0}={1}\/{2}'.format(var.name,\n source.rstrip('\/'),\n var.docker_path.rstrip('\/'))\n for var in filtered_outs\n ])","function_tokens":["def","build_recursive_gcs_delocalize_env","(","source",",","outputs",")",":","filtered_outs","=","[","var","for","var","in","outputs","if","var",".","recursive","and","var",".","file_provider","==","job_model",".","P_GCS","]","# pylint: disable=g-complex-comprehension","return","'\\n'",".","join","(","[","'export {0}={1}\/{2}'",".","format","(","var",".","name",",","source",".","rstrip","(","'\/'",")",",","var",".","docker_path",".","rstrip","(","'\/'",")",")","for","var","in","filtered_outs","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/providers_util.py#L141-L163"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/providers_util.py","language":"python","identifier":"build_recursive_delocalize_command","parameters":"(source, outputs, file_provider)","argument_list":"","return_statement":"return '\\n'.join([\n textwrap.dedent(\"\"\"\n for ((i = 0; i < 3; i++)); do\n if {command} {data_mount}\/{docker_path} {destination_uri}; then\n break\n elif ((i == 2)); then\n 1>&2 echo \"Recursive de-localization failed.\"\n exit 1\n fi\n done\n \"\"\").format(\n command=command,\n data_mount=source.rstrip('\/'),\n docker_path=var.docker_path,\n destination_uri=var.uri) for var in filtered_outputs\n ])","docstring":"Return a multi-line string with a shell script to copy recursively.\n\n Arguments:\n source: Folder with the data.\n For example \/mnt\/data\n outputs: a list of OutputFileParam.\n file_provider: file provider string used to filter the output params; the\n returned command will only apply outputs whose file provider\n matches this file filter.\n\n Returns:\n a multi-line string with a shell script that copies the inputs\n recursively to GCS.","docstring_summary":"Return a multi-line string with a shell script to copy 

# providers_util.py#L167-L203
def build_recursive_delocalize_command(source, outputs, file_provider):
  """Return a multi-line string with a shell script to copy recursively.

  Arguments:
    source: Folder with the data.
      For example /mnt/data
    outputs: a list of OutputFileParam.
    file_provider: file provider string used to filter the output params; the
      returned command will only apply outputs whose file provider
      matches this file filter.

  Returns:
    a multi-line string with a shell script that copies the outputs
    recursively to GCS.
  """
  command = _LOCALIZE_COMMAND_MAP[file_provider]
  filtered_outputs = [
      var for var in outputs
      if var.recursive and var.file_provider == file_provider
  ]
  # pylint: disable=g-complex-comprehension
  return '\n'.join([
      textwrap.dedent("""
      for ((i = 0; i < 3; i++)); do
        if {command} {data_mount}/{docker_path} {destination_uri}; then
          break
        elif ((i == 2)); then
          1>&2 echo "Recursive de-localization failed."
          exit 1
        fi
      done
      """).format(
          command=command,
          data_mount=source.rstrip('/'),
          docker_path=var.docker_path,
          destination_uri=var.uri) for var in filtered_outputs
  ])


# providers_util.py#L207-L212
def get_task_metadata(job_metadata, task_id):
  """Returns a dict combining job metadata with the task id."""
  task_metadata = job_metadata.copy()
  task_metadata['task-id'] = task_id

  return task_metadata
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/providers_util.py","language":"python","identifier":"build_mount_env","parameters":"(source, mounts)","argument_list":"","return_statement":"return '\\n'.join([\n 'export {0}={1}\/{2}'.format(var.name, source.rstrip('\/'),\n var.docker_path.rstrip('\/')) for var in mounts\n ])","docstring":"Return a multi-line string with export statements for the variables.\n\n Arguments:\n source: Folder with the data. For example \/mnt\/data\n mounts: a list of MountParam\n\n Returns:\n a multi-line string with a shell script that sets environment variables\n corresponding to the mounts.","docstring_summary":"Return a multi-line string with export statements for the variables.","docstring_tokens":["Return","a","multi","-","line","string","with","export","statements","for","the","variables","."],"function":"def build_mount_env(source, mounts):\n \"\"\"Return a multi-line string with export statements for the variables.\n\n Arguments:\n source: Folder with the data. For example \/mnt\/data\n mounts: a list of MountParam\n\n Returns:\n a multi-line string with a shell script that sets environment variables\n corresponding to the mounts.\n \"\"\"\n return '\\n'.join([\n 'export {0}={1}\/{2}'.format(var.name, source.rstrip('\/'),\n var.docker_path.rstrip('\/')) for var in mounts\n ])","function_tokens":["def","build_mount_env","(","source",",","mounts",")",":","return","'\\n'",".","join","(","[","'export {0}={1}\/{2}'",".","format","(","var",".","name",",","source",".","rstrip","(","'\/'",")",",","var",".","docker_path",".","rstrip","(","'\/'",")",")","for","var","in","mounts","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/providers_util.py#L215-L229"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/providers_util.py","language":"python","identifier":"get_job_and_task_param","parameters":"(job_params, task_params, field)","argument_list":"","return_statement":"return job_params.get(field, set()) | task_params.get(field, set())","docstring":"Returns a dict combining the field for job and task params.","docstring_summary":"Returns a dict combining the field for job and task params.","docstring_tokens":["Returns","a","dict","combining","the","field","for","job","and","task","params","."],"function":"def get_job_and_task_param(job_params, task_params, field):\n \"\"\"Returns a dict combining the field for job and task params.\"\"\"\n return job_params.get(field, set()) | task_params.get(field, set())","function_tokens":["def","get_job_and_task_param","(","job_params",",","task_params",",","field",")",":","return","job_params",".","get","(","field",",","set","(",")",")","|","task_params",".","get","(","field",",","set","(",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/providers_util.py#L232-L234"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/providers_util.py","language":"python","identifier":"prepare_job_metadata","parameters":"(script, job_name, user_id)","argument_list":"","return_statement":"return {\n 'job-name': job_name_value,\n 'user-id': user_id,\n 'dsub-version': version,\n }","docstring":"Returns a dictionary of metadata fields for the job.","docstring_summary":"Returns a dictionary of metadata fields for the 
job.","docstring_tokens":["Returns","a","dictionary","of","metadata","fields","for","the","job","."],"function":"def prepare_job_metadata(script, job_name, user_id):\n \"\"\"Returns a dictionary of metadata fields for the job.\"\"\"\n\n # The name of the job is derived from the job_name and gets set as a\n # 'job-name' label (and so the value must be normalized).\n if not job_name:\n job_name = os.path.basename(script).split('.', 1)[0]\n job_name_value = job_model.convert_to_label_chars(job_name)\n\n # The user-id will get set as a label\n user_id = job_model.convert_to_label_chars(user_id)\n\n # Standard version is MAJOR.MINOR(.PATCH). This will convert the version\n # string to \"vMAJOR-MINOR(-PATCH)\". Example; \"0.1.0\" -> \"v0-1-0\".\n version = job_model.convert_to_label_chars('v%s' % DSUB_VERSION)\n return {\n 'job-name': job_name_value,\n 'user-id': user_id,\n 'dsub-version': version,\n }","function_tokens":["def","prepare_job_metadata","(","script",",","job_name",",","user_id",")",":","# The name of the job is derived from the job_name and gets set as a","# 'job-name' label (and so the value must be normalized).","if","not","job_name",":","job_name","=","os",".","path",".","basename","(","script",")",".","split","(","'.'",",","1",")","[","0","]","job_name_value","=","job_model",".","convert_to_label_chars","(","job_name",")","# The user-id will get set as a label","user_id","=","job_model",".","convert_to_label_chars","(","user_id",")","# Standard version is MAJOR.MINOR(.PATCH). This will convert the version","# string to \"vMAJOR-MINOR(-PATCH)\". Example; \"0.1.0\" -> \"v0-1-0\".","version","=","job_model",".","convert_to_label_chars","(","'v%s'","%","DSUB_VERSION",")","return","{","'job-name'",":","job_name_value",",","'user-id'",":","user_id",",","'dsub-version'",":","version",",","}"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/providers_util.py#L237-L256"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"validate_param_name","parameters":"(name, param_type)","argument_list":"","return_statement":"","docstring":"Validate that the name follows posix conventions for env variables.","docstring_summary":"Validate that the name follows posix conventions for env variables.","docstring_tokens":["Validate","that","the","name","follows","posix","conventions","for","env","variables","."],"function":"def validate_param_name(name, param_type):\n \"\"\"Validate that the name follows posix conventions for env variables.\"\"\"\n # http:\/\/pubs.opengroup.org\/onlinepubs\/9699919799\/basedefs\/V1_chap03.html#tag_03_235\n #\n # 3.235 Name\n # In the shell command language, a word consisting solely of underscores,\n # digits, and alphabetics from the portable character set.\n if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', name):\n raise ValueError('Invalid %s: %s' % (param_type, name))","function_tokens":["def","validate_param_name","(","name",",","param_type",")",":","# http:\/\/pubs.opengroup.org\/onlinepubs\/9699919799\/basedefs\/V1_chap03.html#tag_03_235","#","# 3.235 Name","# In the shell command language, a word consisting solely of underscores,","# digits, and alphabetics from the portable character set.","if","not","re",".","match","(","r'^[a-zA-Z_][a-zA-Z0-9_]*$'",",","name",")",":","raise","ValueError","(","'Invalid %s: 
%s'","%","(","param_type",",","name",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L103-L111"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"validate_bucket_name","parameters":"(bucket)","argument_list":"","return_statement":"","docstring":"Validate that the name is a valid GCS bucket.","docstring_summary":"Validate that the name is a valid GCS bucket.","docstring_tokens":["Validate","that","the","name","is","a","valid","GCS","bucket","."],"function":"def validate_bucket_name(bucket):\n \"\"\"Validate that the name is a valid GCS bucket.\"\"\"\n if not bucket.startswith('gs:\/\/'):\n raise ValueError(\n 'Invalid bucket path \"%s\". Must start with \"gs:\/\/\".' % bucket)\n bucket_name = bucket[len('gs:\/\/'):]\n if not re.search(r'^\\w[\\w_\\.-]{1,61}\\w$', bucket_name):\n raise ValueError('Invalid bucket name: %s' % bucket)","function_tokens":["def","validate_bucket_name","(","bucket",")",":","if","not","bucket",".","startswith","(","'gs:\/\/'",")",":","raise","ValueError","(","'Invalid bucket path \"%s\". Must start with \"gs:\/\/\".'","%","bucket",")","bucket_name","=","bucket","[","len","(","'gs:\/\/'",")",":","]","if","not","re",".","search","(","r'^\\w[\\w_\\.-]{1,61}\\w$'",",","bucket_name",")",":","raise","ValueError","(","'Invalid bucket name: %s'","%","bucket",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L114-L121"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"convert_to_label_chars","parameters":"(s)","argument_list":"","return_statement":"return ''.join(label_char_transform(c) for c in s)","docstring":"Turn the specified name and value into a valid Google label.","docstring_summary":"Turn the specified name and value into a valid Google label.","docstring_tokens":["Turn","the","specified","name","and","value","into","a","valid","Google","label","."],"function":"def convert_to_label_chars(s):\n \"\"\"Turn the specified name and value into a valid Google label.\"\"\"\n\n # We want the results to be user-friendly, not just functional.\n # So we can't base-64 encode it.\n # * If upper-case: lower-case it\n # * If the char is not a standard letter or digit. make it a dash\n\n # March 2019 note: underscores are now allowed in labels.\n # However, removing the conversion of underscores to dashes here would\n # create inconsistencies between old jobs and new jobs.\n # With existing code, $USER \"jane_doe\" has a user-id label of \"jane-doe\".\n # If we remove the conversion, the user-id label for new jobs is \"jane_doe\".\n # This makes looking up old jobs more complicated.\n\n accepted_characters = string.ascii_lowercase + string.digits + '-'\n\n def label_char_transform(char):\n if char in accepted_characters:\n return char\n if char in string.ascii_uppercase:\n return char.lower()\n return '-'\n\n return ''.join(label_char_transform(c) for c in s)","function_tokens":["def","convert_to_label_chars","(","s",")",":","# We want the results to be user-friendly, not just functional.","# So we can't base-64 encode it.","# * If upper-case: lower-case it","# * If the char is not a standard letter or digit. 
make it a dash","# March 2019 note: underscores are now allowed in labels.","# However, removing the conversion of underscores to dashes here would","# create inconsistencies between old jobs and new jobs.","# With existing code, $USER \"jane_doe\" has a user-id label of \"jane-doe\".","# If we remove the conversion, the user-id label for new jobs is \"jane_doe\".","# This makes looking up old jobs more complicated.","accepted_characters","=","string",".","ascii_lowercase","+","string",".","digits","+","'-'","def","label_char_transform","(","char",")",":","if","char","in","accepted_characters",":","return","char","if","char","in","string",".","ascii_uppercase",":","return","char",".","lower","(",")","return","'-'","return","''",".","join","(","label_char_transform","(","c",")","for","c","in","s",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L182-L206"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"ensure_job_params_are_complete","parameters":"(job_params)","argument_list":"","return_statement":"","docstring":"For the job, ensure that each param entry is not None.","docstring_summary":"For the job, ensure that each param entry is not None.","docstring_tokens":["For","the","job","ensure","that","each","param","entry","is","not","None","."],"function":"def ensure_job_params_are_complete(job_params):\n \"\"\"For the job, ensure that each param entry is not None.\"\"\"\n for param in [\n 'labels', 'envs', 'inputs', 'outputs', 'mounts', 'input-recursives',\n 'output-recursives'\n ]:\n if not job_params.get(param):\n job_params[param] = set()","function_tokens":["def","ensure_job_params_are_complete","(","job_params",")",":","for","param","in","[","'labels'",",","'envs'",",","'inputs'",",","'outputs'",",","'mounts'",",","'input-recursives'",",","'output-recursives'","]",":","if","not","job_params",".","get","(","param",")",":","job_params","[","param","]","=","set","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L523-L530"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"ensure_task_params_are_complete","parameters":"(task_descriptors)","argument_list":"","return_statement":"","docstring":"For each task, ensure that each task param entry is not None.","docstring_summary":"For each task, ensure that each task param entry is not None.","docstring_tokens":["For","each","task","ensure","that","each","task","param","entry","is","not","None","."],"function":"def ensure_task_params_are_complete(task_descriptors):\n \"\"\"For each task, ensure that each task param entry is not None.\"\"\"\n for task_desc in task_descriptors:\n for param in [\n 'labels', 'envs', 'inputs', 'outputs', 'input-recursives',\n 'output-recursives'\n ]:\n if not task_desc.task_params.get(param):\n task_desc.task_params[param] = 
set()","function_tokens":["def","ensure_task_params_are_complete","(","task_descriptors",")",":","for","task_desc","in","task_descriptors",":","for","param","in","[","'labels'",",","'envs'",",","'inputs'",",","'outputs'",",","'input-recursives'",",","'output-recursives'","]",":","if","not","task_desc",".","task_params",".","get","(","param",")",":","task_desc",".","task_params","[","param","]","=","set","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L533-L541"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"_remove_empty_items","parameters":"(d, required)","argument_list":"","return_statement":"return new_dict","docstring":"Return a new dict with any empty items removed.\n\n Note that this is not a deep check. If d contains a dictionary which\n itself contains empty items, those are never checked.\n\n This method exists to make to_serializable() functions cleaner.\n We could revisit this some day, but for now, the serialized objects are\n stripped of empty values to keep the output YAML more compact.\n\n Args:\n d: a dictionary\n required: list of required keys (for example, TaskDescriptors always emit\n the \"task-id\", even if None)\n\n Returns:\n A dictionary with empty items removed.","docstring_summary":"Return a new dict with any empty items removed.","docstring_tokens":["Return","a","new","dict","with","any","empty","items","removed","."],"function":"def _remove_empty_items(d, required):\n \"\"\"Return a new dict with any empty items removed.\n\n Note that this is not a deep check. If d contains a dictionary which\n itself contains empty items, those are never checked.\n\n This method exists to make to_serializable() functions cleaner.\n We could revisit this some day, but for now, the serialized objects are\n stripped of empty values to keep the output YAML more compact.\n\n Args:\n d: a dictionary\n required: list of required keys (for example, TaskDescriptors always emit\n the \"task-id\", even if None)\n\n Returns:\n A dictionary with empty items removed.\n \"\"\"\n\n new_dict = {}\n for k, v in d.items():\n if k in required:\n new_dict[k] = v\n elif isinstance(v, int) or v:\n # \"if v\" would suppress emitting int(0)\n new_dict[k] = v\n\n return new_dict","function_tokens":["def","_remove_empty_items","(","d",",","required",")",":","new_dict","=","{","}","for","k",",","v","in","d",".","items","(",")",":","if","k","in","required",":","new_dict","[","k","]","=","v","elif","isinstance","(","v",",","int",")","or","v",":","# \"if v\" would suppress emitting int(0)","new_dict","[","k","]","=","v","return","new_dict"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L544-L571"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"task_view_generator","parameters":"(job_descriptor)","argument_list":"","return_statement":"","docstring":"Generator that yields a task-specific view of the job.\n\n This generator exists to make it easy for callers to iterate over the tasks\n in a JobDescriptor. 
Each pass yields a new JobDescriptor with a single task.\n\n Args:\n job_descriptor: A JobDescriptor with 1 or more tasks.\n\n Yields:\n A JobDescriptor with a single task.","docstring_summary":"Generator that yields a task-specific view of the job.","docstring_tokens":["Generator","that","yields","a","task","-","specific","view","of","the","job","."],"function":"def task_view_generator(job_descriptor):\n \"\"\"Generator that yields a task-specific view of the job.\n\n This generator exists to make it easy for callers to iterate over the tasks\n in a JobDescriptor. Each pass yields a new JobDescriptor with a single task.\n\n Args:\n job_descriptor: A JobDescriptor with 1 or more tasks.\n\n Yields:\n A JobDescriptor with a single task.\n \"\"\"\n for task_descriptor in job_descriptor.task_descriptors:\n jd = JobDescriptor(job_descriptor.job_metadata, job_descriptor.job_params,\n job_descriptor.job_resources, [task_descriptor])\n yield jd","function_tokens":["def","task_view_generator","(","job_descriptor",")",":","for","task_descriptor","in","job_descriptor",".","task_descriptors",":","jd","=","JobDescriptor","(","job_descriptor",".","job_metadata",",","job_descriptor",".","job_params",",","job_descriptor",".","job_resources",",","[","task_descriptor","]",")","yield","jd"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L888-L903"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"LabelParam._validate_label","parameters":"(cls, name, value)","argument_list":"","return_statement":"","docstring":"Raise ValueError if the label is invalid.","docstring_summary":"Raise ValueError if the label is invalid.","docstring_tokens":["Raise","ValueError","if","the","label","is","invalid","."],"function":"def _validate_label(cls, name, value):\n \"\"\"Raise ValueError if the label is invalid.\"\"\"\n # Rules for labels are described in:\n # https:\/\/cloud.google.com\/compute\/docs\/labeling-resources#restrictions\n\n # * Keys and values cannot be longer than 63 characters each.\n # * Keys and values can only contain lowercase letters, numeric characters,\n # underscores, and dashes.\n # * International characters are allowed.\n # * Label keys must start with a lowercase letter and international\n # characters are allowed.\n # * Label keys cannot be empty.\n cls._check_label_name(name)\n cls._check_label_value(value)\n\n # Ensure that reserved labels are not being used.\n if not cls._allow_reserved_keys and name in RESERVED_LABELS:\n raise ValueError('Label flag (%s=...) 
must not use reserved keys: %r' %\n (name, list(RESERVED_LABELS)))","function_tokens":["def","_validate_label","(","cls",",","name",",","value",")",":","# Rules for labels are described in:","# https:\/\/cloud.google.com\/compute\/docs\/labeling-resources#restrictions","# * Keys and values cannot be longer than 63 characters each.","# * Keys and values can only contain lowercase letters, numeric characters,","# underscores, and dashes.","# * International characters are allowed.","# * Label keys must start with a lowercase letter and international","# characters are allowed.","# * Label keys cannot be empty.","cls",".","_check_label_name","(","name",")","cls",".","_check_label_value","(","value",")","# Ensure that reserved labels are not being used.","if","not","cls",".","_allow_reserved_keys","and","name","in","RESERVED_LABELS",":","raise","ValueError","(","'Label flag (%s=...) must not use reserved keys: %r'","%","(","name",",","list","(","RESERVED_LABELS",")",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L229-L247"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"LabelParam._check_label_value","parameters":"(value)","argument_list":"","return_statement":"","docstring":"Raise ValueError if the label value is invalid.","docstring_summary":"Raise ValueError if the label value is invalid.","docstring_tokens":["Raise","ValueError","if","the","label","value","is","invalid","."],"function":"def _check_label_value(value):\n \"\"\"Raise ValueError if the label value is invalid.\"\"\"\n if not value:\n return\n\n if len(value) > 63:\n raise ValueError(\n 'Label values must not be longer than 63 characters: \"%s\"' % value)\n\n if not re.match(r'^([-_a-z0-9]*)?$', value):\n raise ValueError(\n 'Invalid value for label: \"%s\". Must contain only lowercase letters, '\n 'numeric characters, underscores, and dashes.' % value)","function_tokens":["def","_check_label_value","(","value",")",":","if","not","value",":","return","if","len","(","value",")",">","63",":","raise","ValueError","(","'Label values must not be longer than 63 characters: \"%s\"'","%","value",")","if","not","re",".","match","(","r'^([-_a-z0-9]*)?$'",",","value",")",":","raise","ValueError","(","'Invalid value for label: \"%s\". 
Must contain only lowercase letters, '","'numeric characters, underscores, and dashes.'","%","value",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L260-L272"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"TaskDescriptor.to_serializable","parameters":"(self)","argument_list":"","return_statement":"return _remove_empty_items(task, ['task-id'])","docstring":"Return a dict populated for serialization (as YAML\/JSON).","docstring_summary":"Return a dict populated for serialization (as YAML\/JSON).","docstring_tokens":["Return","a","dict","populated","for","serialization","(","as","YAML","\/","JSON",")","."],"function":"def to_serializable(self):\n \"\"\"Return a dict populated for serialization (as YAML\/JSON).\"\"\"\n\n task_metadata = self.task_metadata\n task_params = self.task_params\n task_resources = self.task_resources\n\n # The only required field is the task-id, even if it is None\n task_id = None\n if task_metadata.get('task-id') is not None:\n task_id = str(task_metadata.get('task-id'))\n\n task = {'task-id': task_id}\n task['create-time'] = task_metadata.get('create-time')\n task['task-attempt'] = task_metadata.get('task-attempt')\n\n if task_resources.logging_path:\n task['logging-path'] = str(task_resources.logging_path.uri)\n\n task['labels'] = {var.name: var.value for var in task_params['labels']}\n\n task['envs'] = {var.name: var.value for var in task_params['envs']}\n\n task['inputs'] = {\n var.name: var.value\n for var in task_params['inputs']\n if not var.recursive\n }\n task['input-recursives'] = {\n var.name: var.value\n for var in task_params['inputs']\n if var.recursive\n }\n task['outputs'] = {\n var.name: var.value\n for var in task_params['outputs']\n if not var.recursive\n }\n task['output-recursives'] = {\n var.name: var.value\n for var in task_params['outputs']\n if var.recursive\n }\n\n return _remove_empty_items(task, ['task-id'])","function_tokens":["def","to_serializable","(","self",")",":","task_metadata","=","self",".","task_metadata","task_params","=","self",".","task_params","task_resources","=","self",".","task_resources","# The only required field is the task-id, even if it is 
None","task_id","=","None","if","task_metadata",".","get","(","'task-id'",")","is","not","None",":","task_id","=","str","(","task_metadata",".","get","(","'task-id'",")",")","task","=","{","'task-id'",":","task_id","}","task","[","'create-time'","]","=","task_metadata",".","get","(","'create-time'",")","task","[","'task-attempt'","]","=","task_metadata",".","get","(","'task-attempt'",")","if","task_resources",".","logging_path",":","task","[","'logging-path'","]","=","str","(","task_resources",".","logging_path",".","uri",")","task","[","'labels'","]","=","{","var",".","name",":","var",".","value","for","var","in","task_params","[","'labels'","]","}","task","[","'envs'","]","=","{","var",".","name",":","var",".","value","for","var","in","task_params","[","'envs'","]","}","task","[","'inputs'","]","=","{","var",".","name",":","var",".","value","for","var","in","task_params","[","'inputs'","]","if","not","var",".","recursive","}","task","[","'input-recursives'","]","=","{","var",".","name",":","var",".","value","for","var","in","task_params","[","'inputs'","]","if","var",".","recursive","}","task","[","'outputs'","]","=","{","var",".","name",":","var",".","value","for","var","in","task_params","[","'outputs'","]","if","not","var",".","recursive","}","task","[","'output-recursives'","]","=","{","var",".","name",":","var",".","value","for","var","in","task_params","[","'outputs'","]","if","var",".","recursive","}","return","_remove_empty_items","(","task",",","[","'task-id'","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L604-L648"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"JobDescriptor.to_serializable","parameters":"(self)","argument_list":"","return_statement":"return _remove_empty_items(job, [])","docstring":"Return a dict populated for serialization (as YAML\/JSON).","docstring_summary":"Return a dict populated for serialization (as YAML\/JSON).","docstring_tokens":["Return","a","dict","populated","for","serialization","(","as","YAML","\/","JSON",")","."],"function":"def to_serializable(self):\n \"\"\"Return a dict populated for serialization (as YAML\/JSON).\"\"\"\n\n job_metadata = self.job_metadata\n job_resources = self.job_resources\n job_params = self.job_params\n task_descriptors = self.task_descriptors\n\n job = {\n 'job-id': job_metadata.get('job-id'),\n 'job-name': job_metadata.get('job-name'),\n 'user-id': job_metadata.get('user-id'),\n 'create-time': job_metadata.get('create-time'),\n 'dsub-version': job_metadata.get('dsub-version'),\n 'user-project': job_metadata.get('user-project'),\n 'task-ids': job_metadata.get('task-ids'),\n 'script-name': job_metadata['script'].name,\n }\n\n # logging is specified as a command-line argument and is typically\n # transformed (substituting job-id). 
The transformed value is saved\n # on a per-task basis as the 'logging-path'.\n if job_resources.logging:\n job['logging'] = str(job_resources.logging.uri)\n\n job['labels'] = {var.name: var.value for var in job_params['labels']}\n\n job['envs'] = {var.name: var.value for var in job_params['envs']}\n\n job['inputs'] = {\n var.name: var.value\n for var in job_params['inputs']\n if not var.recursive\n }\n job['input-recursives'] = {\n var.name: var.value\n for var in job_params['inputs']\n if var.recursive\n }\n\n job['outputs'] = {\n var.name: var.value\n for var in job_params['outputs']\n if not var.recursive\n }\n job['output-recursives'] = {\n var.name: var.value\n for var in job_params['outputs']\n if var.recursive\n }\n job['mounts'] = {var.name: var.value for var in job_params['mounts']}\n\n tasks = []\n for task_descriptor in task_descriptors:\n tasks.append(task_descriptor.to_serializable())\n\n job['tasks'] = tasks\n\n return _remove_empty_items(job, [])","function_tokens":["def","to_serializable","(","self",")",":","job_metadata","=","self",".","job_metadata","job_resources","=","self",".","job_resources","job_params","=","self",".","job_params","task_descriptors","=","self",".","task_descriptors","job","=","{","'job-id'",":","job_metadata",".","get","(","'job-id'",")",",","'job-name'",":","job_metadata",".","get","(","'job-name'",")",",","'user-id'",":","job_metadata",".","get","(","'user-id'",")",",","'create-time'",":","job_metadata",".","get","(","'create-time'",")",",","'dsub-version'",":","job_metadata",".","get","(","'dsub-version'",")",",","'user-project'",":","job_metadata",".","get","(","'user-project'",")",",","'task-ids'",":","job_metadata",".","get","(","'task-ids'",")",",","'script-name'",":","job_metadata","[","'script'","]",".","name",",","}","# logging is specified as a command-line argument and is typically","# transformed (substituting job-id). 
The transformed value is saved","# on a per-task basis as the 'logging-path'.","if","job_resources",".","logging",":","job","[","'logging'","]","=","str","(","job_resources",".","logging",".","uri",")","job","[","'labels'","]","=","{","var",".","name",":","var",".","value","for","var","in","job_params","[","'labels'","]","}","job","[","'envs'","]","=","{","var",".","name",":","var",".","value","for","var","in","job_params","[","'envs'","]","}","job","[","'inputs'","]","=","{","var",".","name",":","var",".","value","for","var","in","job_params","[","'inputs'","]","if","not","var",".","recursive","}","job","[","'input-recursives'","]","=","{","var",".","name",":","var",".","value","for","var","in","job_params","[","'inputs'","]","if","var",".","recursive","}","job","[","'outputs'","]","=","{","var",".","name",":","var",".","value","for","var","in","job_params","[","'outputs'","]","if","not","var",".","recursive","}","job","[","'output-recursives'","]","=","{","var",".","name",":","var",".","value","for","var","in","job_params","[","'outputs'","]","if","var",".","recursive","}","job","[","'mounts'","]","=","{","var",".","name",":","var",".","value","for","var","in","job_params","[","'mounts'","]","}","tasks","=","[","]","for","task_descriptor","in","task_descriptors",":","tasks",".","append","(","task_descriptor",".","to_serializable","(",")",")","job","[","'tasks'","]","=","tasks","return","_remove_empty_items","(","job",",","[","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L684-L742"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"JobDescriptor.to_yaml","parameters":"(self)","argument_list":"","return_statement":"return yaml.dump(self.to_serializable(), default_flow_style=False)","docstring":"Return a YAML string representing the job and task data.\n\n A provider's internal representation of a dsub task typically does not map\n 1-1 to the dsub representation. For example, the Google Genomics Pipeline\n does not natively support \"input-recursive\" or \"output-recursive\", so the\n google provider cannot easily reconstruct the user inputs from the\n pipeline's associated Operation object.\n\n All providers are likely to need a way to reliably serialize job and task-\n related information, either for dstat or for any type of \"retry\" mechanism\n we might want to build.\n\n Returns:\n YAML string","docstring_summary":"Return a YAML string representing the job and task data.","docstring_tokens":["Return","a","YAML","string","representing","the","job","and","task","data","."],"function":"def to_yaml(self):\n \"\"\"Return a YAML string representing the job and task data.\n\n A provider's internal representation of a dsub task typically does not map\n 1-1 to the dsub representation. 
For example, the Google Genomics Pipeline\n does not natively support \"input-recursive\" or \"output-recursive\", so the\n google provider cannot easily reconstruct the user inputs from the\n pipeline's associated Operation object.\n\n All providers are likely to need a way to reliably serialize job and task-\n related information, either for dstat or for any type of \"retry\" mechanism\n we might want to build.\n\n Returns:\n YAML string\n \"\"\"\n return yaml.dump(self.to_serializable(), default_flow_style=False)","function_tokens":["def","to_yaml","(","self",")",":","return","yaml",".","dump","(","self",".","to_serializable","(",")",",","default_flow_style","=","False",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L744-L760"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"JobDescriptor.from_yaml","parameters":"(cls, yaml_string)","argument_list":"","return_statement":"return JobDescriptor(job_metadata, job_params, job_resources,\n task_descriptors)","docstring":"Populate and return a JobDescriptor from a YAML string.","docstring_summary":"Populate and return a JobDescriptor from a YAML string.","docstring_tokens":["Populate","and","return","a","JobDescriptor","from","a","YAML","string","."],"function":"def from_yaml(cls, yaml_string):\n \"\"\"Populate and return a JobDescriptor from a YAML string.\"\"\"\n try:\n job = yaml.full_load(yaml_string)\n except AttributeError:\n # For installations that cannot update their PyYAML version\n job = yaml.load(yaml_string)\n\n job_metadata = {}\n for key in [\n 'job-id', 'job-name', 'task-ids', 'user-id', 'dsub-version',\n 'user-project', 'script-name'\n ]:\n if job.get(key) is not None:\n job_metadata[key] = job.get(key)\n\n # Make sure that create-time string is turned into a datetime\n job_create_time = job.get('create-time')\n cls._set_metadata_create_time(job_metadata, job_create_time)\n\n job_resources = Resources(logging=job.get('logging'))\n\n job_params = {}\n job_params['labels'] = cls._label_params_from_dict(job.get('labels', {}))\n job_params['envs'] = cls._env_params_from_dict(job.get('envs', {}))\n job_params['inputs'] = cls._input_file_params_from_dict(\n job.get('inputs', {}), False)\n job_params['input-recursives'] = cls._input_file_params_from_dict(\n job.get('input-recursives', {}), True)\n job_params['outputs'] = cls._output_file_params_from_dict(\n job.get('outputs', {}), False)\n job_params['output-recursives'] = cls._output_file_params_from_dict(\n job.get('output-recursives', {}), True)\n job_params['mounts'] = cls._mount_params_from_dict(job.get('mounts', {}))\n\n task_descriptors = []\n for task in job.get('tasks', []):\n task_metadata = {'task-id': task.get('task-id')}\n\n # Old instances of the meta.yaml do not have a task create time.\n create_time = task.get('create-time')\n if create_time:\n cls._set_metadata_create_time(task_metadata, create_time)\n\n if task.get('task-attempt') is not None:\n task_metadata['task-attempt'] = task.get('task-attempt')\n\n task_params = {}\n task_params['labels'] = cls._label_params_from_dict(\n task.get('labels', {}))\n task_params['envs'] = cls._env_params_from_dict(task.get('envs', {}))\n task_params['inputs'] = cls._input_file_params_from_dict(\n task.get('inputs', {}), False)\n task_params['input-recursives'] = cls._input_file_params_from_dict(\n task.get('input-recursives', {}), True)\n 
task_params['outputs'] = cls._output_file_params_from_dict(\n task.get('outputs', {}), False)\n task_params['output-recursives'] = cls._output_file_params_from_dict(\n task.get('output-recursives', {}), True)\n\n task_resources = Resources(logging_path=task.get('logging-path'))\n\n task_descriptors.append(\n TaskDescriptor(task_metadata, task_params, task_resources))\n\n return JobDescriptor(job_metadata, job_params, job_resources,\n task_descriptors)","function_tokens":["def","from_yaml","(","cls",",","yaml_string",")",":","try",":","job","=","yaml",".","full_load","(","yaml_string",")","except","AttributeError",":","# For installations that cannot update their PyYAML version","job","=","yaml",".","load","(","yaml_string",")","job_metadata","=","{","}","for","key","in","[","'job-id'",",","'job-name'",",","'task-ids'",",","'user-id'",",","'dsub-version'",",","'user-project'",",","'script-name'","]",":","if","job",".","get","(","key",")","is","not","None",":","job_metadata","[","key","]","=","job",".","get","(","key",")","# Make sure that create-time string is turned into a datetime","job_create_time","=","job",".","get","(","'create-time'",")","cls",".","_set_metadata_create_time","(","job_metadata",",","job_create_time",")","job_resources","=","Resources","(","logging","=","job",".","get","(","'logging'",")",")","job_params","=","{","}","job_params","[","'labels'","]","=","cls",".","_label_params_from_dict","(","job",".","get","(","'labels'",",","{","}",")",")","job_params","[","'envs'","]","=","cls",".","_env_params_from_dict","(","job",".","get","(","'envs'",",","{","}",")",")","job_params","[","'inputs'","]","=","cls",".","_input_file_params_from_dict","(","job",".","get","(","'inputs'",",","{","}",")",",","False",")","job_params","[","'input-recursives'","]","=","cls",".","_input_file_params_from_dict","(","job",".","get","(","'input-recursives'",",","{","}",")",",","True",")","job_params","[","'outputs'","]","=","cls",".","_output_file_params_from_dict","(","job",".","get","(","'outputs'",",","{","}",")",",","False",")","job_params","[","'output-recursives'","]","=","cls",".","_output_file_params_from_dict","(","job",".","get","(","'output-recursives'",",","{","}",")",",","True",")","job_params","[","'mounts'","]","=","cls",".","_mount_params_from_dict","(","job",".","get","(","'mounts'",",","{","}",")",")","task_descriptors","=","[","]","for","task","in","job",".","get","(","'tasks'",",","[","]",")",":","task_metadata","=","{","'task-id'",":","task",".","get","(","'task-id'",")","}","# Old instances of the meta.yaml do not have a task create 
time.","create_time","=","task",".","get","(","'create-time'",")","if","create_time",":","cls",".","_set_metadata_create_time","(","task_metadata",",","create_time",")","if","task",".","get","(","'task-attempt'",")","is","not","None",":","task_metadata","[","'task-attempt'","]","=","task",".","get","(","'task-attempt'",")","task_params","=","{","}","task_params","[","'labels'","]","=","cls",".","_label_params_from_dict","(","task",".","get","(","'labels'",",","{","}",")",")","task_params","[","'envs'","]","=","cls",".","_env_params_from_dict","(","task",".","get","(","'envs'",",","{","}",")",")","task_params","[","'inputs'","]","=","cls",".","_input_file_params_from_dict","(","task",".","get","(","'inputs'",",","{","}",")",",","False",")","task_params","[","'input-recursives'","]","=","cls",".","_input_file_params_from_dict","(","task",".","get","(","'input-recursives'",",","{","}",")",",","True",")","task_params","[","'outputs'","]","=","cls",".","_output_file_params_from_dict","(","task",".","get","(","'outputs'",",","{","}",")",",","False",")","task_params","[","'output-recursives'","]","=","cls",".","_output_file_params_from_dict","(","task",".","get","(","'output-recursives'",",","{","}",")",",","True",")","task_resources","=","Resources","(","logging_path","=","task",".","get","(","'logging-path'",")",")","task_descriptors",".","append","(","TaskDescriptor","(","task_metadata",",","task_params",",","task_resources",")",")","return","JobDescriptor","(","job_metadata",",","job_params",",","job_resources",",","task_descriptors",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L809-L875"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/job_model.py","language":"python","identifier":"JobDescriptor.find_task_descriptor","parameters":"(self, task_id)","argument_list":"","return_statement":"return None","docstring":"Returns the task_descriptor corresponding to task_id.","docstring_summary":"Returns the task_descriptor corresponding to task_id.","docstring_tokens":["Returns","the","task_descriptor","corresponding","to","task_id","."],"function":"def find_task_descriptor(self, task_id):\n \"\"\"Returns the task_descriptor corresponding to task_id.\"\"\"\n\n # It is not guaranteed that the index will be task_id - 1 when --tasks is\n # used with a min\/max range.\n for task_descriptor in self.task_descriptors:\n if task_descriptor.task_metadata.get('task-id') == task_id:\n return task_descriptor\n return None","function_tokens":["def","find_task_descriptor","(","self",",","task_id",")",":","# It is not guaranteed that the index will be task_id - 1 when --tasks is","# used with a min\/max range.","for","task_descriptor","in","self",".","task_descriptors",":","if","task_descriptor",".","task_metadata",".","get","(","'task-id'",")","==","task_id",":","return","task_descriptor","return","None"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/job_model.py#L877-L885"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/output_formatter.py","language":"python","identifier":"prepare_summary_table","parameters":"(rows)","argument_list":"","return_statement":"return new_rows","docstring":"Create a new table that is a summary of the input rows.\n\n All with the same (job-name or job-id, status) go together.\n\n Args:\n rows: the input rows, a list of dictionaries.\n\n 
Returns:\n A new row set of summary information.","docstring_summary":"Create a new table that is a summary of the input rows.","docstring_tokens":["Create","a","new","table","that","is","a","summary","of","the","input","rows","."],"function":"def prepare_summary_table(rows):\n \"\"\"Create a new table that is a summary of the input rows.\n\n All with the same (job-name or job-id, status) go together.\n\n Args:\n rows: the input rows, a list of dictionaries.\n\n Returns:\n A new row set of summary information.\n \"\"\"\n if not rows:\n return []\n\n # We either group on the job-name (if present) or fall back to the job-id\n key_field = 'job-name'\n if key_field not in rows[0]:\n key_field = 'job-id'\n\n # Group each of the rows based on (job-name or job-id, status)\n grouped = collections.defaultdict(lambda: collections.defaultdict(lambda: []))\n for row in rows:\n grouped[row.get(key_field, '')][row.get('status', '')] += [row]\n\n # Now that we have the rows grouped, create a summary table.\n # Use the original table as the driver in order to preserve the order.\n new_rows = []\n for job_key in sorted(grouped.keys()):\n group = grouped[job_key]\n canonical_status = ['RUNNING', 'SUCCESS', 'FAILURE', 'CANCEL']\n # Written this way to ensure that if somehow a new status is introduced,\n # it shows up in our output.\n for status in canonical_status + sorted(group.keys()):\n if status not in group:\n continue\n task_count = len(group[status])\n del group[status]\n if task_count:\n summary_row = collections.OrderedDict()\n summary_row[key_field] = job_key\n summary_row['status'] = status\n summary_row['task-count'] = task_count\n new_rows.append(summary_row)\n\n return new_rows","function_tokens":["def","prepare_summary_table","(","rows",")",":","if","not","rows",":","return","[","]","# We either group on the job-name (if present) or fall back to the job-id","key_field","=","'job-name'","if","key_field","not","in","rows","[","0","]",":","key_field","=","'job-id'","# Group each of the rows based on (job-name or job-id, status)","grouped","=","collections",".","defaultdict","(","lambda",":","collections",".","defaultdict","(","lambda",":","[","]",")",")","for","row","in","rows",":","grouped","[","row",".","get","(","key_field",",","''",")","]","[","row",".","get","(","'status'",",","''",")","]","+=","[","row","]","# Now that we have the rows grouped, create a summary table.","# Use the original table as the driver in order to preserve the order.","new_rows","=","[","]","for","job_key","in","sorted","(","grouped",".","keys","(",")",")",":","group","=","grouped","[","job_key","]","canonical_status","=","[","'RUNNING'",",","'SUCCESS'",",","'FAILURE'",",","'CANCEL'","]","# Written this way to ensure that if somehow a new status is introduced,","# it shows up in our output.","for","status","in","canonical_status","+","sorted","(","group",".","keys","(",")",")",":","if","status","not","in","group",":","continue","task_count","=","len","(","group","[","status","]",")","del","group","[","status","]","if","task_count",":","summary_row","=","collections",".","OrderedDict","(",")","summary_row","[","key_field","]","=","job_key","summary_row","[","'status'","]","=","status","summary_row","[","'task-count'","]","=","task_count","new_rows",".","append","(","summary_row",")","return","new_rows"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/output_formatter.py#L246-L290"} 
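Annotation (not part of the dataset): as a companion to the prepare_summary_table record above, here is a minimal standalone sketch of the same (job-name or job-id, status) grouping. It deliberately simplifies the real function: statuses are sorted alphabetically rather than listing the canonical statuses (RUNNING, SUCCESS, FAILURE, CANCEL) first, and the key field is passed explicitly instead of being detected from the first row. Illustrative only.

import collections

def summarize(rows, key_field='job-name'):
    # Group rows by (job key, status), then count tasks per group.
    grouped = collections.defaultdict(lambda: collections.defaultdict(list))
    for row in rows:
        grouped[row.get(key_field, '')][row.get('status', '')].append(row)
    summary = []
    for job_key in sorted(grouped):
        # Simplification: alphabetical status order, not the canonical order
        # used by the real prepare_summary_table.
        for status, tasks in sorted(grouped[job_key].items()):
            summary.append({key_field: job_key, 'status': status,
                            'task-count': len(tasks)})
    return summary

rows = [{'job-name': 'align', 'status': 'RUNNING'},
        {'job-name': 'align', 'status': 'RUNNING'},
        {'job-name': 'align', 'status': 'FAILURE'}]
print(summarize(rows))
# Prints: [{'job-name': 'align', 'status': 'FAILURE', 'task-count': 1},
#          {'job-name': 'align', 'status': 'RUNNING', 'task-count': 2}]

The nested-defaultdict-then-flatten shape is the same design the record documents; it preserves one summary row per distinct status within each job.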
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/output_formatter.py","language":"python","identifier":"prepare_row","parameters":"(task, full, summary)","argument_list":"","return_statement":"return row","docstring":"return a dict with the task's info (more if \"full\" is set).","docstring_summary":"return a dict with the task's info (more if \"full\" is set).","docstring_tokens":["return","a","dict","with","the","task","s","info","(","more","if","full","is","set",")","."],"function":"def prepare_row(task, full, summary):\n \"\"\"return a dict with the task's info (more if \"full\" is set).\"\"\"\n\n # Would like to include the Job ID in the default set of columns, but\n # it is a long value and would leave little room for status and update time.\n\n row_spec = collections.namedtuple('row_spec',\n ['key', 'required', 'default_value'])\n\n # pyformat: disable\n default_columns = [\n row_spec('job-name', True, None),\n row_spec('task-id', False, None),\n row_spec('last-update', True, None),\n row_spec('status-message', True, None)\n ]\n full_columns = default_columns + [\n row_spec('job-id', True, None),\n row_spec('user-id', True, None),\n row_spec('status', True, None),\n row_spec('status-detail', True, None),\n row_spec('task-attempt', False, None),\n row_spec('create-time', True, None),\n row_spec('start-time', True, None),\n row_spec('end-time', True, None),\n row_spec('internal-id', True, None),\n row_spec('logging', True, None),\n row_spec('labels', True, {}),\n row_spec('envs', True, {}),\n row_spec('inputs', True, {}),\n row_spec('input-recursives', False, {}),\n row_spec('outputs', True, {}),\n row_spec('output-recursives', False, {}),\n row_spec('mounts', True, {}),\n row_spec('provider', True, None),\n row_spec('provider-attributes', True, {}),\n row_spec('events', True, []),\n row_spec('user-project', False, None),\n row_spec('dsub-version', False, None),\n row_spec('script-name', False, None),\n row_spec('script', False, None),\n ]\n summary_columns = default_columns + [\n row_spec('job-id', True, None),\n row_spec('user-id', True, None),\n row_spec('status', True, None),\n ]\n # pyformat: enable\n\n assert not (full and summary), 'Full and summary cannot both be enabled'\n\n if full:\n columns = full_columns\n elif summary:\n columns = summary_columns\n else:\n columns = default_columns\n\n row = {}\n for col in columns:\n key, required, default = col\n\n value = task.get_field(key, default)\n if required or value is not None:\n row[key] = value\n\n return row","function_tokens":["def","prepare_row","(","task",",","full",",","summary",")",":","# Would like to include the Job ID in the default set of columns, but","# it is a long value and would leave little room for status and update time.","row_spec","=","collections",".","namedtuple","(","'row_spec'",",","[","'key'",",","'required'",",","'default_value'","]",")","# pyformat: 
disable","default_columns","=","[","row_spec","(","'job-name'",",","True",",","None",")",",","row_spec","(","'task-id'",",","False",",","None",")",",","row_spec","(","'last-update'",",","True",",","None",")",",","row_spec","(","'status-message'",",","True",",","None",")","]","full_columns","=","default_columns","+","[","row_spec","(","'job-id'",",","True",",","None",")",",","row_spec","(","'user-id'",",","True",",","None",")",",","row_spec","(","'status'",",","True",",","None",")",",","row_spec","(","'status-detail'",",","True",",","None",")",",","row_spec","(","'task-attempt'",",","False",",","None",")",",","row_spec","(","'create-time'",",","True",",","None",")",",","row_spec","(","'start-time'",",","True",",","None",")",",","row_spec","(","'end-time'",",","True",",","None",")",",","row_spec","(","'internal-id'",",","True",",","None",")",",","row_spec","(","'logging'",",","True",",","None",")",",","row_spec","(","'labels'",",","True",",","{","}",")",",","row_spec","(","'envs'",",","True",",","{","}",")",",","row_spec","(","'inputs'",",","True",",","{","}",")",",","row_spec","(","'input-recursives'",",","False",",","{","}",")",",","row_spec","(","'outputs'",",","True",",","{","}",")",",","row_spec","(","'output-recursives'",",","False",",","{","}",")",",","row_spec","(","'mounts'",",","True",",","{","}",")",",","row_spec","(","'provider'",",","True",",","None",")",",","row_spec","(","'provider-attributes'",",","True",",","{","}",")",",","row_spec","(","'events'",",","True",",","[","]",")",",","row_spec","(","'user-project'",",","False",",","None",")",",","row_spec","(","'dsub-version'",",","False",",","None",")",",","row_spec","(","'script-name'",",","False",",","None",")",",","row_spec","(","'script'",",","False",",","None",")",",","]","summary_columns","=","default_columns","+","[","row_spec","(","'job-id'",",","True",",","None",")",",","row_spec","(","'user-id'",",","True",",","None",")",",","row_spec","(","'status'",",","True",",","None",")",",","]","# pyformat: enable","assert","not","(","full","and","summary",")",",","'Full and summary cannot both be enabled'","if","full",":","columns","=","full_columns","elif","summary",":","columns","=","summary_columns","else",":","columns","=","default_columns","row","=","{","}","for","col","in","columns",":","key",",","required",",","default","=","col","value","=","task",".","get_field","(","key",",","default",")","if","required","or","value","is","not","None",":","row","[","key","]","=","value","return","row"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/output_formatter.py#L293-L359"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/output_formatter.py","language":"python","identifier":"OutputFormatter.prepare_output","parameters":"(self, row)","argument_list":"","return_statement":"return row","docstring":"Convert types of task fields.","docstring_summary":"Convert types of task fields.","docstring_tokens":["Convert","types","of","task","fields","."],"function":"def prepare_output(self, row):\n \"\"\"Convert types of task fields.\"\"\"\n date_fields = ['last-update', 'create-time', 'start-time', 'end-time']\n int_fields = ['task-attempt']\n\n for col in date_fields:\n if col in row:\n row[col] = self.default_format_date(row[col])\n\n for col in int_fields:\n if col in row and row[col] is not None:\n row[col] = int(row[col])\n\n return 
row","function_tokens":["def","prepare_output","(","self",",","row",")",":","date_fields","=","[","'last-update'",",","'create-time'",",","'start-time'",",","'end-time'","]","int_fields","=","[","'task-attempt'","]","for","col","in","date_fields",":","if","col","in","row",":","row","[","col","]","=","self",".","default_format_date","(","row","[","col","]",")","for","col","in","int_fields",":","if","col","in","row","and","row","[","col","]","is","not","None",":","row","[","col","]","=","int","(","row","[","col","]",")","return","row"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/output_formatter.py#L55-L68"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/output_formatter.py","language":"python","identifier":"OutputFormatter.print_table","parameters":"(self, table)","argument_list":"","return_statement":"","docstring":"Function to be defined by the derived class to print output.","docstring_summary":"Function to be defined by the derived class to print output.","docstring_tokens":["Function","to","be","defined","by","the","derived","class","to","print","output","."],"function":"def print_table(self, table):\n \"\"\"Function to be defined by the derived class to print output.\"\"\"\n raise NotImplementedError('print_table method not defined!')","function_tokens":["def","print_table","(","self",",","table",")",":","raise","NotImplementedError","(","'print_table method not defined!'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/output_formatter.py#L70-L72"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/output_formatter.py","language":"python","identifier":"OutputFormatter.prepare_and_print_table","parameters":"(self, rows, summary)","argument_list":"","return_statement":"","docstring":"Wrapper for prepare_output and print_table.","docstring_summary":"Wrapper for prepare_output and print_table.","docstring_tokens":["Wrapper","for","prepare_output","and","print_table","."],"function":"def prepare_and_print_table(self, rows, summary):\n \"\"\"Wrapper for prepare_output and print_table.\"\"\"\n if summary:\n rows = prepare_summary_table(rows)\n\n table = []\n for row in rows:\n row = self.prepare_output(row)\n table.append(row)\n self.print_table(table)","function_tokens":["def","prepare_and_print_table","(","self",",","rows",",","summary",")",":","if","summary",":","rows","=","prepare_summary_table","(","rows",")","table","=","[","]","for","row","in","rows",":","row","=","self",".","prepare_output","(","row",")","table",".","append","(","row",")","self",".","print_table","(","table",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/output_formatter.py#L74-L83"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/output_formatter.py","language":"python","identifier":"TextOutput._trim_display_field","parameters":"(self, value, max_length)","argument_list":"","return_statement":"return value","docstring":"Return a value for display; if longer than max length, use ellipsis.","docstring_summary":"Return a value for display; if longer than max length, use ellipsis.","docstring_tokens":["Return","a","value","for","display",";","if","longer","than","max","length","use","ellipsis","."],"function":"def _trim_display_field(self, value, max_length):\n \"\"\"Return a value 
for display; if longer than max length, use ellipsis.\"\"\"\n if not value:\n return ''\n if len(value) > max_length:\n return value[:max_length - 3] + '...'\n return value","function_tokens":["def","_trim_display_field","(","self",",","value",",","max_length",")",":","if","not","value",":","return","''","if","len","(","value",")",">","max_length",":","return","value","[",":","max_length","-","3","]","+","'...'","return","value"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/output_formatter.py#L112-L118"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/output_formatter.py","language":"python","identifier":"TextOutput._format_pairs","parameters":"(self, values)","argument_list":"","return_statement":"return ', '.join(\n '%s=%s' % (key, value) for key, value in sorted(values.items()))","docstring":"Returns a string of comma-delimited key=value pairs.","docstring_summary":"Returns a string of comma-delimited key=value pairs.","docstring_tokens":["Returns","a","string","of","comma","-","delimited","key","=","value","pairs","."],"function":"def _format_pairs(self, values):\n \"\"\"Returns a string of comma-delimited key=value pairs.\"\"\"\n return ', '.join(\n '%s=%s' % (key, value) for key, value in sorted(values.items()))","function_tokens":["def","_format_pairs","(","self",",","values",")",":","return","', '",".","join","(","'%s=%s'","%","(","key",",","value",")","for","key",",","value","in","sorted","(","values",".","items","(",")",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/output_formatter.py#L135-L138"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/output_formatter.py","language":"python","identifier":"YamlOutput.string_presenter","parameters":"(self, dumper, data)","argument_list":"","return_statement":"","docstring":"Presenter to force yaml.dump to use multi-line string style.","docstring_summary":"Presenter to force yaml.dump to use multi-line string style.","docstring_tokens":["Presenter","to","force","yaml",".","dump","to","use","multi","-","line","string","style","."],"function":"def string_presenter(self, dumper, data):\n \"\"\"Presenter to force yaml.dump to use multi-line string style.\"\"\"\n if '\\n' in data:\n return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')\n else:\n return dumper.represent_scalar('tag:yaml.org,2002:str', data)","function_tokens":["def","string_presenter","(","self",",","dumper",",","data",")",":","if","'\\n'","in","data",":","return","dumper",".","represent_scalar","(","'tag:yaml.org,2002:str'",",","data",",","style","=","'|'",")","else",":","return","dumper",".","represent_scalar","(","'tag:yaml.org,2002:str'",",","data",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/output_formatter.py#L211-L216"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"_gcs_uri_rewriter","parameters":"(raw_uri)","argument_list":"","return_statement":"return raw_uri, docker_path","docstring":"Rewrite GCS file paths as required by the rewrite_uris method.\n\n The GCS rewriter performs no operations on the raw_path and simply returns\n it as the normalized URI. 
The docker path has the gs:\/\/ prefix replaced\n with gs\/ so that it can be mounted inside a docker image.\n\n Args:\n raw_uri: (str) the raw GCS URI, prefix, or pattern.\n\n Returns:\n normalized: a cleaned version of the uri provided by command line.\n docker_path: the uri rewritten in the format required for mounting inside\n a docker worker.","docstring_summary":"Rewrite GCS file paths as required by the rewrite_uris method.","docstring_tokens":["Rewrite","GCS","file","paths","as","required","by","the","rewrite_uris","method","."],"function":"def _gcs_uri_rewriter(raw_uri):\n \"\"\"Rewrite GCS file paths as required by the rewrite_uris method.\n\n The GCS rewriter performs no operations on the raw_path and simply returns\n it as the normalized URI. The docker path has the gs:\/\/ prefix replaced\n with gs\/ so that it can be mounted inside a docker image.\n\n Args:\n raw_uri: (str) the raw GCS URI, prefix, or pattern.\n\n Returns:\n normalized: a cleaned version of the uri provided by command line.\n docker_path: the uri rewritten in the format required for mounting inside\n a docker worker.\n \"\"\"\n docker_path = raw_uri.replace('gs:\/\/', 'gs\/', 1)\n return raw_uri, docker_path","function_tokens":["def","_gcs_uri_rewriter","(","raw_uri",")",":","docker_path","=","raw_uri",".","replace","(","'gs:\/\/'",",","'gs\/'",",","1",")","return","raw_uri",",","docker_path"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L286-L302"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"_local_uri_rewriter","parameters":"(raw_uri)","argument_list":"","return_statement":"return normed_uri, docker_path","docstring":"Rewrite local file URIs as required by the rewrite_uris method.\n\n Local file paths, unlike GCS paths, may have their raw URI simplified by\n os.path.normpath which collapses extraneous indirect characters.\n\n >>> _local_uri_rewriter('\/tmp\/a_path\/..\/B_PATH\/file.txt')\n ('\/tmp\/B_PATH\/file.txt', 'file\/tmp\/B_PATH\/file.txt')\n >>> _local_uri_rewriter('\/myhome\/.\/mydir\/')\n ('\/myhome\/mydir\/', 'file\/myhome\/mydir\/')\n\n The local path rewriter will also work to preserve relative paths even\n when creating the docker path. This prevents leaking of information on the\n invoker's system to the remote system. 
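_gcs_uri_rewriter is module-private, but the transformation it documents is simple enough to check standalone: the normalized URI is the raw URI unchanged, and the docker path swaps the leading gs:// scheme for a gs/ directory.

```python
# Sketch of the documented GCS rewrite rule.
raw_uri = 'gs://mybucket/samples/*.bam'
normalized = raw_uri                               # returned unchanged
docker_path = raw_uri.replace('gs://', 'gs/', 1)   # mountable inside docker
assert (normalized, docker_path) == (
    'gs://mybucket/samples/*.bam', 'gs/mybucket/samples/*.bam')
```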
Doing this requires a number of path\n substitutions denoted with the __ convention.\n\n >>> _local_uri_rewriter('.\/..\/upper_dir\/')[1]\n 'file\/_dotdot_\/upper_dir\/'\n >>> _local_uri_rewriter('~\/localdata\/*.bam')[1]\n 'file\/_home_\/localdata\/*.bam'\n\n Args:\n raw_uri: (str) the raw file or directory path.\n\n Returns:\n normalized: a simplified and\/or expanded version of the uri.\n docker_path: the uri rewritten in the format required for mounting inside\n a docker worker.","docstring_summary":"Rewrite local file URIs as required by the rewrite_uris method.","docstring_tokens":["Rewrite","local","file","URIs","as","required","by","the","rewrite_uris","method","."],"function":"def _local_uri_rewriter(raw_uri):\n \"\"\"Rewrite local file URIs as required by the rewrite_uris method.\n\n Local file paths, unlike GCS paths, may have their raw URI simplified by\n os.path.normpath which collapses extraneous indirect characters.\n\n >>> _local_uri_rewriter('\/tmp\/a_path\/..\/B_PATH\/file.txt')\n ('\/tmp\/B_PATH\/file.txt', 'file\/tmp\/B_PATH\/file.txt')\n >>> _local_uri_rewriter('\/myhome\/.\/mydir\/')\n ('\/myhome\/mydir\/', 'file\/myhome\/mydir\/')\n\n The local path rewriter will also work to preserve relative paths even\n when creating the docker path. This prevents leaking of information on the\n invoker's system to the remote system. Doing this requires a number of path\n substitutions denoted with the __ convention.\n\n >>> _local_uri_rewriter('.\/..\/upper_dir\/')[1]\n 'file\/_dotdot_\/upper_dir\/'\n >>> _local_uri_rewriter('~\/localdata\/*.bam')[1]\n 'file\/_home_\/localdata\/*.bam'\n\n Args:\n raw_uri: (str) the raw file or directory path.\n\n Returns:\n normalized: a simplified and\/or expanded version of the uri.\n docker_path: the uri rewritten in the format required for mounting inside\n a docker worker.\n\n \"\"\"\n # The path is split into components so that the filename is not rewritten.\n raw_path, filename = os.path.split(raw_uri)\n # Generate the local path that can be resolved by filesystem operations,\n # this removes special shell characters, condenses indirects and replaces\n # any unnecessary prefix.\n prefix_replacements = [('file:\/\/\/', '\/'), ('~\/', os.getenv('HOME')), ('.\/',\n ''),\n ('file:\/', '\/')]\n normed_path = raw_path\n for prefix, replacement in prefix_replacements:\n if normed_path.startswith(prefix):\n normed_path = os.path.join(replacement, normed_path[len(prefix):])\n # Because abspath strips the trailing '\/' from bare directory references\n # other than root, this ensures that all directory references end with '\/'.\n normed_uri = directory_fmt(os.path.abspath(normed_path))\n normed_uri = os.path.join(normed_uri, filename)\n\n # Generate the path used inside the docker image;\n # 1) Get rid of extra indirects: \/this\/.\/that -> \/this\/that\n # 2) Rewrite required indirects as synthetic characters.\n # 3) Strip relative or absolute path leading character.\n # 4) Add 'file\/' prefix.\n docker_rewrites = [(r'\/\\.\\.', '\/_dotdot_'), (r'^\\.\\.', '_dotdot_'),\n (r'^~\/', '_home_\/'), (r'^file:\/', '')]\n docker_path = os.path.normpath(raw_path)\n for pattern, replacement in docker_rewrites:\n docker_path = re.sub(pattern, replacement, docker_path)\n docker_path = docker_path.lstrip('.\/') # Strips any of '.' 
'.\/' '\/'.\n docker_path = directory_fmt('file\/' + docker_path) + filename\n return normed_uri, docker_path","function_tokens":["def","_local_uri_rewriter","(","raw_uri",")",":","# The path is split into components so that the filename is not rewritten.","raw_path",",","filename","=","os",".","path",".","split","(","raw_uri",")","# Generate the local path that can be resolved by filesystem operations,","# this removes special shell characters, condenses indirects and replaces","# any unnecessary prefix.","prefix_replacements","=","[","(","'file:\/\/\/'",",","'\/'",")",",","(","'~\/'",",","os",".","getenv","(","'HOME'",")",")",",","(","'.\/'",",","''",")",",","(","'file:\/'",",","'\/'",")","]","normed_path","=","raw_path","for","prefix",",","replacement","in","prefix_replacements",":","if","normed_path",".","startswith","(","prefix",")",":","normed_path","=","os",".","path",".","join","(","replacement",",","normed_path","[","len","(","prefix",")",":","]",")","# Because abspath strips the trailing '\/' from bare directory references","# other than root, this ensures that all directory references end with '\/'.","normed_uri","=","directory_fmt","(","os",".","path",".","abspath","(","normed_path",")",")","normed_uri","=","os",".","path",".","join","(","normed_uri",",","filename",")","# Generate the path used inside the docker image;","# 1) Get rid of extra indirects: \/this\/.\/that -> \/this\/that","# 2) Rewrite required indirects as synthetic characters.","# 3) Strip relative or absolute path leading character.","# 4) Add 'file\/' prefix.","docker_rewrites","=","[","(","r'\/\\.\\.'",",","'\/_dotdot_'",")",",","(","r'^\\.\\.'",",","'_dotdot_'",")",",","(","r'^~\/'",",","'_home_\/'",")",",","(","r'^file:\/'",",","''",")","]","docker_path","=","os",".","path",".","normpath","(","raw_path",")","for","pattern",",","replacement","in","docker_rewrites",":","docker_path","=","re",".","sub","(","pattern",",","replacement",",","docker_path",")","docker_path","=","docker_path",".","lstrip","(","'.\/'",")","# Strips any of '.' 
'.\/' '\/'.","docker_path","=","directory_fmt","(","'file\/'","+","docker_path",")","+","filename","return","normed_uri",",","docker_path"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L305-L364"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"get_gcs_mounts","parameters":"(mounts)","argument_list":"","return_statement":"return _get_filtered_mounts(mounts, job_model.GCSMountParam)","docstring":"Returns the GCS mounts from mounts.","docstring_summary":"Returns the GCS mounts from mounts.","docstring_tokens":["Returns","the","GCS","mounts","from","mounts","."],"function":"def get_gcs_mounts(mounts):\n \"\"\"Returns the GCS mounts from mounts.\"\"\"\n return _get_filtered_mounts(mounts, job_model.GCSMountParam)","function_tokens":["def","get_gcs_mounts","(","mounts",")",":","return","_get_filtered_mounts","(","mounts",",","job_model",".","GCSMountParam",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L367-L369"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"get_persistent_disk_mounts","parameters":"(mounts)","argument_list":"","return_statement":"return _get_filtered_mounts(mounts, job_model.PersistentDiskMountParam)","docstring":"Returns the persistent disk mounts from mounts.","docstring_summary":"Returns the persistent disk mounts from mounts.","docstring_tokens":["Returns","the","persistent","disk","mounts","from","mounts","."],"function":"def get_persistent_disk_mounts(mounts):\n \"\"\"Returns the persistent disk mounts from mounts.\"\"\"\n return _get_filtered_mounts(mounts, job_model.PersistentDiskMountParam)","function_tokens":["def","get_persistent_disk_mounts","(","mounts",")",":","return","_get_filtered_mounts","(","mounts",",","job_model",".","PersistentDiskMountParam",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L372-L374"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"get_local_mounts","parameters":"(mounts)","argument_list":"","return_statement":"return _get_filtered_mounts(mounts, job_model.LocalMountParam)","docstring":"Returns the local mounts from mounts.","docstring_summary":"Returns the local mounts from mounts.","docstring_tokens":["Returns","the","local","mounts","from","mounts","."],"function":"def get_local_mounts(mounts):\n \"\"\"Returns the local mounts from mounts.\"\"\"\n return _get_filtered_mounts(mounts, job_model.LocalMountParam)","function_tokens":["def","get_local_mounts","(","mounts",")",":","return","_get_filtered_mounts","(","mounts",",","job_model",".","LocalMountParam",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L377-L379"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"_get_filtered_mounts","parameters":"(mounts, mount_param_type)","argument_list":"","return_statement":"return set([mount for mount in mounts if isinstance(mount, mount_param_type)])","docstring":"Helper function to return an appropriate set of mount 
parameters.","docstring_summary":"Helper function to return an appropriate set of mount parameters.","docstring_tokens":["Helper","function","to","return","an","appropriate","set","of","mount","parameters","."],"function":"def _get_filtered_mounts(mounts, mount_param_type):\n \"\"\"Helper function to return an appropriate set of mount parameters.\"\"\"\n return set([mount for mount in mounts if isinstance(mount, mount_param_type)])","function_tokens":["def","_get_filtered_mounts","(","mounts",",","mount_param_type",")",":","return","set","(","[","mount","for","mount","in","mounts","if","isinstance","(","mount",",","mount_param_type",")","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L382-L384"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"build_logging_param","parameters":"(logging_uri, util_class=OutputFileParamUtil)","argument_list":"","return_statement":"return job_model.LoggingParam(uri, provider)","docstring":"Convenience function simplifies construction of the logging uri.","docstring_summary":"Convenience function simplifies construction of the logging uri.","docstring_tokens":["Convenience","function","simplifies","construction","of","the","logging","uri","."],"function":"def build_logging_param(logging_uri, util_class=OutputFileParamUtil):\n \"\"\"Convenience function simplifies construction of the logging uri.\"\"\"\n if not logging_uri:\n return job_model.LoggingParam(None, None)\n recursive = not logging_uri.endswith('.log')\n oututil = util_class('')\n _, uri, provider = oututil.parse_uri(logging_uri, recursive)\n if '*' in uri.basename:\n raise ValueError('Wildcards not allowed in logging URI: %s' % uri)\n return job_model.LoggingParam(uri, provider)","function_tokens":["def","build_logging_param","(","logging_uri",",","util_class","=","OutputFileParamUtil",")",":","if","not","logging_uri",":","return","job_model",".","LoggingParam","(","None",",","None",")","recursive","=","not","logging_uri",".","endswith","(","'.log'",")","oututil","=","util_class","(","''",")","_",",","uri",",","provider","=","oututil",".","parse_uri","(","logging_uri",",","recursive",")","if","'*'","in","uri",".","basename",":","raise","ValueError","(","'Wildcards not allowed in logging URI: %s'","%","uri",")","return","job_model",".","LoggingParam","(","uri",",","provider",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L387-L396"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"split_pair","parameters":"(pair_string, separator, nullable_idx=1)","argument_list":"","return_statement":"","docstring":"Split a string into a pair, which can have one empty value.\n\n Args:\n pair_string: The string to be split.\n separator: The separator to be used for splitting.\n nullable_idx: The location to be set to null if the separator is not in the\n input string. 
Should be either 0 or 1.\n\n  Returns:\n    A list containing the pair.\n\n  Raises:\n    IndexError: If nullable_idx is not 0 or 1.","docstring_summary":"Split a string into a pair, which can have one empty value.","docstring_tokens":["Split","a","string","into","a","pair","which","can","have","one","empty","value","."],"function":"def split_pair(pair_string, separator, nullable_idx=1):\n  \"\"\"Split a string into a pair, which can have one empty value.\n\n  Args:\n    pair_string: The string to be split.\n    separator: The separator to be used for splitting.\n    nullable_idx: The location to be set to null if the separator is not in the\n      input string.  Should be either 0 or 1.\n\n  Returns:\n    A list containing the pair.\n\n  Raises:\n    IndexError: If nullable_idx is not 0 or 1.\n  \"\"\"\n\n  pair = pair_string.split(separator, 1)\n  if len(pair) == 1:\n    if nullable_idx == 0:\n      return [None, pair[0]]\n    elif nullable_idx == 1:\n      return [pair[0], None]\n    else:\n      raise IndexError('nullable_idx should be either 0 or 1.')\n  else:\n    return pair","function_tokens":["def","split_pair","(","pair_string",",","separator",",","nullable_idx","=","1",")",":","pair","=","pair_string",".","split","(","separator",",","1",")","if","len","(","pair",")","==","1",":","if","nullable_idx","==","0",":","return","[","None",",","pair","[","0","]","]","elif","nullable_idx","==","1",":","return","[","pair","[","0","]",",","None","]","else",":","raise","IndexError","(","'nullable_idx should be either 0 or 1.'",")","else",":","return","pair"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L399-L424"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"parse_tasks_file_header","parameters":"(header, input_file_param_util,\n                            output_file_param_util)","argument_list":"","return_statement":"return job_params","docstring":"Parse the header from the tasks file into env, input, output definitions.\n\n  Elements are formatted similarly to their equivalent command-line arguments,\n  but with associated values coming from the data rows.\n\n  Environment variables columns are headered as \"--env <name>\"\n  Inputs columns are headered as 
\"--input \" with the name optional.\n Outputs columns are headered as \"--output \" with the name optional.\n\n For historical reasons, bareword column headers (such as \"JOB_ID\") are\n equivalent to \"--env var_name\".\n\n Args:\n header: Array of header fields\n input_file_param_util: Utility for producing InputFileParam objects.\n output_file_param_util: Utility for producing OutputFileParam objects.\n\n Returns:\n job_params: A list of EnvParams and FileParams for the environment\n variables, LabelParams, input file parameters, and output file parameters.\n\n Raises:\n ValueError: If a header contains a \":\" and the prefix is not supported.\n \"\"\"\n job_params = []\n\n for col in header:\n\n # Reserve the \"-\" and \"--\" namespace.\n # If the column has no leading \"-\", treat it as an environment variable\n col_type = '--env'\n col_value = col\n if col.startswith('-'):\n col_type, col_value = split_pair(col, ' ', 1)\n\n if col_type == '--env':\n job_params.append(job_model.EnvParam(col_value))\n\n elif col_type == '--label':\n job_params.append(job_model.LabelParam(col_value))\n\n elif col_type == '--input' or col_type == '--input-recursive':\n name = input_file_param_util.get_variable_name(col_value)\n job_params.append(\n job_model.InputFileParam(\n name, recursive=(col_type.endswith('recursive'))))\n\n elif col_type == '--output' or col_type == '--output-recursive':\n name = output_file_param_util.get_variable_name(col_value)\n job_params.append(\n job_model.OutputFileParam(\n name, recursive=(col_type.endswith('recursive'))))\n\n else:\n raise ValueError('Unrecognized column header: %s' % col)\n\n return job_params","function_tokens":["def","parse_tasks_file_header","(","header",",","input_file_param_util",",","output_file_param_util",")",":","job_params","=","[","]","for","col","in","header",":","# Reserve the \"-\" and \"--\" namespace.","# If the column has no leading \"-\", treat it as an environment variable","col_type","=","'--env'","col_value","=","col","if","col",".","startswith","(","'-'",")",":","col_type",",","col_value","=","split_pair","(","col",",","' '",",","1",")","if","col_type","==","'--env'",":","job_params",".","append","(","job_model",".","EnvParam","(","col_value",")",")","elif","col_type","==","'--label'",":","job_params",".","append","(","job_model",".","LabelParam","(","col_value",")",")","elif","col_type","==","'--input'","or","col_type","==","'--input-recursive'",":","name","=","input_file_param_util",".","get_variable_name","(","col_value",")","job_params",".","append","(","job_model",".","InputFileParam","(","name",",","recursive","=","(","col_type",".","endswith","(","'recursive'",")",")",")",")","elif","col_type","==","'--output'","or","col_type","==","'--output-recursive'",":","name","=","output_file_param_util",".","get_variable_name","(","col_value",")","job_params",".","append","(","job_model",".","OutputFileParam","(","name",",","recursive","=","(","col_type",".","endswith","(","'recursive'",")",")",")",")","else",":","raise","ValueError","(","'Unrecognized column header: %s'","%","col",")","return","job_params"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L427-L485"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"tasks_file_to_task_descriptors","parameters":"(tasks, retries, input_file_param_util,\n 
output_file_param_util)","argument_list":"","return_statement":"return task_descriptors","docstring":"Parses task parameters from a TSV.\n\n Args:\n tasks: Dict containing the path to a TSV file and task numbers to run\n variables, input, and output parameters as column headings. Subsequent\n lines specify parameter values, one row per job.\n retries: Number of retries allowed.\n input_file_param_util: Utility for producing InputFileParam objects.\n output_file_param_util: Utility for producing OutputFileParam objects.\n\n Returns:\n task_descriptors: an array of records, each containing the task-id,\n task-attempt, 'envs', 'inputs', 'outputs', 'labels' that defines the set of\n parameters for each task of the job.\n\n Raises:\n ValueError: If no job records were provided","docstring_summary":"Parses task parameters from a TSV.","docstring_tokens":["Parses","task","parameters","from","a","TSV","."],"function":"def tasks_file_to_task_descriptors(tasks, retries, input_file_param_util,\n output_file_param_util):\n \"\"\"Parses task parameters from a TSV.\n\n Args:\n tasks: Dict containing the path to a TSV file and task numbers to run\n variables, input, and output parameters as column headings. Subsequent\n lines specify parameter values, one row per job.\n retries: Number of retries allowed.\n input_file_param_util: Utility for producing InputFileParam objects.\n output_file_param_util: Utility for producing OutputFileParam objects.\n\n Returns:\n task_descriptors: an array of records, each containing the task-id,\n task-attempt, 'envs', 'inputs', 'outputs', 'labels' that defines the set of\n parameters for each task of the job.\n\n Raises:\n ValueError: If no job records were provided\n \"\"\"\n task_descriptors = []\n\n path = tasks['path']\n task_min = tasks.get('min')\n task_max = tasks.get('max')\n\n # First check for any empty lines\n param_file = dsub_util.load_file(path)\n param_file_lines = param_file.splitlines()\n if any([not line for line in param_file_lines]):\n raise ValueError('Blank line(s) found in {}'.format(path))\n\n # Set up a Reader that tokenizes the fields\n reader = csv.reader(param_file_lines, delimiter='\\t')\n\n # Read the first line and extract the parameters\n header = next(reader)\n job_params = parse_tasks_file_header(header, input_file_param_util,\n output_file_param_util)\n\n # Build a list of records from the parsed input file\n for row in reader:\n # Tasks are numbered starting at 1 and since the first line of the TSV\n # file is a header, the first task appears on line 2.\n task_id = reader.line_num - 1\n if task_min and task_id < task_min:\n continue\n if task_max and task_id > task_max:\n continue\n\n if len(row) != len(job_params):\n raise ValueError(\n 'Unexpected number of fields {} vs {}: in {} line {}'.format(\n len(row), len(job_params), path, reader.line_num))\n\n # Each row can contain \"envs\", \"inputs\", \"outputs\"\n envs = set()\n inputs = set()\n outputs = set()\n labels = set()\n\n for i in range(0, len(job_params)):\n param = job_params[i]\n name = param.name\n if isinstance(param, job_model.EnvParam):\n envs.add(job_model.EnvParam(name, row[i]))\n\n elif isinstance(param, job_model.LabelParam):\n labels.add(job_model.LabelParam(name, row[i]))\n\n elif isinstance(param, job_model.InputFileParam):\n inputs.add(\n input_file_param_util.make_param(name, row[i], param.recursive))\n\n elif isinstance(param, job_model.OutputFileParam):\n outputs.add(\n output_file_param_util.make_param(name, row[i], param.recursive))\n\n 
task_descriptors.append(\n job_model.TaskDescriptor({\n 'task-id': task_id,\n 'task-attempt': 1 if retries else None\n }, {\n 'labels': labels,\n 'envs': envs,\n 'inputs': inputs,\n 'outputs': outputs\n }, job_model.Resources()))\n\n # Ensure that there are jobs to execute (and not just a header)\n if not task_descriptors:\n raise ValueError('No tasks added from %s' % path)\n\n return task_descriptors","function_tokens":["def","tasks_file_to_task_descriptors","(","tasks",",","retries",",","input_file_param_util",",","output_file_param_util",")",":","task_descriptors","=","[","]","path","=","tasks","[","'path'","]","task_min","=","tasks",".","get","(","'min'",")","task_max","=","tasks",".","get","(","'max'",")","# First check for any empty lines","param_file","=","dsub_util",".","load_file","(","path",")","param_file_lines","=","param_file",".","splitlines","(",")","if","any","(","[","not","line","for","line","in","param_file_lines","]",")",":","raise","ValueError","(","'Blank line(s) found in {}'",".","format","(","path",")",")","# Set up a Reader that tokenizes the fields","reader","=","csv",".","reader","(","param_file_lines",",","delimiter","=","'\\t'",")","# Read the first line and extract the parameters","header","=","next","(","reader",")","job_params","=","parse_tasks_file_header","(","header",",","input_file_param_util",",","output_file_param_util",")","# Build a list of records from the parsed input file","for","row","in","reader",":","# Tasks are numbered starting at 1 and since the first line of the TSV","# file is a header, the first task appears on line 2.","task_id","=","reader",".","line_num","-","1","if","task_min","and","task_id","<","task_min",":","continue","if","task_max","and","task_id",">","task_max",":","continue","if","len","(","row",")","!=","len","(","job_params",")",":","raise","ValueError","(","'Unexpected number of fields {} vs {}: in {} line {}'",".","format","(","len","(","row",")",",","len","(","job_params",")",",","path",",","reader",".","line_num",")",")","# Each row can contain \"envs\", \"inputs\", \"outputs\"","envs","=","set","(",")","inputs","=","set","(",")","outputs","=","set","(",")","labels","=","set","(",")","for","i","in","range","(","0",",","len","(","job_params",")",")",":","param","=","job_params","[","i","]","name","=","param",".","name","if","isinstance","(","param",",","job_model",".","EnvParam",")",":","envs",".","add","(","job_model",".","EnvParam","(","name",",","row","[","i","]",")",")","elif","isinstance","(","param",",","job_model",".","LabelParam",")",":","labels",".","add","(","job_model",".","LabelParam","(","name",",","row","[","i","]",")",")","elif","isinstance","(","param",",","job_model",".","InputFileParam",")",":","inputs",".","add","(","input_file_param_util",".","make_param","(","name",",","row","[","i","]",",","param",".","recursive",")",")","elif","isinstance","(","param",",","job_model",".","OutputFileParam",")",":","outputs",".","add","(","output_file_param_util",".","make_param","(","name",",","row","[","i","]",",","param",".","recursive",")",")","task_descriptors",".","append","(","job_model",".","TaskDescriptor","(","{","'task-id'",":","task_id",",","'task-attempt'",":","1","if","retries","else","None","}",",","{","'labels'",":","labels",",","'envs'",":","envs",",","'inputs'",":","inputs",",","'outputs'",":","outputs","}",",","job_model",".","Resources","(",")",")",")","# Ensure that there are jobs to execute (and not just a header)","if","not","task_descriptors",":","raise","ValueError","(","'No tasks added from 
%s'","%","path",")","return","task_descriptors"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L488-L581"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"parse_pair_args","parameters":"(labels, argclass)","argument_list":"","return_statement":"return label_data","docstring":"Parse flags of key=value pairs and return a list of argclass.\n\n For pair variables, we need to:\n * split the input into name=value pairs (value optional)\n * Create the EnvParam object\n\n Args:\n labels: list of 'key' or 'key=value' strings.\n argclass: Container class for args, must instantiate with argclass(k, v).\n\n Returns:\n list of argclass objects.","docstring_summary":"Parse flags of key=value pairs and return a list of argclass.","docstring_tokens":["Parse","flags","of","key","=","value","pairs","and","return","a","list","of","argclass","."],"function":"def parse_pair_args(labels, argclass):\n \"\"\"Parse flags of key=value pairs and return a list of argclass.\n\n For pair variables, we need to:\n * split the input into name=value pairs (value optional)\n * Create the EnvParam object\n\n Args:\n labels: list of 'key' or 'key=value' strings.\n argclass: Container class for args, must instantiate with argclass(k, v).\n\n Returns:\n list of argclass objects.\n \"\"\"\n label_data = set()\n for arg in labels:\n name, value = split_pair(arg, '=', nullable_idx=1)\n label_data.add(argclass(name, value))\n return label_data","function_tokens":["def","parse_pair_args","(","labels",",","argclass",")",":","label_data","=","set","(",")","for","arg","in","labels",":","name",",","value","=","split_pair","(","arg",",","'='",",","nullable_idx","=","1",")","label_data",".","add","(","argclass","(","name",",","value",")",")","return","label_data"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L584-L602"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"args_to_job_params","parameters":"(envs, labels, inputs, inputs_recursive, outputs,\n outputs_recursive, mounts, input_file_param_util,\n output_file_param_util, mount_param_util)","argument_list":"","return_statement":"return {\n 'envs': env_data,\n 'inputs': input_data,\n 'outputs': output_data,\n 'labels': label_data,\n 'mounts': mount_data,\n }","docstring":"Parse env, input, and output parameters into a job parameters and data.\n\n Passing arguments on the command-line allows for launching a single job.\n The env, input, and output arguments encode both the definition of the\n job as well as the single job's values.\n\n Env arguments are simple name=value pairs.\n Input and output file arguments can contain name=value pairs or just values.\n Either of the following is valid:\n\n uri\n myfile=uri\n\n Args:\n envs: list of environment variable job parameters\n labels: list of labels to attach to the tasks\n inputs: list of file input parameters\n inputs_recursive: list of recursive directory input parameters\n outputs: list of file output parameters\n outputs_recursive: list of recursive directory output parameters\n mounts: list of gcs buckets to mount\n input_file_param_util: Utility for producing InputFileParam objects.\n output_file_param_util: Utility for producing OutputFileParam objects.\n mount_param_util: Utility for 
producing MountParam objects.\n\n Returns:\n job_params: a dictionary of 'envs', 'inputs', and 'outputs' that defines the\n set of parameters and data for a job.","docstring_summary":"Parse env, input, and output parameters into a job parameters and data.","docstring_tokens":["Parse","env","input","and","output","parameters","into","a","job","parameters","and","data","."],"function":"def args_to_job_params(envs, labels, inputs, inputs_recursive, outputs,\n outputs_recursive, mounts, input_file_param_util,\n output_file_param_util, mount_param_util):\n \"\"\"Parse env, input, and output parameters into a job parameters and data.\n\n Passing arguments on the command-line allows for launching a single job.\n The env, input, and output arguments encode both the definition of the\n job as well as the single job's values.\n\n Env arguments are simple name=value pairs.\n Input and output file arguments can contain name=value pairs or just values.\n Either of the following is valid:\n\n uri\n myfile=uri\n\n Args:\n envs: list of environment variable job parameters\n labels: list of labels to attach to the tasks\n inputs: list of file input parameters\n inputs_recursive: list of recursive directory input parameters\n outputs: list of file output parameters\n outputs_recursive: list of recursive directory output parameters\n mounts: list of gcs buckets to mount\n input_file_param_util: Utility for producing InputFileParam objects.\n output_file_param_util: Utility for producing OutputFileParam objects.\n mount_param_util: Utility for producing MountParam objects.\n\n Returns:\n job_params: a dictionary of 'envs', 'inputs', and 'outputs' that defines the\n set of parameters and data for a job.\n \"\"\"\n # Parse environmental variables and labels.\n env_data = parse_pair_args(envs, job_model.EnvParam)\n label_data = parse_pair_args(labels, job_model.LabelParam)\n\n # For input files, we need to:\n # * split the input into name=uri pairs (name optional)\n # * get the environmental variable name, or automatically set if null.\n # * create the input file param\n input_data = set()\n for (recursive, args) in ((False, inputs), (True, inputs_recursive)):\n for arg in args:\n name, value = split_pair(arg, '=', nullable_idx=0)\n name = input_file_param_util.get_variable_name(name)\n input_data.add(input_file_param_util.make_param(name, value, recursive))\n\n # For output files, we need to:\n # * split the input into name=uri pairs (name optional)\n # * get the environmental variable name, or automatically set if null.\n # * create the output file param\n output_data = set()\n for (recursive, args) in ((False, outputs), (True, outputs_recursive)):\n for arg in args:\n name, value = split_pair(arg, '=', 0)\n name = output_file_param_util.get_variable_name(name)\n output_data.add(output_file_param_util.make_param(name, value, recursive))\n\n mount_data = set()\n for arg in mounts:\n # Mounts can look like `--mount VAR=PATH` or `--mount VAR=PATH {num}`,\n # where num is the size of the disk in Gb. 
We assume a space is the\n # separator between path and disk size.\n if ' ' in arg:\n key_value_pair, disk_size = arg.split(' ')\n name, value = split_pair(key_value_pair, '=', 1)\n mount_data.add(mount_param_util.make_param(name, value, disk_size))\n else:\n name, value = split_pair(arg, '=', 1)\n mount_data.add(mount_param_util.make_param(name, value, disk_size=None))\n return {\n 'envs': env_data,\n 'inputs': input_data,\n 'outputs': output_data,\n 'labels': label_data,\n 'mounts': mount_data,\n }","function_tokens":["def","args_to_job_params","(","envs",",","labels",",","inputs",",","inputs_recursive",",","outputs",",","outputs_recursive",",","mounts",",","input_file_param_util",",","output_file_param_util",",","mount_param_util",")",":","# Parse environmental variables and labels.","env_data","=","parse_pair_args","(","envs",",","job_model",".","EnvParam",")","label_data","=","parse_pair_args","(","labels",",","job_model",".","LabelParam",")","# For input files, we need to:","# * split the input into name=uri pairs (name optional)","# * get the environmental variable name, or automatically set if null.","# * create the input file param","input_data","=","set","(",")","for","(","recursive",",","args",")","in","(","(","False",",","inputs",")",",","(","True",",","inputs_recursive",")",")",":","for","arg","in","args",":","name",",","value","=","split_pair","(","arg",",","'='",",","nullable_idx","=","0",")","name","=","input_file_param_util",".","get_variable_name","(","name",")","input_data",".","add","(","input_file_param_util",".","make_param","(","name",",","value",",","recursive",")",")","# For output files, we need to:","# * split the input into name=uri pairs (name optional)","# * get the environmental variable name, or automatically set if null.","# * create the output file param","output_data","=","set","(",")","for","(","recursive",",","args",")","in","(","(","False",",","outputs",")",",","(","True",",","outputs_recursive",")",")",":","for","arg","in","args",":","name",",","value","=","split_pair","(","arg",",","'='",",","0",")","name","=","output_file_param_util",".","get_variable_name","(","name",")","output_data",".","add","(","output_file_param_util",".","make_param","(","name",",","value",",","recursive",")",")","mount_data","=","set","(",")","for","arg","in","mounts",":","# Mounts can look like `--mount VAR=PATH` or `--mount VAR=PATH {num}`,","# where num is the size of the disk in Gb. 
We assume a space is the","# separator between path and disk size.","if","' '","in","arg",":","key_value_pair",",","disk_size","=","arg",".","split","(","' '",")","name",",","value","=","split_pair","(","key_value_pair",",","'='",",","1",")","mount_data",".","add","(","mount_param_util",".","make_param","(","name",",","value",",","disk_size",")",")","else",":","name",",","value","=","split_pair","(","arg",",","'='",",","1",")","mount_data",".","add","(","mount_param_util",".","make_param","(","name",",","value",",","disk_size","=","None",")",")","return","{","'envs'",":","env_data",",","'inputs'",":","input_data",",","'outputs'",":","output_data",",","'labels'",":","label_data",",","'mounts'",":","mount_data",",","}"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L605-L681"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"validate_submit_args_or_fail","parameters":"(job_descriptor, provider_name, input_providers,\n output_providers, logging_providers)","argument_list":"","return_statement":"","docstring":"Validate that arguments passed to submit_job have valid file providers.\n\n This utility function takes resources and task data args from `submit_job`\n in the base provider. This function will fail with a value error if any of the\n parameters are not valid. See the following example;\n\n >>> job_resources = type('', (object,),\n ... {\"logging\": job_model.LoggingParam('gs:\/\/logtemp', job_model.P_GCS)})()\n >>> job_params={'inputs': set(), 'outputs': set(), 'mounts': set()}\n >>> task_descriptors = [\n ... job_model.TaskDescriptor(None, {\n ... 'inputs': {\n ... job_model.FileParam('IN', uri='gs:\/\/in\/*',\n ... file_provider=job_model.P_GCS)},\n ... 'outputs': set()}, None),\n ... job_model.TaskDescriptor(None, {\n ... 'inputs': set(),\n ... 'outputs': {\n ... job_model.FileParam('OUT', uri='gs:\/\/out\/*',\n ... file_provider=job_model.P_GCS)}}, None)]\n ...\n >>> validate_submit_args_or_fail(job_model.JobDescriptor(None, job_params,\n ... job_resources, task_descriptors),\n ... provider_name='MYPROVIDER',\n ... input_providers=[job_model.P_GCS],\n ... output_providers=[job_model.P_GCS],\n ... logging_providers=[job_model.P_GCS])\n ...\n >>> validate_submit_args_or_fail(job_model.JobDescriptor(None, job_params,\n ... job_resources, task_descriptors),\n ... provider_name='MYPROVIDER',\n ... input_providers=[job_model.P_GCS],\n ... output_providers=[job_model.P_LOCAL],\n ... 
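The --mount splitting rule described in the comments above is easy to verify standalone. A sketch of just the string handling (the real code hands the pieces to mount_param_util.make_param):

```python
# Sketch of the documented "--mount VAR=PATH [disk-size]" splitting.
def split_mount_arg(arg):
    if ' ' in arg:  # optional trailing disk size after a single space
        key_value, disk_size = arg.split(' ')
        name, path = key_value.split('=', 1)
        return name, path, disk_size
    name, path = arg.split('=', 1)
    return name, path, None

assert split_mount_arg('RES=gs://my-bucket/ref 200') == (
    'RES', 'gs://my-bucket/ref', '200')
assert split_mount_arg('RES=gs://my-bucket/ref') == (
    'RES', 'gs://my-bucket/ref', None)
```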
logging_providers=[job_model.P_GCS])\n Traceback (most recent call last):\n ...\n ValueError: Unsupported output path (gs:\/\/out\/*) for provider 'MYPROVIDER'.\n\n Args:\n job_descriptor: instance of job_model.JobDescriptor.\n provider_name: (str) the name of the execution provider.\n input_providers: (string collection) whitelist of file providers for input.\n output_providers: (string collection) whitelist of providers for output.\n logging_providers: (string collection) whitelist of providers for logging.\n\n Raises:\n ValueError: if any file providers do not match the whitelists.","docstring_summary":"Validate that arguments passed to submit_job have valid file providers.","docstring_tokens":["Validate","that","arguments","passed","to","submit_job","have","valid","file","providers","."],"function":"def validate_submit_args_or_fail(job_descriptor, provider_name, input_providers,\n output_providers, logging_providers):\n \"\"\"Validate that arguments passed to submit_job have valid file providers.\n\n This utility function takes resources and task data args from `submit_job`\n in the base provider. This function will fail with a value error if any of the\n parameters are not valid. See the following example;\n\n >>> job_resources = type('', (object,),\n ... {\"logging\": job_model.LoggingParam('gs:\/\/logtemp', job_model.P_GCS)})()\n >>> job_params={'inputs': set(), 'outputs': set(), 'mounts': set()}\n >>> task_descriptors = [\n ... job_model.TaskDescriptor(None, {\n ... 'inputs': {\n ... job_model.FileParam('IN', uri='gs:\/\/in\/*',\n ... file_provider=job_model.P_GCS)},\n ... 'outputs': set()}, None),\n ... job_model.TaskDescriptor(None, {\n ... 'inputs': set(),\n ... 'outputs': {\n ... job_model.FileParam('OUT', uri='gs:\/\/out\/*',\n ... file_provider=job_model.P_GCS)}}, None)]\n ...\n >>> validate_submit_args_or_fail(job_model.JobDescriptor(None, job_params,\n ... job_resources, task_descriptors),\n ... provider_name='MYPROVIDER',\n ... input_providers=[job_model.P_GCS],\n ... output_providers=[job_model.P_GCS],\n ... logging_providers=[job_model.P_GCS])\n ...\n >>> validate_submit_args_or_fail(job_model.JobDescriptor(None, job_params,\n ... job_resources, task_descriptors),\n ... provider_name='MYPROVIDER',\n ... input_providers=[job_model.P_GCS],\n ... output_providers=[job_model.P_LOCAL],\n ... 
logging_providers=[job_model.P_GCS])\n  Traceback (most recent call last):\n       ...\n  ValueError: Unsupported output path (gs:\/\/out\/*) for provider 'MYPROVIDER'.\n\n  Args:\n    job_descriptor: instance of job_model.JobDescriptor.\n    provider_name: (str) the name of the execution provider.\n    input_providers: (string collection) whitelist of file providers for input.\n    output_providers: (string collection) whitelist of providers for output.\n    logging_providers: (string collection) whitelist of providers for logging.\n\n  Raises:\n    ValueError: if any file providers do not match the whitelists.\n  \"\"\"\n  job_resources = job_descriptor.job_resources\n  job_params = job_descriptor.job_params\n  task_descriptors = job_descriptor.task_descriptors\n\n  # Validate logging file provider.\n  _validate_providers([job_resources.logging], 'logging', logging_providers,\n                      provider_name)\n\n  # Validate job input and output file providers\n  _validate_providers(job_params['inputs'], 'input', input_providers,\n                      provider_name)\n  _validate_providers(job_params['outputs'], 'output', output_providers,\n                      provider_name)\n\n  # Validate input and output file providers.\n  for task_descriptor in task_descriptors:\n    _validate_providers(task_descriptor.task_params['inputs'], 'input',\n                        input_providers, provider_name)\n    _validate_providers(task_descriptor.task_params['outputs'], 'output',\n                        output_providers, provider_name)","function_tokens":["def","validate_submit_args_or_fail","(","job_descriptor",",","provider_name",",","input_providers",",","output_providers",",","logging_providers",")",":","job_resources","=","job_descriptor",".","job_resources","job_params","=","job_descriptor",".","job_params","task_descriptors","=","job_descriptor",".","task_descriptors","# Validate logging file provider.","_validate_providers","(","[","job_resources",".","logging","]",",","'logging'",",","logging_providers",",","provider_name",")","# Validate job input and output file providers","_validate_providers","(","job_params","[","'inputs'","]",",","'input'",",","input_providers",",","provider_name",")","_validate_providers","(","job_params","[","'outputs'","]",",","'output'",",","output_providers",",","provider_name",")","# Validate input and output file providers.","for","task_descriptor","in","task_descriptors",":","_validate_providers","(","task_descriptor",".","task_params","[","'inputs'","]",",","'input'",",","input_providers",",","provider_name",")","_validate_providers","(","task_descriptor",".","task_params","[","'outputs'","]",",","'output'",",","output_providers",",","provider_name",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L696-L765"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"directory_fmt","parameters":"(directory)","argument_list":"","return_statement":"return directory.rstrip('\/') + '\/'","docstring":"Ensure that directories end with '\/'.\n\n  Frequently we need to ensure that directory paths end with a forward slash.\n  Python's dirname and split functions in the path library treat this\n  inconsistently, creating this requirement. 
This function is simple but was\n  written to centralize documentation of an often used (and often explained)\n  requirement in this codebase.\n\n  >>> os.path.dirname('gs:\/\/bucket\/folder\/file.txt')\n  'gs:\/\/bucket\/folder'\n  >>> directory_fmt(os.path.dirname('gs:\/\/bucket\/folder\/file.txt'))\n  'gs:\/\/bucket\/folder\/'\n  >>> os.path.dirname('\/newfile')\n  '\/'\n  >>> directory_fmt(os.path.dirname('\/newfile'))\n  '\/'\n\n  Specifically we need this since copy commands must know whether the\n  destination is a directory to function properly. See the following shell\n  interaction for an example of the inconsistency. Notice that text files are\n  copied as expected but the bam is copied over the directory name.\n\n  Multiple files copy, works as intended in all cases:\n    $ touch a.txt b.txt\n    $ gsutil cp .\/*.txt gs:\/\/mybucket\/text_dest\n    $ gsutil ls gs:\/\/mybucket\/text_dest\/\n    0  2017-07-19T21:44:36Z  gs:\/\/mybucket\/text_dest\/a.txt\n    0  2017-07-19T21:44:36Z  gs:\/\/mybucket\/text_dest\/b.txt\n    TOTAL: 2 objects, 0 bytes (0 B)\n\n  Single file copy fails to copy into a directory:\n    $ touch 1.bam\n    $ gsutil cp .\/*.bam gs:\/\/mybucket\/bad_dest\n    $ gsutil ls gs:\/\/mybucket\/bad_dest\n    0  2017-07-19T21:46:16Z  gs:\/\/mybucket\/bad_dest\n    TOTAL: 1 objects, 0 bytes (0 B)\n\n  Adding a trailing forward slash fixes this:\n    $ touch my.sam\n    $ gsutil cp .\/*.sam gs:\/\/mybucket\/good_folder\n    $ gsutil ls gs:\/\/mybucket\/good_folder\n    0  2017-07-19T21:46:16Z  gs:\/\/mybucket\/good_folder\/my.sam\n    TOTAL: 1 objects, 0 bytes (0 B)\n\n  Args:\n    directory (str): a uri without a blob or file basename.\n\n  Returns:\n    the directory with a trailing slash.","docstring_summary":"Ensure that directories end with '\/'.","docstring_tokens":["Ensure","that","directories","end","with","\/","."],"function":"def directory_fmt(directory):\n  \"\"\"Ensure that directories end with '\/'.\n\n  Frequently we need to ensure that directory paths end with a forward slash.\n  Python's dirname and split functions in the path library treat this\n  inconsistently, creating this requirement. This function is simple but was\n  written to centralize documentation of an often used (and often explained)\n  requirement in this codebase.\n\n  >>> os.path.dirname('gs:\/\/bucket\/folder\/file.txt')\n  'gs:\/\/bucket\/folder'\n  >>> directory_fmt(os.path.dirname('gs:\/\/bucket\/folder\/file.txt'))\n  'gs:\/\/bucket\/folder\/'\n  >>> os.path.dirname('\/newfile')\n  '\/'\n  >>> directory_fmt(os.path.dirname('\/newfile'))\n  '\/'\n\n  Specifically we need this since copy commands must know whether the\n  destination is a directory to function properly. See the following shell\n  interaction for an example of the inconsistency. 
Notice that text files are\n  copied as expected but the bam is copied over the directory name.\n\n  Multiple files copy, works as intended in all cases:\n    $ touch a.txt b.txt\n    $ gsutil cp .\/*.txt gs:\/\/mybucket\/text_dest\n    $ gsutil ls gs:\/\/mybucket\/text_dest\/\n    0  2017-07-19T21:44:36Z  gs:\/\/mybucket\/text_dest\/a.txt\n    0  2017-07-19T21:44:36Z  gs:\/\/mybucket\/text_dest\/b.txt\n    TOTAL: 2 objects, 0 bytes (0 B)\n\n  Single file copy fails to copy into a directory:\n    $ touch 1.bam\n    $ gsutil cp .\/*.bam gs:\/\/mybucket\/bad_dest\n    $ gsutil ls gs:\/\/mybucket\/bad_dest\n    0  2017-07-19T21:46:16Z  gs:\/\/mybucket\/bad_dest\n    TOTAL: 1 objects, 0 bytes (0 B)\n\n  Adding a trailing forward slash fixes this:\n    $ touch my.sam\n    $ gsutil cp .\/*.sam gs:\/\/mybucket\/good_folder\n    $ gsutil ls gs:\/\/mybucket\/good_folder\n    0  2017-07-19T21:46:16Z  gs:\/\/mybucket\/good_folder\/my.sam\n    TOTAL: 1 objects, 0 bytes (0 B)\n\n  Args:\n    directory (str): a uri without a blob or file basename.\n\n  Returns:\n    the directory with a trailing slash.\n  \"\"\"\n  return directory.rstrip('\/') + '\/'","function_tokens":["def","directory_fmt","(","directory",")",":","return","directory",".","rstrip","(","'\/'",")","+","'\/'"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L768-L819"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"handle_version_flag","parameters":"()","argument_list":"","return_statement":"","docstring":"If the --version flag is passed, print version to stdout and exit.\n\n  Within dsub commands, --version should be the highest priority flag.\n  This function supplies a repeatable and DRY way of checking for the\n  version flag and printing the version. Callers still need to define a version\n  flag in the command's flags so that it shows up in help output.","docstring_summary":"If the --version flag is passed, print version to stdout and exit.","docstring_tokens":["If","the","--","version","flag","is","passed","print","version","to","stdout","and","exit","."],"function":"def handle_version_flag():\n  \"\"\"If the --version flag is passed, print version to stdout and exit.\n\n  Within dsub commands, --version should be the highest priority flag.\n  This function supplies a repeatable and DRY way of checking for the\n  version flag and printing the version. 
Callers still need to define a version\n  flag in the command's flags so that it shows up in help output.\n  \"\"\"\n  parser = argparse.ArgumentParser(description='Version parser', add_help=False)\n  parser.add_argument('--version', '-v', dest='version', action='store_true')\n  parser.set_defaults(version=False)\n  args, _ = parser.parse_known_args()\n  if args.version:\n    print('dsub version: %s' % DSUB_VERSION)\n    sys.exit()","function_tokens":["def","handle_version_flag","(",")",":","parser","=","argparse",".","ArgumentParser","(","description","=","'Version parser'",",","add_help","=","False",")","parser",".","add_argument","(","'--version'",",","'-v'",",","dest","=","'version'",",","action","=","'store_true'",")","parser",".","set_defaults","(","version","=","False",")","args",",","_","=","parser",".","parse_known_args","(",")","if","args",".","version",":","print","(","'dsub version: %s'","%","DSUB_VERSION",")","sys",".","exit","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L822-L836"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"age_to_create_time","parameters":"(age, from_time=None)","argument_list":"","return_statement":"","docstring":"Compute the create time (UTC) for the list filter.\n\n  If the age is an integer value it is treated as seconds since the epoch (UTC).\n  Otherwise the value must be of the form \"<integer><unit>\" where supported\n  units are s, m, h, d, w (seconds, minutes, hours, days, weeks).\n\n  Args:\n    age: A \"<integer><unit>\" string or integer value.\n    from_time: A timezone-aware datetime to subtract the age from; defaults to\n      the current time in the local timezone.\n\n  Returns:\n    A timezone-aware datetime or None if age parameter is empty.","docstring_summary":"Compute the create time (UTC) for the list filter.","docstring_tokens":["Compute","the","create","time","(","UTC",")","for","the","list","filter","."],"function":"def age_to_create_time(age, from_time=None):\n  \"\"\"Compute the create time (UTC) for the list filter.\n\n  If the age is an integer value it is treated as seconds since the epoch (UTC).\n  Otherwise the value must be of the form \"<integer><unit>\" where supported\n  units are s, m, h, d, w (seconds, minutes, hours, days, weeks).\n\n  Args:\n    age: A \"<integer><unit>\" string or integer value.\n    from_time: A timezone-aware datetime to subtract the age from; defaults to\n      the current time in the local timezone.\n\n  Returns:\n    A timezone-aware datetime or None if age parameter is empty.\n  \"\"\"\n\n  if not age:\n    return None\n\n  if not from_time:\n    from_time = dsub_util.replace_timezone(datetime.datetime.now(), tzlocal())\n\n  try:\n    last_char = age[-1]\n\n    if last_char == 's':\n      return from_time - datetime.timedelta(seconds=int(age[:-1]))\n    elif last_char == 'm':\n      return from_time - datetime.timedelta(minutes=int(age[:-1]))\n    elif last_char == 'h':\n      return from_time - datetime.timedelta(hours=int(age[:-1]))\n    elif last_char == 'd':\n      return from_time - datetime.timedelta(days=int(age[:-1]))\n    elif last_char == 'w':\n      return from_time - datetime.timedelta(weeks=int(age[:-1]))\n    else:\n      # If no unit is given treat the age as seconds from epoch, otherwise apply\n      # the correct time unit.\n      return dsub_util.replace_timezone(\n          datetime.datetime.utcfromtimestamp(int(age)), pytz.utc)\n\n  except (ValueError, OverflowError) as e:\n    raise ValueError('Unable to parse age string %s: %s' % (age, 
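Typical placement of handle_version_flag at the top of a dsub-style CLI entry point (a sketch; the surrounding parser setup is illustrative only):

```python
import argparse
from dsub.lib.param_util import handle_version_flag

def main():
    # Highest-priority flag: prints the version and exits if --version given.
    handle_version_flag()
    parser = argparse.ArgumentParser()
    # Still declared here so that --version shows up in --help output.
    parser.add_argument('--version', '-v', action='store_true')
    args = parser.parse_args()
```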
e))","function_tokens":["def","age_to_create_time","(","age",",","from_time","=","None",")",":","if","not","age",":","return","None","if","not","from_time",":","from_time","=","dsub_util",".","replace_timezone","(","datetime",".","datetime",".","now","(",")",",","tzlocal","(",")",")","try",":","last_char","=","age","[","-","1","]","if","last_char","==","'s'",":","return","from_time","-","datetime",".","timedelta","(","seconds","=","int","(","age","[",":","-","1","]",")",")","elif","last_char","==","'m'",":","return","from_time","-","datetime",".","timedelta","(","minutes","=","int","(","age","[",":","-","1","]",")",")","elif","last_char","==","'h'",":","return","from_time","-","datetime",".","timedelta","(","hours","=","int","(","age","[",":","-","1","]",")",")","elif","last_char","==","'d'",":","return","from_time","-","datetime",".","timedelta","(","days","=","int","(","age","[",":","-","1","]",")",")","elif","last_char","==","'w'",":","return","from_time","-","datetime",".","timedelta","(","weeks","=","int","(","age","[",":","-","1","]",")",")","else",":","# If no unit is given treat the age as seconds from epoch, otherwise apply","# the correct time unit.","return","dsub_util",".","replace_timezone","(","datetime",".","datetime",".","utcfromtimestamp","(","int","(","age",")",")",",","pytz",".","utc",")","except","(","ValueError",",","OverflowError",")","as","e",":","raise","ValueError","(","'Unable to parse age string %s: %s'","%","(","age",",","e",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L839-L880"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"_interval_to_seconds","parameters":"(interval, valid_units='smhdw')","argument_list":"","return_statement":"","docstring":"Convert the timeout duration to seconds.\n\n The value must be of the form \"\" where supported\n units are s, m, h, d, w (seconds, minutes, hours, days, weeks).\n\n Args:\n interval: A \"\" string.\n valid_units: A list of supported units.\n\n Returns:\n A string of the form \"s\" or None if timeout is empty.","docstring_summary":"Convert the timeout duration to seconds.","docstring_tokens":["Convert","the","timeout","duration","to","seconds","."],"function":"def _interval_to_seconds(interval, valid_units='smhdw'):\n \"\"\"Convert the timeout duration to seconds.\n\n The value must be of the form \"\" where supported\n units are s, m, h, d, w (seconds, minutes, hours, days, weeks).\n\n Args:\n interval: A \"\" string.\n valid_units: A list of supported units.\n\n Returns:\n A string of the form \"s\" or None if timeout is empty.\n \"\"\"\n if not interval:\n return None\n\n try:\n last_char = interval[-1]\n\n if last_char == 's' and 's' in valid_units:\n return str(float(interval[:-1])) + 's'\n elif last_char == 'm' and 'm' in valid_units:\n return str(float(interval[:-1]) * 60) + 's'\n elif last_char == 'h' and 'h' in valid_units:\n return str(float(interval[:-1]) * 60 * 60) + 's'\n elif last_char == 'd' and 'd' in valid_units:\n return str(float(interval[:-1]) * 60 * 60 * 24) + 's'\n elif last_char == 'w' and 'w' in valid_units:\n return str(float(interval[:-1]) * 60 * 60 * 24 * 7) + 's'\n else:\n raise ValueError(\n 'Unsupported units in interval string %s: %s' % (interval, last_char))\n\n except (ValueError, OverflowError) as e:\n raise ValueError('Unable to parse interval string %s: %s' % (interval, 
e))","function_tokens":["def","_interval_to_seconds","(","interval",",","valid_units","=","'smhdw'",")",":","if","not","interval",":","return","None","try",":","last_char","=","interval","[","-","1","]","if","last_char","==","'s'","and","'s'","in","valid_units",":","return","str","(","float","(","interval","[",":","-","1","]",")",")","+","'s'","elif","last_char","==","'m'","and","'m'","in","valid_units",":","return","str","(","float","(","interval","[",":","-","1","]",")","*","60",")","+","'s'","elif","last_char","==","'h'","and","'h'","in","valid_units",":","return","str","(","float","(","interval","[",":","-","1","]",")","*","60","*","60",")","+","'s'","elif","last_char","==","'d'","and","'d'","in","valid_units",":","return","str","(","float","(","interval","[",":","-","1","]",")","*","60","*","60","*","24",")","+","'s'","elif","last_char","==","'w'","and","'w'","in","valid_units",":","return","str","(","float","(","interval","[",":","-","1","]",")","*","60","*","60","*","24","*","7",")","+","'s'","else",":","raise","ValueError","(","'Unsupported units in interval string %s: %s'","%","(","interval",",","last_char",")",")","except","(","ValueError",",","OverflowError",")","as","e",":","raise","ValueError","(","'Unable to parse interval string %s: %s'","%","(","interval",",","e",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L883-L917"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"preemptile_param_type","parameters":"(preemptible)","argument_list":"","return_statement":"","docstring":"Wrapper function to create a PreemptibleParam object from argparse.","docstring_summary":"Wrapper function to create a PreemptibleParam object from argparse.","docstring_tokens":["Wrapper","function","to","create","a","PreemptibleParam","object","from","argparse","."],"function":"def preemptile_param_type(preemptible):\n \"\"\"Wrapper function to create a PreemptibleParam object from argparse.\"\"\"\n if bool is type(preemptible):\n return PreemptibleParam(preemptible)\n elif str is type(preemptible):\n try:\n return PreemptibleParam(int(preemptible))\n except ValueError:\n raise argparse.ArgumentTypeError(\n 'Invalid value {} for --preemptible.'.format(preemptible))\n else:\n raise argparse.ArgumentTypeError(\n 'Invalid value {} for --preemptible.'.format(preemptible))","function_tokens":["def","preemptile_param_type","(","preemptible",")",":","if","bool","is","type","(","preemptible",")",":","return","PreemptibleParam","(","preemptible",")","elif","str","is","type","(","preemptible",")",":","try",":","return","PreemptibleParam","(","int","(","preemptible",")",")","except","ValueError",":","raise","argparse",".","ArgumentTypeError","(","'Invalid value {} for --preemptible.'",".","format","(","preemptible",")",")","else",":","raise","argparse",".","ArgumentTypeError","(","'Invalid value {} for --preemptible.'",".","format","(","preemptible",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L965-L977"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"FileParamUtil.get_variable_name","parameters":"(self, name)","argument_list":"","return_statement":"return name","docstring":"Produce a default variable name if none is 
specified.","docstring_summary":"Produce a default variable name if none is specified.","docstring_tokens":["Produce","a","default","variable","name","if","none","is","specified","."],"function":"def get_variable_name(self, name):\n \"\"\"Produce a default variable name if none is specified.\"\"\"\n if not name:\n name = '%s%s' % (self._auto_prefix, self._auto_index)\n self._auto_index += 1\n return name","function_tokens":["def","get_variable_name","(","self",",","name",")",":","if","not","name",":","name","=","'%s%s'","%","(","self",".","_auto_prefix",",","self",".","_auto_index",")","self",".","_auto_index","+=","1","return","name"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L82-L87"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"FileParamUtil.rewrite_uris","parameters":"(self, raw_uri, file_provider)","argument_list":"","return_statement":"return normalized, os.path.join(self._relative_path, docker_path)","docstring":"Accept a raw uri and return rewritten versions.\n\n This function returns a normalized URI and a docker path. The normalized\n URI may have minor alterations meant to disambiguate and prepare for use\n by shell utilities that may require a specific format.\n\n The docker rewriter makes substantial modifications to the raw URI when\n constructing a docker path, but modifications must follow these rules:\n 1) System specific characters are not allowed (ex. indirect paths).\n 2) The path, if it is a directory, must end in a forward slash.\n 3) The path will begin with the value set in self._relative_path.\n 4) The path will have an additional prefix (after self._relative_path) set\n by the file provider-specific rewriter.\n\n Rewrite output for the docker path:\n >>> out_util = FileParamUtil('AUTO_', 'output')\n >>> out_util.rewrite_uris('gs:\/\/mybucket\/myfile.txt', job_model.P_GCS)[1]\n 'output\/gs\/mybucket\/myfile.txt'\n >>> out_util.rewrite_uris('.\/data\/myfolder\/', job_model.P_LOCAL)[1]\n 'output\/file\/data\/myfolder\/'\n\n When normalizing the URI for cloud buckets, no rewrites are done. For local\n files, the user directory will be expanded and relative paths will be\n converted to absolute:\n >>> in_util = FileParamUtil('AUTO_', 'input')\n >>> in_util.rewrite_uris('gs:\/\/mybucket\/gcs_dir\/', job_model.P_GCS)[0]\n 'gs:\/\/mybucket\/gcs_dir\/'\n >>> in_util.rewrite_uris('\/data\/.\/dir_a\/..\/myfile.txt',\n ... job_model.P_LOCAL)[0]\n '\/data\/myfile.txt'\n >>> in_util.rewrite_uris('file:\/\/\/tmp\/data\/*.bam', job_model.P_LOCAL)[0]\n '\/tmp\/data\/*.bam'\n\n Args:\n raw_uri: (str) the path component of the raw URI.\n file_provider: a valid provider (contained in job_model.FILE_PROVIDERS).\n\n Returns:\n normalized: a cleaned version of the uri provided by command line.\n docker_path: the uri rewritten in the format required for mounting inside\n a docker worker.\n\n Raises:\n ValueError: if file_provider is not valid.","docstring_summary":"Accept a raw uri and return rewritten versions.","docstring_tokens":["Accept","a","raw","uri","and","return","rewritten","versions","."],"function":"def rewrite_uris(self, raw_uri, file_provider):\n \"\"\"Accept a raw uri and return rewritten versions.\n\n This function returns a normalized URI and a docker path. 
The normalized\n URI may have minor alterations meant to disambiguate and prepare for use\n by shell utilities that may require a specific format.\n\n The docker rewriter makes substantial modifications to the raw URI when\n constructing a docker path, but modifications must follow these rules:\n 1) System specific characters are not allowed (ex. indirect paths).\n 2) The path, if it is a directory, must end in a forward slash.\n 3) The path will begin with the value set in self._relative_path.\n 4) The path will have an additional prefix (after self._relative_path) set\n by the file provider-specific rewriter.\n\n Rewrite output for the docker path:\n >>> out_util = FileParamUtil('AUTO_', 'output')\n >>> out_util.rewrite_uris('gs:\/\/mybucket\/myfile.txt', job_model.P_GCS)[1]\n 'output\/gs\/mybucket\/myfile.txt'\n >>> out_util.rewrite_uris('.\/data\/myfolder\/', job_model.P_LOCAL)[1]\n 'output\/file\/data\/myfolder\/'\n\n When normalizing the URI for cloud buckets, no rewrites are done. For local\n files, the user directory will be expanded and relative paths will be\n converted to absolute:\n >>> in_util = FileParamUtil('AUTO_', 'input')\n >>> in_util.rewrite_uris('gs:\/\/mybucket\/gcs_dir\/', job_model.P_GCS)[0]\n 'gs:\/\/mybucket\/gcs_dir\/'\n >>> in_util.rewrite_uris('\/data\/.\/dir_a\/..\/myfile.txt',\n ... job_model.P_LOCAL)[0]\n '\/data\/myfile.txt'\n >>> in_util.rewrite_uris('file:\/\/\/tmp\/data\/*.bam', job_model.P_LOCAL)[0]\n '\/tmp\/data\/*.bam'\n\n Args:\n raw_uri: (str) the path component of the raw URI.\n file_provider: a valid provider (contained in job_model.FILE_PROVIDERS).\n\n Returns:\n normalized: a cleaned version of the uri provided by command line.\n docker_path: the uri rewritten in the format required for mounting inside\n a docker worker.\n\n Raises:\n ValueError: if file_provider is not valid.\n \"\"\"\n if file_provider == job_model.P_GCS:\n normalized, docker_path = _gcs_uri_rewriter(raw_uri)\n elif file_provider == job_model.P_LOCAL:\n normalized, docker_path = _local_uri_rewriter(raw_uri)\n else:\n raise ValueError('File provider not supported: %r' % file_provider)\n return normalized, os.path.join(self._relative_path, docker_path)","function_tokens":["def","rewrite_uris","(","self",",","raw_uri",",","file_provider",")",":","if","file_provider","==","job_model",".","P_GCS",":","normalized",",","docker_path","=","_gcs_uri_rewriter","(","raw_uri",")","elif","file_provider","==","job_model",".","P_LOCAL",":","normalized",",","docker_path","=","_local_uri_rewriter","(","raw_uri",")","else",":","raise","ValueError","(","'File provider not supported: %r'","%","file_provider",")","return","normalized",",","os",".","path",".","join","(","self",".","_relative_path",",","docker_path",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L89-L141"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"FileParamUtil.parse_file_provider","parameters":"(uri)","argument_list":"","return_statement":"","docstring":"Find the file provider for a URI.","docstring_summary":"Find the file provider for a URI.","docstring_tokens":["Find","the","file","provider","for","a","URI","."],"function":"def parse_file_provider(uri):\n \"\"\"Find the file provider for a URI.\"\"\"\n providers = {'gs': job_model.P_GCS, 'file': job_model.P_LOCAL}\n # URI scheme detector uses a range up to 30 since none of the IANA\n # registered 
schemes are longer than this.\n provider_found = re.match(r'^([A-Za-z][A-Za-z0-9+.-]{0,29}):\/\/', uri)\n if provider_found:\n prefix = provider_found.group(1).lower()\n else:\n # If no provider is specified in the URI, assume that the local\n # filesystem is being used. Availability and validity of the local\n # file\/directory will be checked later.\n prefix = 'file'\n if prefix in providers:\n return providers[prefix]\n else:\n raise ValueError('File prefix not supported: %s:\/\/' % prefix)","function_tokens":["def","parse_file_provider","(","uri",")",":","providers","=","{","'gs'",":","job_model",".","P_GCS",",","'file'",":","job_model",".","P_LOCAL","}","# URI scheme detector uses a range up to 30 since none of the IANA","# registered schemes are longer than this.","provider_found","=","re",".","match","(","r'^([A-Za-z][A-Za-z0-9+.-]{0,29}):\/\/'",",","uri",")","if","provider_found",":","prefix","=","provider_found",".","group","(","1",")",".","lower","(",")","else",":","# If no provider is specified in the URI, assume that the local","# filesystem is being used. Availability and validity of the local","# file\/directory will be checked later.","prefix","=","'file'","if","prefix","in","providers",":","return","providers","[","prefix","]","else",":","raise","ValueError","(","'File prefix not supported: %s:\/\/'","%","prefix",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L144-L160"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"FileParamUtil._validate_paths_or_fail","parameters":"(uri, recursive)","argument_list":"","return_statement":"","docstring":"Do basic validation of the uri, return the path and filename.","docstring_summary":"Do basic validation of the uri, return the path and filename.","docstring_tokens":["Do","basic","validation","of","the","uri","return","the","path","and","filename","."],"function":"def _validate_paths_or_fail(uri, recursive):\n \"\"\"Do basic validation of the uri, return the path and filename.\"\"\"\n path, filename = os.path.split(uri)\n\n # dsub could support character ranges ([0-9]) with some more work, but for\n # now we assume that basic asterisk wildcards are sufficient. Reject any URI\n # that includes square brackets or question marks, since we know that\n # if they actually worked, it would be accidental.\n if '[' in uri or ']' in uri:\n raise ValueError(\n 'Square bracket (character ranges) are not supported: %s' % uri)\n if '?' 
in uri:\n raise ValueError('Question mark wildcards are not supported: %s' % uri)\n\n # Only support file URIs and *filename* wildcards\n # Wildcards at the directory level or \"**\" syntax would require better\n # support from the Pipelines API *or* doing expansion here and\n # (potentially) producing a series of FileParams, instead of one.\n if '*' in path:\n raise ValueError(\n 'Path wildcard (*) are only supported for files: %s' % uri)\n if '**' in filename:\n raise ValueError('Recursive wildcards (\"**\") not supported: %s' % uri)\n if filename in ('..', '.'):\n raise ValueError('Path characters \"..\" and \".\" not supported '\n 'for file names: %s' % uri)\n\n # Do not allow non-recursive IO to reference directories.\n if not recursive and not filename:\n raise ValueError('Input or output values that are not recursive must '\n 'reference a filename or wildcard: %s' % uri)","function_tokens":["def","_validate_paths_or_fail","(","uri",",","recursive",")",":","path",",","filename","=","os",".","path",".","split","(","uri",")","# dsub could support character ranges ([0-9]) with some more work, but for","# now we assume that basic asterisk wildcards are sufficient. Reject any URI","# that includes square brackets or question marks, since we know that","# if they actually worked, it would be accidental.","if","'['","in","uri","or","']'","in","uri",":","raise","ValueError","(","'Square bracket (character ranges) are not supported: %s'","%","uri",")","if","'?'","in","uri",":","raise","ValueError","(","'Question mark wildcards are not supported: %s'","%","uri",")","# Only support file URIs and *filename* wildcards","# Wildcards at the directory level or \"**\" syntax would require better","# support from the Pipelines API *or* doing expansion here and","# (potentially) producing a series of FileParams, instead of one.","if","'*'","in","path",":","raise","ValueError","(","'Path wildcard (*) are only supported for files: %s'","%","uri",")","if","'**'","in","filename",":","raise","ValueError","(","'Recursive wildcards (\"**\") not supported: %s'","%","uri",")","if","filename","in","(","'..'",",","'.'",")",":","raise","ValueError","(","'Path characters \"..\" and \".\" not supported '","'for file names: %s'","%","uri",")","# Do not allow non-recursive IO to reference directories.","if","not","recursive","and","not","filename",":","raise","ValueError","(","'Input or output values that are not recursive must '","'reference a filename or wildcard: %s'","%","uri",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L163-L193"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"FileParamUtil.parse_uri","parameters":"(self, raw_uri, recursive)","argument_list":"","return_statement":"return docker_uri, uri_parts, file_provider","docstring":"Return a valid docker_path, uri, and file provider from a flag value.","docstring_summary":"Return a valid docker_path, uri, and file provider from a flag value.","docstring_tokens":["Return","a","valid","docker_path","uri","and","file","provider","from","a","flag","value","."],"function":"def parse_uri(self, raw_uri, recursive):\n \"\"\"Return a valid docker_path, uri, and file provider from a flag value.\"\"\"\n # Assume recursive URIs are directory paths.\n if recursive:\n raw_uri = directory_fmt(raw_uri)\n # Get the file provider, validate the raw URI, and rewrite the path\n # component of the 
URI for docker and remote.\n file_provider = self.parse_file_provider(raw_uri)\n self._validate_paths_or_fail(raw_uri, recursive)\n uri, docker_uri = self.rewrite_uris(raw_uri, file_provider)\n uri_parts = job_model.UriParts(\n directory_fmt(os.path.dirname(uri)), os.path.basename(uri))\n return docker_uri, uri_parts, file_provider","function_tokens":["def","parse_uri","(","self",",","raw_uri",",","recursive",")",":","# Assume recursive URIs are directory paths.","if","recursive",":","raw_uri","=","directory_fmt","(","raw_uri",")","# Get the file provider, validate the raw URI, and rewrite the path","# component of the URI for docker and remote.","file_provider","=","self",".","parse_file_provider","(","raw_uri",")","self",".","_validate_paths_or_fail","(","raw_uri",",","recursive",")","uri",",","docker_uri","=","self",".","rewrite_uris","(","raw_uri",",","file_provider",")","uri_parts","=","job_model",".","UriParts","(","directory_fmt","(","os",".","path",".","dirname","(","uri",")",")",",","os",".","path",".","basename","(","uri",")",")","return","docker_uri",",","uri_parts",",","file_provider"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L195-L207"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"FileParamUtil.make_param","parameters":"(self, name, raw_uri, recursive)","argument_list":"","return_statement":"return self.param_class(name, raw_uri, docker_path, uri_parts, recursive,\n provider)","docstring":"Return a *FileParam given an input uri.","docstring_summary":"Return a *FileParam given an input uri.","docstring_tokens":["Return","a","*","FileParam","given","an","input","uri","."],"function":"def make_param(self, name, raw_uri, recursive):\n \"\"\"Return a *FileParam given an input uri.\"\"\"\n if not raw_uri:\n return self.param_class(name, None, None, None, recursive, None)\n docker_path, uri_parts, provider = self.parse_uri(raw_uri, recursive)\n return self.param_class(name, raw_uri, docker_path, uri_parts, recursive,\n provider)","function_tokens":["def","make_param","(","self",",","name",",","raw_uri",",","recursive",")",":","if","not","raw_uri",":","return","self",".","param_class","(","name",",","None",",","None",",","None",",","recursive",",","None",")","docker_path",",","uri_parts",",","provider","=","self",".","parse_uri","(","raw_uri",",","recursive",")","return","self",".","param_class","(","name",",","raw_uri",",","docker_path",",","uri_parts",",","recursive",",","provider",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L209-L215"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"MountParamUtil._parse_image_uri","parameters":"(self, raw_uri)","argument_list":"","return_statement":"return docker_uri","docstring":"Return a valid docker_path from a Google Persistent Disk url.","docstring_summary":"Return a valid docker_path from a Google Persistent Disk url.","docstring_tokens":["Return","a","valid","docker_path","from","a","Google","Persistent","Disk","url","."],"function":"def _parse_image_uri(self, raw_uri):\n \"\"\"Return a valid docker_path from a Google Persistent Disk url.\"\"\"\n # The string replace is so we don't have colons and double slashes in the\n # mount path. 
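The parse_file_provider and rewrite_uris records above describe two steps dsub applies to every input/output flag: detect the provider from the URI scheme (defaulting to the local filesystem when no scheme is present), then rewrite the path into an in-container docker path. The sketch below reproduces the documented scheme regex and mirrors the rewrite shown in the doctest ('gs://mybucket/myfile.txt' -> 'output/gs/mybucket/myfile.txt'); it stands apart from dsub's own _gcs_uri_rewriter and job_model constants.

import os
import re

# URI scheme detector; per the comment in parse_file_provider, no IANA
# registered scheme is longer than 30 characters.
_SCHEME_RE = re.compile(r'^([A-Za-z][A-Za-z0-9+.-]{0,29})://')

def parse_file_provider(uri):
    """Return 'gcs' or 'local' based on the URI scheme (default: local)."""
    match = _SCHEME_RE.match(uri)
    prefix = match.group(1).lower() if match else 'file'
    providers = {'gs': 'gcs', 'file': 'local'}
    if prefix not in providers:
        raise ValueError('File prefix not supported: %s://' % prefix)
    return providers[prefix]

def gcs_docker_path(raw_uri, relative_path='output'):
    """Rewrite gs://bucket/obj into the in-container mount path."""
    return os.path.join(relative_path, raw_uri.replace('gs://', 'gs/', 1))

assert parse_file_provider('gs://mybucket/myfile.txt') == 'gcs'
assert gcs_docker_path('gs://mybucket/myfile.txt') == 'output/gs/mybucket/myfile.txt'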
The idea is the resulting mount path would look like:\n # \/mnt\/data\/mount\/http\/www.googleapis.com\/compute\/v1\/projects\/...\n docker_uri = os.path.join(self._relative_path,\n raw_uri.replace('https:\/\/', 'https\/', 1))\n return docker_uri","function_tokens":["def","_parse_image_uri","(","self",",","raw_uri",")",":","# The string replace is so we don't have colons and double slashes in the","# mount path. The idea is the resulting mount path would look like:","# \/mnt\/data\/mount\/http\/www.googleapis.com\/compute\/v1\/projects\/...","docker_uri","=","os",".","path",".","join","(","self",".","_relative_path",",","raw_uri",".","replace","(","'https:\/\/'",",","'https\/'",",","1",")",")","return","docker_uri"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L240-L247"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"MountParamUtil._parse_local_mount_uri","parameters":"(self, raw_uri)","argument_list":"","return_statement":"return local_path, docker_uri","docstring":"Return a valid docker_path for a local file path.","docstring_summary":"Return a valid docker_path for a local file path.","docstring_tokens":["Return","a","valid","docker_path","for","a","local","file","path","."],"function":"def _parse_local_mount_uri(self, raw_uri):\n \"\"\"Return a valid docker_path for a local file path.\"\"\"\n raw_uri = directory_fmt(raw_uri)\n _, docker_path = _local_uri_rewriter(raw_uri)\n local_path = docker_path[len('file'):]\n docker_uri = os.path.join(self._relative_path, docker_path)\n return local_path, docker_uri","function_tokens":["def","_parse_local_mount_uri","(","self",",","raw_uri",")",":","raw_uri","=","directory_fmt","(","raw_uri",")","_",",","docker_path","=","_local_uri_rewriter","(","raw_uri",")","local_path","=","docker_path","[","len","(","'file'",")",":","]","docker_uri","=","os",".","path",".","join","(","self",".","_relative_path",",","docker_path",")","return","local_path",",","docker_uri"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L249-L255"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"MountParamUtil._parse_gcs_uri","parameters":"(self, raw_uri)","argument_list":"","return_statement":"return docker_uri","docstring":"Return a valid docker_path for a GCS bucket.","docstring_summary":"Return a valid docker_path for a GCS bucket.","docstring_tokens":["Return","a","valid","docker_path","for","a","GCS","bucket","."],"function":"def _parse_gcs_uri(self, raw_uri):\n \"\"\"Return a valid docker_path for a GCS bucket.\"\"\"\n # Assume URI is a directory path.\n raw_uri = directory_fmt(raw_uri)\n _, docker_path = _gcs_uri_rewriter(raw_uri)\n docker_uri = os.path.join(self._relative_path, docker_path)\n return docker_uri","function_tokens":["def","_parse_gcs_uri","(","self",",","raw_uri",")",":","# Assume URI is a directory path.","raw_uri","=","directory_fmt","(","raw_uri",")","_",",","docker_path","=","_gcs_uri_rewriter","(","raw_uri",")","docker_uri","=","os",".","path",".","join","(","self",".","_relative_path",",","docker_path",")","return","docker_uri"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L257-L263"} 
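The three MountParamUtil records above (_parse_image_uri, _parse_local_mount_uri, _parse_gcs_uri) all collapse the URI scheme separator so the resulting mount path contains no colons or double slashes. A small sketch of the image-URL case follows; 'relative_path' stands in for the mount root (the records suggest something like /mnt/data/mount), whose exact value is dsub-internal.

import os

def image_docker_path(raw_uri, relative_path='mount'):
    """Rewrite a persistent-disk image URL into a mountable docker path."""
    # 'https://host/...' becomes 'https/host/...' under the mount root.
    return os.path.join(relative_path, raw_uri.replace('https://', 'https/', 1))

print(image_docker_path(
    'https://www.googleapis.com/compute/v1/projects/p/global/images/i'))
# -> mount/https/www.googleapis.com/compute/v1/projects/p/global/images/i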
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"MountParamUtil.make_param","parameters":"(self, name, raw_uri, disk_size)","argument_list":"","return_statement":"","docstring":"Return a MountParam given a GCS bucket, disk image or local path.","docstring_summary":"Return a MountParam given a GCS bucket, disk image or local path.","docstring_tokens":["Return","a","MountParam","given","a","GCS","bucket","disk","image","or","local","path","."],"function":"def make_param(self, name, raw_uri, disk_size):\n \"\"\"Return a MountParam given a GCS bucket, disk image or local path.\"\"\"\n if raw_uri.startswith('https:\/\/www.googleapis.com\/compute'):\n # Full Image URI should look something like:\n # https:\/\/www.googleapis.com\/compute\/v1\/projects\/\/global\/images\/\n # But don't validate further, should the form of a valid image URI\n # change (v1->v2, for example)\n docker_path = self._parse_image_uri(raw_uri)\n return job_model.PersistentDiskMountParam(\n name, raw_uri, docker_path, disk_size, disk_type=None)\n elif raw_uri.startswith('file:\/\/'):\n local_path, docker_path = self._parse_local_mount_uri(raw_uri)\n return job_model.LocalMountParam(name, raw_uri, docker_path, local_path)\n elif raw_uri.startswith('gs:\/\/'):\n docker_path = self._parse_gcs_uri(raw_uri)\n return job_model.GCSMountParam(name, raw_uri, docker_path)\n else:\n raise ValueError(\n 'Mount parameter {} must begin with valid prefix.'.format(raw_uri))","function_tokens":["def","make_param","(","self",",","name",",","raw_uri",",","disk_size",")",":","if","raw_uri",".","startswith","(","'https:\/\/www.googleapis.com\/compute'",")",":","# Full Image URI should look something like:","# https:\/\/www.googleapis.com\/compute\/v1\/projects\/\/global\/images\/","# But don't validate further, should the form of a valid image URI","# change (v1->v2, for example)","docker_path","=","self",".","_parse_image_uri","(","raw_uri",")","return","job_model",".","PersistentDiskMountParam","(","name",",","raw_uri",",","docker_path",",","disk_size",",","disk_type","=","None",")","elif","raw_uri",".","startswith","(","'file:\/\/'",")",":","local_path",",","docker_path","=","self",".","_parse_local_mount_uri","(","raw_uri",")","return","job_model",".","LocalMountParam","(","name",",","raw_uri",",","docker_path",",","local_path",")","elif","raw_uri",".","startswith","(","'gs:\/\/'",")",":","docker_path","=","self",".","_parse_gcs_uri","(","raw_uri",")","return","job_model",".","GCSMountParam","(","name",",","raw_uri",",","docker_path",")","else",":","raise","ValueError","(","'Mount parameter {} must begin with valid prefix.'",".","format","(","raw_uri",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L265-L283"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/lib\/param_util.py","language":"python","identifier":"PreemptibleParam.validate","parameters":"(self, retries)","argument_list":"","return_statement":"","docstring":"Validates that preemptible arguments make sense with retries.","docstring_summary":"Validates that preemptible arguments make sense with retries.","docstring_tokens":["Validates","that","preemptible","arguments","make","sense","with","retries","."],"function":"def validate(self, retries):\n \"\"\"Validates that preemptible arguments make sense with retries.\"\"\"\n if int is 
type(self._max_preemptible_attempts):\n if retries < 0 or self._max_preemptible_attempts < 0:\n raise ValueError('--retries and --preemptible may not be negative')\n\n if self._max_preemptible_attempts >= 1 and not retries:\n # This means user specified a positive preemptible number\n # but didn't specify a retries number\n raise ValueError(\n 'Requesting 1 or more preemptible attempts requires setting retries'\n )\n\n if self._max_preemptible_attempts > retries:\n raise ValueError(\n 'Value passed for --preemptible cannot be larger than --retries.')","function_tokens":["def","validate","(","self",",","retries",")",":","if","int","is","type","(","self",".","_max_preemptible_attempts",")",":","if","retries","<","0","or","self",".","_max_preemptible_attempts","<","0",":","raise","ValueError","(","'--retries and --preemptible may not be negative'",")","if","self",".","_max_preemptible_attempts",">=","1","and","not","retries",":","# This means user specified a positive preemptible number","# but didn't specify a retries number","raise","ValueError","(","'Requesting 1 or more preemptible attempts requires setting retries'",")","if","self",".","_max_preemptible_attempts",">","retries",":","raise","ValueError","(","'Value passed for --preemptible cannot be larger than --retries.'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/lib\/param_util.py#L947-L962"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dstat.py","language":"python","identifier":"get_credentials","parameters":"(args)","argument_list":"","return_statement":"return provider_base.credentials_from_args(args)","docstring":"Returns credentials for API requests.","docstring_summary":"Returns credentials for API requests.","docstring_tokens":["Returns","credentials","for","API","requests","."],"function":"def get_credentials(args):\n \"\"\"Returns credentials for API requests.\"\"\"\n\n # Across dsub, dstat, ddel, defer to the provider for credentials handling\n return provider_base.credentials_from_args(args)","function_tokens":["def","get_credentials","(","args",")",":","# Across dsub, dstat, ddel, defer to the provider for credentials handling","return","provider_base",".","credentials_from_args","(","args",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dstat.py#L41-L45"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dstat.py","language":"python","identifier":"_parse_arguments","parameters":"()","argument_list":"","return_statement":"return provider_base.parse_args(\n parser, {\n 'google-cls-v2': ['project'],\n 'google-v2': ['project'],\n 'test-fails': [],\n 'local': [],\n }, sys.argv[1:])","docstring":"Parses command line arguments.\n\n Returns:\n A Namespace of parsed arguments.","docstring_summary":"Parses command line arguments.","docstring_tokens":["Parses","command","line","arguments","."],"function":"def _parse_arguments():\n \"\"\"Parses command line arguments.\n\n Returns:\n A Namespace of parsed arguments.\n \"\"\"\n # Handle version flag and exit if it was passed.\n param_util.handle_version_flag()\n\n parser = provider_base.create_parser(sys.argv[0])\n\n parser.add_argument(\n '--version', '-v', default=False, help='Print the dsub version and exit.')\n\n parser.add_argument(\n '--jobs',\n '-j',\n nargs='*',\n help='A list of jobs IDs on which to check status')\n 
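The PreemptibleParam.validate record above encodes three consistency rules between --preemptible and --retries. The sketch below restates them over plain parameters; note that dsub uses an exact `int is type(...)` check, so a plain True/False preemptible flag (always/never preemptible) bypasses the retry bookkeeping, which the isinstance(bool) early return below mirrors.

def validate_preemptible(max_preemptible_attempts, retries):
    if isinstance(max_preemptible_attempts, bool):
        return  # A bare True/False flag needs no retry bookkeeping.
    if retries < 0 or max_preemptible_attempts < 0:
        raise ValueError('--retries and --preemptible may not be negative')
    if max_preemptible_attempts >= 1 and not retries:
        raise ValueError(
            'Requesting 1 or more preemptible attempts requires setting retries')
    if max_preemptible_attempts > retries:
        raise ValueError(
            'Value passed for --preemptible cannot be larger than --retries.')

validate_preemptible(2, retries=3)  # OK: 2 preemptible attempts within 3 retries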
parser.add_argument(\n '--names',\n '-n',\n nargs='*',\n help='A list of job names on which to check status')\n parser.add_argument(\n '--tasks',\n '-t',\n nargs='*',\n help='A list of task IDs on which to check status')\n parser.add_argument(\n '--attempts',\n nargs='*',\n help='A list of task attempts on which to check status')\n parser.add_argument(\n '--users',\n '-u',\n nargs='*',\n default=[],\n help=\"\"\"Lists only those jobs which were submitted by the list of users.\n Use \"*\" to list jobs of any user.\"\"\")\n parser.add_argument(\n '--status',\n '-s',\n nargs='*',\n default=['RUNNING'],\n choices=['RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED', '*'],\n help=\"\"\"Lists only those jobs which match the specified status(es).\n Choose from {'RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED'}.\n Use \"*\" to list jobs of any status.\"\"\",\n metavar='STATUS')\n parser.add_argument(\n '--age',\n help=\"\"\"List only those jobs newer than the specified age. Ages can be\n listed using a number followed by a unit. Supported units are\n s (seconds), m (minutes), h (hours), d (days), w (weeks).\n For example: '7d' (7 days). Bare numbers are treated as UTC.\"\"\")\n parser.add_argument(\n '--label',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help='User labels to match. Tasks returned must match all labels.',\n metavar='KEY=VALUE')\n parser.add_argument(\n '--poll-interval',\n default=10,\n type=int,\n help='Polling interval (in seconds) for checking job status '\n 'when --wait is set.')\n parser.add_argument(\n '--wait', action='store_true', help='Wait until jobs have all completed.')\n parser.add_argument(\n '--limit',\n default=0,\n type=int,\n help='The maximum number of tasks to list. The default is unlimited.')\n parser.add_argument(\n '--format',\n choices=['text', 'json', 'yaml', 'provider-json'],\n help='Set the output format.')\n output_style = parser.add_mutually_exclusive_group()\n output_style.add_argument(\n '--full',\n '-f',\n action='store_true',\n help='Display output with full task information'\n ' and input parameters.')\n output_style.add_argument(\n '--summary',\n action='store_true',\n help='Display a summary of the results, grouped by (job, status).')\n\n # Shared between the \"google-cls-v2\" and \"google-v2\" providers\n google_common = parser.add_argument_group(\n title='google-common',\n description=\"\"\"Options common to the \"google\", \"google-cls-v2\", and\n \"google-v2\" providers\"\"\")\n google_common.add_argument(\n '--project', help='Cloud project ID in which to find and the job(s)')\n\n google_cls_v2 = parser.add_argument_group(\n title='\"google-cls-v2\" provider options',\n description='See also the \"google-common\" options listed')\n google_cls_v2.add_argument(\n '--location',\n default=job_model.DEFAULT_LOCATION,\n help=\"\"\"Specifies the Google Cloud region to which the dsub job was\n submitted. 
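The dstat _parse_arguments record in progress here shows the two-stage --version handling documented in the handle_version_flag record earlier: a throwaway parser with add_help=False and parse_known_args() checks the flag before real parsing, and the main parser re-declares --version only so it appears in help output. A condensed sketch, with DSUB_VERSION as a placeholder for the value dsub reads from _dsub_version.py:

import argparse
import sys

DSUB_VERSION = '0.0.0'  # placeholder; dsub parses this out of _dsub_version.py

def handle_version_flag():
    # Pre-pass parser: ignores all other flags, never consumes --help.
    pre = argparse.ArgumentParser(description='Version parser', add_help=False)
    pre.add_argument('--version', '-v', dest='version', action='store_true')
    args, _ = pre.parse_known_args()
    if args.version:
        print('dsub version: %s' % DSUB_VERSION)
        sys.exit()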
(default: {})\"\"\".format(job_model.DEFAULT_LOCATION))\n\n return provider_base.parse_args(\n parser, {\n 'google-cls-v2': ['project'],\n 'google-v2': ['project'],\n 'test-fails': [],\n 'local': [],\n }, sys.argv[1:])","function_tokens":["def","_parse_arguments","(",")",":","# Handle version flag and exit if it was passed.","param_util",".","handle_version_flag","(",")","parser","=","provider_base",".","create_parser","(","sys",".","argv","[","0","]",")","parser",".","add_argument","(","'--version'",",","'-v'",",","default","=","False",",","help","=","'Print the dsub version and exit.'",")","parser",".","add_argument","(","'--jobs'",",","'-j'",",","nargs","=","'*'",",","help","=","'A list of jobs IDs on which to check status'",")","parser",".","add_argument","(","'--names'",",","'-n'",",","nargs","=","'*'",",","help","=","'A list of job names on which to check status'",")","parser",".","add_argument","(","'--tasks'",",","'-t'",",","nargs","=","'*'",",","help","=","'A list of task IDs on which to check status'",")","parser",".","add_argument","(","'--attempts'",",","nargs","=","'*'",",","help","=","'A list of task attempts on which to check status'",")","parser",".","add_argument","(","'--users'",",","'-u'",",","nargs","=","'*'",",","default","=","[","]",",","help","=","\"\"\"Lists only those jobs which were submitted by the list of users.\n Use \"*\" to list jobs of any user.\"\"\"",")","parser",".","add_argument","(","'--status'",",","'-s'",",","nargs","=","'*'",",","default","=","[","'RUNNING'","]",",","choices","=","[","'RUNNING'",",","'SUCCESS'",",","'FAILURE'",",","'CANCELED'",",","'*'","]",",","help","=","\"\"\"Lists only those jobs which match the specified status(es).\n Choose from {'RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED'}.\n Use \"*\" to list jobs of any status.\"\"\"",",","metavar","=","'STATUS'",")","parser",".","add_argument","(","'--age'",",","help","=","\"\"\"List only those jobs newer than the specified age. Ages can be\n listed using a number followed by a unit. Supported units are\n s (seconds), m (minutes), h (hours), d (days), w (weeks).\n For example: '7d' (7 days). Bare numbers are treated as UTC.\"\"\"",")","parser",".","add_argument","(","'--label'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","'User labels to match. Tasks returned must match all labels.'",",","metavar","=","'KEY=VALUE'",")","parser",".","add_argument","(","'--poll-interval'",",","default","=","10",",","type","=","int",",","help","=","'Polling interval (in seconds) for checking job status '","'when --wait is set.'",")","parser",".","add_argument","(","'--wait'",",","action","=","'store_true'",",","help","=","'Wait until jobs have all completed.'",")","parser",".","add_argument","(","'--limit'",",","default","=","0",",","type","=","int",",","help","=","'The maximum number of tasks to list. 
The default is unlimited.'",")","parser",".","add_argument","(","'--format'",",","choices","=","[","'text'",",","'json'",",","'yaml'",",","'provider-json'","]",",","help","=","'Set the output format.'",")","output_style","=","parser",".","add_mutually_exclusive_group","(",")","output_style",".","add_argument","(","'--full'",",","'-f'",",","action","=","'store_true'",",","help","=","'Display output with full task information'","' and input parameters.'",")","output_style",".","add_argument","(","'--summary'",",","action","=","'store_true'",",","help","=","'Display a summary of the results, grouped by (job, status).'",")","# Shared between the \"google-cls-v2\" and \"google-v2\" providers","google_common","=","parser",".","add_argument_group","(","title","=","'google-common'",",","description","=","\"\"\"Options common to the \"google\", \"google-cls-v2\", and\n \"google-v2\" providers\"\"\"",")","google_common",".","add_argument","(","'--project'",",","help","=","'Cloud project ID in which to find and the job(s)'",")","google_cls_v2","=","parser",".","add_argument_group","(","title","=","'\"google-cls-v2\" provider options'",",","description","=","'See also the \"google-common\" options listed'",")","google_cls_v2",".","add_argument","(","'--location'",",","default","=","job_model",".","DEFAULT_LOCATION",",","help","=","\"\"\"Specifies the Google Cloud region to which the dsub job was\n submitted. (default: {})\"\"\"",".","format","(","job_model",".","DEFAULT_LOCATION",")",")","return","provider_base",".","parse_args","(","parser",",","{","'google-cls-v2'",":","[","'project'","]",",","'google-v2'",":","[","'project'","]",",","'test-fails'",":","[","]",",","'local'",":","[","]",",","}",",","sys",".","argv","[","1",":","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dstat.py#L48-L163"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dstat.py","language":"python","identifier":"dstat_job_producer","parameters":"(provider,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0,\n full_output=False,\n summary_output=False,\n poll_interval=0,\n raw_format=False)","argument_list":"","return_statement":"","docstring":"Generate jobs as lists of task dicts ready for formatting\/output.\n\n Args:\n provider: an instantiated dsub provider.\n statuses: a set of status strings that eligible jobs may match.\n user_ids: a set of user strings that eligible jobs may match.\n job_ids: a set of job-id strings eligible jobs may match.\n job_names: a set of job-name strings eligible jobs may match.\n task_ids: a set of task-id strings eligible tasks may match.\n task_attempts: a set of task-attempt strings eligible tasks may match.\n labels: set of LabelParam that all tasks must match.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent create\n time of a task, inclusive.\n max_tasks: (int) maximum number of tasks to return per dstat job lookup.\n full_output: (bool) return all dsub fields.\n summary_output: (bool) return a summary of the job list.\n poll_interval: (int) wait time between poll events, dstat will poll jobs\n until all jobs succeed or fail. 
Set to zero to disable\n polling and return after the first lookup.\n raw_format: (bool) set True to prevent dsub from normalizing the task dict,\n this defaults to False and should only be set True if a\n provider-specific view of tasks is absolutely required.\n (NB: provider interfaces change over time, no transition path\n will be provided for users depending on this flag).\n\n Yields:\n lists of task dictionaries - each list representing a dstat poll event.","docstring_summary":"Generate jobs as lists of task dicts ready for formatting\/output.","docstring_tokens":["Generate","jobs","as","lists","of","task","dicts","ready","for","formatting","\/","output","."],"function":"def dstat_job_producer(provider,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0,\n full_output=False,\n summary_output=False,\n poll_interval=0,\n raw_format=False):\n \"\"\"Generate jobs as lists of task dicts ready for formatting\/output.\n\n Args:\n provider: an instantiated dsub provider.\n statuses: a set of status strings that eligible jobs may match.\n user_ids: a set of user strings that eligible jobs may match.\n job_ids: a set of job-id strings eligible jobs may match.\n job_names: a set of job-name strings eligible jobs may match.\n task_ids: a set of task-id strings eligible tasks may match.\n task_attempts: a set of task-attempt strings eligible tasks may match.\n labels: set of LabelParam that all tasks must match.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent create\n time of a task, inclusive.\n max_tasks: (int) maximum number of tasks to return per dstat job lookup.\n full_output: (bool) return all dsub fields.\n summary_output: (bool) return a summary of the job list.\n poll_interval: (int) wait time between poll events, dstat will poll jobs\n until all jobs succeed or fail. 
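The dstat_job_producer record above describes a polling generator: each iteration yields one batch of formatted tasks and polling continues while any task is still RUNNING (or stops after one lookup when poll_interval is zero). Below is a condensed sketch of that loop; 'lookup' and 'format_task' stand in for the provider's lookup_job_tasks and output_formatter.prepare_row, which are dsub internals.

import time

def job_producer(lookup, format_task, poll_interval=0, max_tasks=0):
    some_job_running = True
    while some_job_running:
        some_job_running = False
        batch = []
        for task in lookup():
            # Respect the max_tasks cap, as in the documented loop.
            if 0 < max_tasks <= len(batch):
                break
            batch.append(format_task(task))
            if task.get('task-status') == 'RUNNING':
                some_job_running = True
        yield batch
        if poll_interval and some_job_running:
            time.sleep(poll_interval)
        else:
            break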
Set to zero to disable\n polling and return after the first lookup.\n raw_format: (bool) set True to prevent dsub from normalizing the task dict,\n this defaults to False and should only be set True if a\n provider-specific view of tasks is absolutely required.\n (NB: provider interfaces change over time, no transition path\n will be provided for users depending on this flag).\n\n Yields:\n lists of task dictionaries - each list representing a dstat poll event.\n \"\"\"\n some_job_running = True\n while some_job_running:\n # Get a batch of jobs.\n tasks = provider.lookup_job_tasks(\n statuses,\n user_ids=user_ids,\n job_ids=job_ids,\n job_names=job_names,\n task_ids=task_ids,\n task_attempts=task_attempts,\n labels=labels,\n create_time_min=create_time_min,\n create_time_max=create_time_max,\n max_tasks=max_tasks,\n page_size=max_tasks)\n\n some_job_running = False\n\n formatted_tasks = []\n for task in tasks:\n if 0 < max_tasks <= len(formatted_tasks):\n break\n\n # Format tasks as specified.\n if raw_format:\n formatted_tasks.append(task.raw_task_data())\n else:\n formatted_tasks.append(\n output_formatter.prepare_row(task, full_output, summary_output))\n\n # Determine if any of the jobs are running.\n if task.get_field('task-status') == 'RUNNING':\n some_job_running = True\n\n # Yield the tasks and determine if the loop should continue.\n yield formatted_tasks\n if poll_interval and some_job_running:\n time.sleep(poll_interval)\n else:\n break","function_tokens":["def","dstat_job_producer","(","provider",",","statuses",",","user_ids","=","None",",","job_ids","=","None",",","job_names","=","None",",","task_ids","=","None",",","task_attempts","=","None",",","labels","=","None",",","create_time_min","=","None",",","create_time_max","=","None",",","max_tasks","=","0",",","full_output","=","False",",","summary_output","=","False",",","poll_interval","=","0",",","raw_format","=","False",")",":","some_job_running","=","True","while","some_job_running",":","# Get a batch of jobs.","tasks","=","provider",".","lookup_job_tasks","(","statuses",",","user_ids","=","user_ids",",","job_ids","=","job_ids",",","job_names","=","job_names",",","task_ids","=","task_ids",",","task_attempts","=","task_attempts",",","labels","=","labels",",","create_time_min","=","create_time_min",",","create_time_max","=","create_time_max",",","max_tasks","=","max_tasks",",","page_size","=","max_tasks",")","some_job_running","=","False","formatted_tasks","=","[","]","for","task","in","tasks",":","if","0","<","max_tasks","<=","len","(","formatted_tasks",")",":","break","# Format tasks as specified.","if","raw_format",":","formatted_tasks",".","append","(","task",".","raw_task_data","(",")",")","else",":","formatted_tasks",".","append","(","output_formatter",".","prepare_row","(","task",",","full_output",",","summary_output",")",")","# Determine if any of the jobs are running.","if","task",".","get_field","(","'task-status'",")","==","'RUNNING'",":","some_job_running","=","True","# Yield the tasks and determine if the loop should continue.","yield","formatted_tasks","if","poll_interval","and","some_job_running",":","time",".","sleep","(","poll_interval",")","else",":","break"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dstat.py#L227-L311"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dstat.py","language":"python","identifier":"lookup_job_tasks","parameters":"(provider,\n statuses,\n user_ids=None,\n 
job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0,\n page_size=0,\n summary_output=False)","argument_list":"","return_statement":"","docstring":"Generate formatted jobs individually, in order of create-time.\n\n Args:\n provider: an instantiated dsub provider.\n statuses: a set of status strings that eligible jobs may match.\n user_ids: a set of user strings that eligible jobs may match.\n job_ids: a set of job-id strings eligible jobs may match.\n job_names: a set of job-name strings eligible jobs may match.\n task_ids: a set of task-id strings eligible tasks may match.\n task_attempts: a set of task-attempt strings eligible tasks may match.\n labels: set of LabelParam that all tasks must match.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent create\n time of a task, inclusive.\n max_tasks: (int) maximum number of tasks to return per dstat job lookup.\n page_size: the page size to use for each query to the backend. May be\n ignored by some provider implementations.\n summary_output: (bool) summarize the job list.\n\n Yields:\n Individual task dictionaries with associated metadata","docstring_summary":"Generate formatted jobs individually, in order of create-time.","docstring_tokens":["Generate","formatted","jobs","individually","in","order","of","create","-","time","."],"function":"def lookup_job_tasks(provider,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0,\n page_size=0,\n summary_output=False):\n \"\"\"Generate formatted jobs individually, in order of create-time.\n\n Args:\n provider: an instantiated dsub provider.\n statuses: a set of status strings that eligible jobs may match.\n user_ids: a set of user strings that eligible jobs may match.\n job_ids: a set of job-id strings eligible jobs may match.\n job_names: a set of job-name strings eligible jobs may match.\n task_ids: a set of task-id strings eligible tasks may match.\n task_attempts: a set of task-attempt strings eligible tasks may match.\n labels: set of LabelParam that all tasks must match.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent create\n time of a task, inclusive.\n max_tasks: (int) maximum number of tasks to return per dstat job lookup.\n page_size: the page size to use for each query to the backend. 
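Several records here take a summary_output flag whose behavior the dstat --summary help describes as "a summary of the results, grouped by (job, status)". The grouping below is only an approximation of that documented behavior, using assumed task-dict field names ('job-name', 'status'); dsub's real formatting lives in output_formatter.

import collections

def summarize(tasks):
    counts = collections.Counter(
        (t.get('job-name'), t.get('status')) for t in tasks)
    return [{'job-name': job, 'status': status, 'task-count': n}
            for (job, status), n in sorted(counts.items())]

print(summarize([{'job-name': 'align', 'status': 'RUNNING'},
                 {'job-name': 'align', 'status': 'RUNNING'},
                 {'job-name': 'align', 'status': 'FAILURE'}]))
# -> one row per (job, status) pair with a task count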
May be\n ignored by some provider implementations.\n summary_output: (bool) summarize the job list.\n\n Yields:\n Individual task dictionaries with associated metadata\n \"\"\"\n tasks_generator = provider.lookup_job_tasks(\n statuses,\n user_ids=user_ids,\n job_ids=job_ids,\n job_names=job_names,\n task_ids=task_ids,\n task_attempts=task_attempts,\n labels=labels,\n create_time_min=create_time_min,\n create_time_max=create_time_max,\n max_tasks=max_tasks,\n page_size=page_size)\n\n # Yield formatted tasks.\n for task in tasks_generator:\n yield output_formatter.prepare_row(task, True, summary_output)","function_tokens":["def","lookup_job_tasks","(","provider",",","statuses",",","user_ids","=","None",",","job_ids","=","None",",","job_names","=","None",",","task_ids","=","None",",","task_attempts","=","None",",","labels","=","None",",","create_time_min","=","None",",","create_time_max","=","None",",","max_tasks","=","0",",","page_size","=","0",",","summary_output","=","False",")",":","tasks_generator","=","provider",".","lookup_job_tasks","(","statuses",",","user_ids","=","user_ids",",","job_ids","=","job_ids",",","job_names","=","job_names",",","task_ids","=","task_ids",",","task_attempts","=","task_attempts",",","labels","=","labels",",","create_time_min","=","create_time_min",",","create_time_max","=","create_time_max",",","max_tasks","=","max_tasks",",","page_size","=","page_size",")","# Yield formatted tasks.","for","task","in","tasks_generator",":","yield","output_formatter",".","prepare_row","(","task",",","True",",","summary_output",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dstat.py#L314-L365"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"get_credentials","parameters":"(args)","argument_list":"","return_statement":"return provider_base.credentials_from_args(args)","docstring":"Returns credentials for API requests.","docstring_summary":"Returns credentials for API requests.","docstring_tokens":["Returns","credentials","for","API","requests","."],"function":"def get_credentials(args):\n \"\"\"Returns credentials for API requests.\"\"\"\n\n # Across dsub, dstat, ddel, defer to the provider for credentials handling\n return provider_base.credentials_from_args(args)","function_tokens":["def","get_credentials","(","args",")",":","# Across dsub, dstat, ddel, defer to the provider for credentials handling","return","provider_base",".","credentials_from_args","(","args",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L176-L180"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_check_private_address","parameters":"(args)","argument_list":"","return_statement":"","docstring":"If --use-private-address is enabled, ensure the Docker path is for GCR.","docstring_summary":"If --use-private-address is enabled, ensure the Docker path is for GCR.","docstring_tokens":["If","--","use","-","private","-","address","is","enabled","ensure","the","Docker","path","is","for","GCR","."],"function":"def _check_private_address(args):\n \"\"\"If --use-private-address is enabled, ensure the Docker path is for GCR.\"\"\"\n if args.use_private_address:\n image = args.image or DEFAULT_IMAGE\n split = image.split('\/', 1)\n if len(split) == 1 or not 
split[0].endswith('gcr.io'):\n raise ValueError(\n '--use-private-address must specify a --image with a gcr.io host')","function_tokens":["def","_check_private_address","(","args",")",":","if","args",".","use_private_address",":","image","=","args",".","image","or","DEFAULT_IMAGE","split","=","image",".","split","(","'\/'",",","1",")","if","len","(","split",")","==","1","or","not","split","[","0","]",".","endswith","(","'gcr.io'",")",":","raise","ValueError","(","'--use-private-address must specify a --image with a gcr.io host'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L183-L190"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_check_nvidia_driver_version","parameters":"(args)","argument_list":"","return_statement":"","docstring":"If --nvidia-driver-version is set, warn that it is ignored.","docstring_summary":"If --nvidia-driver-version is set, warn that it is ignored.","docstring_tokens":["If","--","nvidia","-","driver","-","version","is","set","warn","that","it","is","ignored","."],"function":"def _check_nvidia_driver_version(args):\n \"\"\"If --nvidia-driver-version is set, warn that it is ignored.\"\"\"\n if args.nvidia_driver_version:\n print('***WARNING: The --nvidia-driver-version flag is deprecated and will '\n 'be ignored.')","function_tokens":["def","_check_nvidia_driver_version","(","args",")",":","if","args",".","nvidia_driver_version",":","print","(","'***WARNING: The --nvidia-driver-version flag is deprecated and will '","'be ignored.'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L193-L197"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_google_cls_v2_parse_arguments","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Validated google-cls-v2 arguments.","docstring_summary":"Validated google-cls-v2 arguments.","docstring_tokens":["Validated","google","-","cls","-","v2","arguments","."],"function":"def _google_cls_v2_parse_arguments(args):\n \"\"\"Validated google-cls-v2 arguments.\"\"\"\n\n # For the google-cls-v2 provider, the addition of the \"--location\" parameter,\n # along with a default (us-central1), we can just default everything.\n\n # So we only need to validate that there is not both a region and zone.\n if (args.zones and args.regions):\n raise ValueError('At most one of --regions and --zones may be specified')\n\n if args.machine_type and (args.min_cores or args.min_ram):\n raise ValueError(\n '--machine-type not supported together with --min-cores or --min-ram.')\n\n _check_private_address(args)\n _check_nvidia_driver_version(args)","function_tokens":["def","_google_cls_v2_parse_arguments","(","args",")",":","# For the google-cls-v2 provider, the addition of the \"--location\" parameter,","# along with a default (us-central1), we can just default everything.","# So we only need to validate that there is not both a region and zone.","if","(","args",".","zones","and","args",".","regions",")",":","raise","ValueError","(","'At most one of --regions and --zones may be specified'",")","if","args",".","machine_type","and","(","args",".","min_cores","or","args",".","min_ram",")",":","raise","ValueError","(","'--machine-type not supported together with --min-cores or 
--min-ram.'",")","_check_private_address","(","args",")","_check_nvidia_driver_version","(","args",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L200-L215"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_google_v2_parse_arguments","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Validated google-v2 arguments.","docstring_summary":"Validated google-v2 arguments.","docstring_tokens":["Validated","google","-","v2","arguments","."],"function":"def _google_v2_parse_arguments(args):\n \"\"\"Validated google-v2 arguments.\"\"\"\n if (args.zones and args.regions) or (not args.zones and not args.regions):\n raise ValueError('Exactly one of --regions and --zones must be specified')\n\n if args.machine_type and (args.min_cores or args.min_ram):\n raise ValueError(\n '--machine-type not supported together with --min-cores or --min-ram.')\n\n _check_private_address(args)\n _check_nvidia_driver_version(args)","function_tokens":["def","_google_v2_parse_arguments","(","args",")",":","if","(","args",".","zones","and","args",".","regions",")","or","(","not","args",".","zones","and","not","args",".","regions",")",":","raise","ValueError","(","'Exactly one of --regions and --zones must be specified'",")","if","args",".","machine_type","and","(","args",".","min_cores","or","args",".","min_ram",")",":","raise","ValueError","(","'--machine-type not supported together with --min-cores or --min-ram.'",")","_check_private_address","(","args",")","_check_nvidia_driver_version","(","args",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L218-L228"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_local_parse_arguments","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Validated local arguments.","docstring_summary":"Validated local arguments.","docstring_tokens":["Validated","local","arguments","."],"function":"def _local_parse_arguments(args):\n \"\"\"Validated local arguments.\"\"\"\n if args.user and args.user != dsub_util.get_os_user():\n raise ValueError('If specified, the local provider\\'s \"--user\" flag must '\n 'match the current logged-in user.')","function_tokens":["def","_local_parse_arguments","(","args",")",":","if","args",".","user","and","args",".","user","!=","dsub_util",".","get_os_user","(",")",":","raise","ValueError","(","'If specified, the local provider\\'s \"--user\" flag must '","'match the current logged-in user.'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L231-L235"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_parse_arguments","parameters":"(prog, argv)","argument_list":"","return_statement":"return args","docstring":"Parses command line arguments.\n\n Args:\n prog: The path of the program (dsub.py) or an alternate program name to\n display in usage.\n argv: The list of program arguments to parse.\n\n Returns:\n A Namespace of parsed arguments.","docstring_summary":"Parses command line arguments.","docstring_tokens":["Parses","command","line","arguments","."],"function":"def _parse_arguments(prog, argv):\n 
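The provider-validation records above (_check_private_address, _google_cls_v2_parse_arguments, _google_v2_parse_arguments) boil down to a few checks, restated below over plain parameters instead of an argparse Namespace: google-v2 needs exactly one of --regions/--zones (google-cls-v2 relaxes this to "at most one" because --location supplies a default), --machine-type conflicts with --min-cores/--min-ram, and --use-private-address requires a gcr.io-hosted image.

def validate_google_v2_regions(zones, regions):
    # Exactly one of --regions / --zones for the google-v2 provider.
    if bool(zones) == bool(regions):
        raise ValueError('Exactly one of --regions and --zones must be specified')

def check_private_address(image, use_private_address):
    # --use-private-address only works for images pulled from a gcr.io host.
    if use_private_address:
        split = image.split('/', 1)
        if len(split) == 1 or not split[0].endswith('gcr.io'):
            raise ValueError(
                '--use-private-address must specify a --image with a gcr.io host')

validate_google_v2_regions(zones=[], regions=['us-central1'])  # OK
check_private_address('gcr.io/my-project/my-image', True)      # OK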
\"\"\"Parses command line arguments.\n\n Args:\n prog: The path of the program (dsub.py) or an alternate program name to\n display in usage.\n argv: The list of program arguments to parse.\n\n Returns:\n A Namespace of parsed arguments.\n \"\"\"\n # Handle version flag and exit if it was passed.\n param_util.handle_version_flag()\n\n parser = provider_base.create_parser(prog)\n\n # Add dsub core job submission arguments\n parser.add_argument(\n '--version', '-v', default=False, help='Print the dsub version and exit.')\n\n parser.add_argument(\n '--unique-job-id',\n default=False,\n action='store_true',\n help=\"\"\"Experimental: create a unique 32 character UUID for the dsub\n job-id using https:\/\/docs.python.org\/3\/library\/uuid.html.\n (default: False)\"\"\")\n parser.add_argument(\n '--name',\n help=\"\"\"Name for the job. Defaults to the script name or\n first token of the --command if specified.\"\"\")\n parser.add_argument(\n '--tasks',\n nargs='*',\n action=TaskParamAction,\n help=\"\"\"Path to a file of tab separated values (TSV) for task parameters.\n The file may be located in the local filesystem or in a Google Cloud\n Storage bucket.\n\n The first line is a list of column headers specifying an --env,\n --input, --input-recursive, --output or --output-recursive variable,\n and each subsequent line specifies the values for a task.\n\n Optionally specify tasks from the file to submit. Can take the form\n \"m\", \"m-\", or \"m-n\" where m and n are task numbers starting at 1.\n (default: None)\"\"\",\n metavar='FILE M-N')\n parser.add_argument(\n '--image',\n # Defaults to None so we can emit a warning if not specified\n # Will later on be set to DEFAULT_IMAGE\n default=None,\n help=\"\"\"Image name from Docker Hub, Google Container Repository, or other\n Docker image service. The task must have READ access to the\n image. (default: {})\"\"\".format(DEFAULT_IMAGE))\n parser.add_argument(\n '--dry-run',\n default=False,\n action='store_true',\n help='Print the task(s) that would be run and then exit. (default: False)'\n )\n parser.add_argument(\n '--command',\n help=\"\"\"Command to run inside the job\\'s Docker container. This argument\n or the --script argument must be provided.\"\"\",\n metavar='COMMAND')\n parser.add_argument(\n '--script',\n help=\"\"\"Path to a script that is located in the local file system or\n inside a Google Cloud Storage bucket. This script will be run inside\n the job\\'s Docker container. 
This argument or the --command\n argument must be provided.\"\"\",\n metavar='SCRIPT')\n parser.add_argument(\n '--env',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help='Environment variables for the script\\'s execution environment',\n metavar='KEY=VALUE')\n parser.add_argument(\n '--label',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help='Labels to associate to the job.',\n metavar='KEY=VALUE')\n parser.add_argument(\n '--input',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help=\"\"\"Input path arguments to localize into the script's execution\n environment\"\"\",\n metavar='KEY=REMOTE_PATH')\n parser.add_argument(\n '--input-recursive',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help=\"\"\"Input path arguments to localize recursively into the script\\'s\n execution environment\"\"\",\n metavar='KEY=REMOTE_PATH')\n parser.add_argument(\n '--output',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help=\"\"\"Output path arguments to de-localize from the script\\'s execution\n environment\"\"\",\n metavar='KEY=REMOTE_PATH')\n parser.add_argument(\n '--output-recursive',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help=\"\"\"Output path arguments to de-localize recursively from the script's\n execution environment\"\"\",\n metavar='KEY=REMOTE_PATH')\n parser.add_argument(\n '--user',\n '-u',\n help='User submitting the dsub job, defaults to the current OS user.')\n parser.add_argument(\n '--user-project',\n help=\"\"\"Specify a user project to be billed for all requests to Google\n Cloud Storage (logging, localization, delocalization). This flag exists\n to support accessing Requester Pays buckets (default: None)\"\"\")\n parser.add_argument(\n '--mount',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help=\"\"\"Mount a resource such as a bucket, disk, or directory into your\n Docker container\"\"\",\n metavar='KEY=PATH_SPEC')\n\n # Add dsub job management arguments\n parser.add_argument(\n '--wait',\n action='store_true',\n help='Wait for the job to finish all its tasks. (default: False)')\n parser.add_argument(\n '--retries',\n default=0,\n type=int,\n help='Number of retries to perform on failed tasks. (default: 0)')\n parser.add_argument(\n '--poll-interval',\n default=10,\n type=int,\n help='Polling interval (in seconds) for checking job status '\n 'when --wait or --after are set. (default: 10)')\n parser.add_argument(\n '--after',\n nargs='+',\n default=[],\n help='Job ID(s) to wait for before starting this job.')\n parser.add_argument(\n '--skip',\n default=False,\n action='store_true',\n help=\"\"\"Do not submit the job if all output specified using the --output\n and --output-recursive parameters already exist. Note that wildcard\n and recursive outputs cannot be strictly verified. See the\n documentation for details. (default: False)\"\"\")\n parser.add_argument(\n '--summary',\n default=False,\n action='store_true',\n help=\"\"\"During the --wait loop, display a summary of the results,\n grouped by (job, status). (default: False)\"\"\")\n\n # Add dsub resource requirement arguments\n parser.add_argument(\n '--min-cores',\n type=int,\n help=\"\"\"Minimum CPU cores for each job. 
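Flags such as `--env`, `--label`, `--input`, and `--output` above all use `nargs='*'` with `param_util.ListParamAction`, so repeated occurrences accumulate into one list. A simplified stand-in (not dsub's actual action class) showing that accumulation pattern:

```python
import argparse

class KeyValueAppendAction(argparse.Action):
    """Accumulate repeated KEY=VALUE flags into a single list."""

    def __call__(self, parser, namespace, values, option_string=None):
        items = list(getattr(namespace, self.dest) or [])
        for value in values:
            key, _, val = value.partition('=')
            items.append((key, val))
        setattr(namespace, self.dest, items)

parser = argparse.ArgumentParser()
parser.add_argument('--env', nargs='*', action=KeyValueAppendAction, default=[])
args = parser.parse_args(['--env', 'SAMPLE=s1', 'REF=hg38', '--env', 'DEBUG=1'])
print(args.env)  # [('SAMPLE', 's1'), ('REF', 'hg38'), ('DEBUG', '1')]
```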
The default is provider-specific.\n The google-v2 provider default is 1 core.\n The local provider does not allocate resources, but uses available\n resources of your machine.\"\"\")\n parser.add_argument(\n '--min-ram',\n type=float,\n help=\"\"\"Minimum RAM per job in GB. The default is provider-specific.\n The google-v2 provider default is 3.75 GB.\n The local provider does not allocate resources, but uses available\n resources of your machine.\"\"\")\n parser.add_argument(\n '--disk-size',\n default=job_model.DEFAULT_DISK_SIZE,\n type=int,\n help='Size (in GB) of data disk to attach for each job (default: {})'\n .format(job_model.DEFAULT_DISK_SIZE))\n\n parser.add_argument(\n '--logging',\n help='Cloud Storage path to send logging output'\n ' (either a folder, or file ending in \".log\")')\n\n # Add provider-specific arguments\n\n # Shared between the \"google-cls-v2\" and \"google-v2\" providers\n google_common = parser.add_argument_group(\n title='google-common',\n description=\"\"\"Options common to the \"google-cls-v2\" and \"google-v2\"\n providers\"\"\")\n google_common.add_argument(\n '--project', help='Cloud project ID in which to run the job')\n google_common.add_argument(\n '--boot-disk-size',\n default=job_model.DEFAULT_BOOT_DISK_SIZE,\n type=int,\n help='Size (in GB) of the boot disk (default: {})'.format(\n job_model.DEFAULT_BOOT_DISK_SIZE))\n google_common.add_argument(\n '--preemptible',\n const=param_util.preemptile_param_type(True),\n default=param_util.preemptile_param_type(False),\n nargs='?', # Be careful if we ever add positional arguments\n type=param_util.preemptile_param_type,\n help=\"\"\"If --preemptible is given without a number, enables preemptible\n VMs for all attempts for all tasks. If a number value N is used,\n enables preemptible VMs for up to N attempts for each task.\n Defaults to not using preemptible VMs.\"\"\")\n google_common.add_argument(\n '--zones', nargs='+', help='List of Google Compute Engine zones.')\n google_common.add_argument(\n '--scopes',\n nargs='+',\n help=\"\"\"Space-separated scopes for Google Compute Engine instances.\n If unspecified, provider will use '%s'\"\"\" % ','.join(\n google_base.DEFAULT_SCOPES))\n google_common.add_argument(\n '--accelerator-type',\n help=\"\"\"The Compute Engine accelerator type. See\n https:\/\/cloud.google.com\/compute\/docs\/gpus\/ for supported GPU types.\n\n Only NVIDIA GPU accelerators are currently supported. If an NVIDIA GPU\n is attached, the required runtime libraries will be made available to\n all containers under \/usr\/local\/nvidia.\n\n Each version of Container-Optimized OS image (used by the Pipelines\n API) has a default supported NVIDIA GPU driver version. See\n https:\/\/cloud.google.com\/container-optimized-os\/docs\/how-to\/run-gpus#install\n\n Note that attaching a GPU increases the worker VM startup time by a\n few minutes. 
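The `--preemptible` flag above is unusual: given without a value it means "preemptible for all attempts", while a numeric value N means "preemptible for up to N attempts". A sketch of that semantics; the class internals here are an assumption (only the `should_use_preemptible` name is confirmed by its later use in `_resolve_preemptible`):

```python
class PreemptibleParam:
    """Illustrative holder for the --preemptible value (bool or int)."""

    def __init__(self, value):
        self._value = value

    def should_use_preemptible(self, attempt_number):
        if isinstance(self._value, bool):   # bare flag, or flag absent
            return self._value
        return attempt_number <= self._value  # --preemptible N

assert PreemptibleParam(True).should_use_preemptible(7)    # always preemptible
assert PreemptibleParam(2).should_use_preemptible(2)       # attempt 2 of N=2
assert not PreemptibleParam(2).should_use_preemptible(3)   # past the limit
```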
(default: None)\"\"\")\n google_common.add_argument(\n '--accelerator-count',\n type=int,\n default=0,\n help=\"\"\"The number of accelerators of the specified type to attach.\n By specifying this parameter, you will download and install the\n following third-party software onto your job's Compute Engine\n instances: NVIDIA(R) Tesla(R) drivers and NVIDIA(R) CUDA toolkit.\n (default: 0)\"\"\")\n google_common.add_argument(\n '--credentials-file',\n type=str,\n help='Path to a local file with JSON credentials for a service account.')\n google_common.add_argument(\n '--regions',\n nargs='+',\n help=\"\"\"List of Google Compute Engine regions.\n Only one of --zones and --regions may be specified.\"\"\")\n google_common.add_argument(\n '--machine-type', help='Provider-specific machine type (default: None)')\n google_common.add_argument(\n '--cpu-platform',\n help=\"\"\"The CPU platform to request. Supported values can be found at\n https:\/\/cloud.google.com\/compute\/docs\/instances\/specify-min-cpu-platform\n (default: None)\"\"\")\n google_common.add_argument(\n '--network',\n help=\"\"\"The Compute Engine VPC network name to attach the VM's network\n interface to. The value will be prefixed with global\/networks\/ unless\n it contains a \/, in which case it is assumed to be a fully specified\n network resource URL. (default: None)\"\"\")\n google_common.add_argument(\n '--subnetwork',\n help=\"\"\"The name of the Compute Engine subnetwork to attach the instance\n to. (default: None)\"\"\")\n google_common.add_argument(\n '--use-private-address',\n default=False,\n action='store_true',\n help=\"\"\"If set to true, do not attach a public IP address to the VM.\n (default: False)\"\"\")\n google_common.add_argument(\n '--timeout',\n help=\"\"\"The maximum amount of time to give the task to complete.\n This includes the time spent waiting for a worker to be allocated.\n Time can be listed using a number followed by a unit. Supported units\n are s (seconds), m (minutes), h (hours), d (days), w (weeks). The\n provider-specific default is 7 days. Example: '7d' (7 days).\"\"\")\n google_common.add_argument(\n '--log-interval',\n help=\"\"\"The amount of time to sleep between copies of log files from\n the task to the logging path.\n Time can be listed using a number followed by a unit. Supported units\n are s (seconds), m (minutes), h (hours).\n Example: '5m' (5 minutes). Default is '1m'.\"\"\")\n google_common.add_argument(\n '--ssh',\n default=False,\n action='store_true',\n help=\"\"\"If set to true, start an ssh container in the background\n to allow you to log in using SSH and debug in real time.\n (default: False)\"\"\")\n google_common.add_argument('--nvidia-driver-version', help=argparse.SUPPRESS)\n google_common.add_argument(\n '--service-account',\n type=str,\n help=\"\"\"Email address of the service account to be authorized on the\n Compute Engine VM for each job task. If not specified, the default\n Compute Engine service account for the project will be used.\"\"\")\n google_common.add_argument(\n '--disk-type',\n help=\"\"\"\n The disk type to use for the data disk. Valid values are pd-standard\n pd-ssd and local-ssd. 
The default value is pd-standard.\"\"\")\n google_common.add_argument(\n '--enable-stackdriver-monitoring',\n default=False,\n action='store_true',\n help=\"\"\"If set to true, enables Stackdriver monitoring on the VM.\n (default: False)\"\"\")\n google_common.add_argument(\n '--block-external-network',\n default=False,\n action='store_true',\n help=\"\"\"If set to true, prevents the container for the user's\n script\/command from accessing the external network.\n (default: False)\"\"\")\n\n google_cls_v2 = parser.add_argument_group(\n title='\"google-cls-v2\" provider options',\n description='See also the \"google-common\" options listed above')\n google_cls_v2.add_argument(\n '--location',\n default=job_model.DEFAULT_LOCATION,\n help=\"\"\"Specifies the Google Cloud region to which the pipeline request\n will be sent and where operation metadata will be stored. The associated\n dsub task may be executed in another region if the --regions or --zones\n arguments are specified. (default: {})\"\"\".format(\n job_model.DEFAULT_LOCATION))\n\n args = provider_base.parse_args(\n parser, {\n 'google-cls-v2': ['project', 'logging'],\n 'google-v2': ['project', 'logging'],\n 'test-fails': [],\n 'local': ['logging'],\n }, argv)\n\n if args.provider == 'google-cls-v2':\n _google_cls_v2_parse_arguments(args)\n if args.provider == 'google-v2':\n _google_v2_parse_arguments(args)\n\n return args","function_tokens":["def","_parse_arguments","(","prog",",","argv",")",":","# Handle version flag and exit if it was passed.","param_util",".","handle_version_flag","(",")","parser","=","provider_base",".","create_parser","(","prog",")","# Add dsub core job submission arguments","parser",".","add_argument","(","'--version'",",","'-v'",",","default","=","False",",","help","=","'Print the dsub version and exit.'",")","parser",".","add_argument","(","'--unique-job-id'",",","default","=","False",",","action","=","'store_true'",",","help","=","\"\"\"Experimental: create a unique 32 character UUID for the dsub\n job-id using https:\/\/docs.python.org\/3\/library\/uuid.html.\n (default: False)\"\"\"",")","parser",".","add_argument","(","'--name'",",","help","=","\"\"\"Name for the job. Defaults to the script name or\n first token of the --command if specified.\"\"\"",")","parser",".","add_argument","(","'--tasks'",",","nargs","=","'*'",",","action","=","TaskParamAction",",","help","=","\"\"\"Path to a file of tab separated values (TSV) for task parameters.\n The file may be located in the local filesystem or in a Google Cloud\n Storage bucket.\n\n The first line is a list of column headers specifying an --env,\n --input, --input-recursive, --output or --output-recursive variable,\n and each subsequent line specifies the values for a task.\n\n Optionally specify tasks from the file to submit. Can take the form\n \"m\", \"m-\", or \"m-n\" where m and n are task numbers starting at 1.\n (default: None)\"\"\"",",","metavar","=","'FILE M-N'",")","parser",".","add_argument","(","'--image'",",","# Defaults to None so we can emit a warning if not specified","# Will later on be set to DEFAULT_IMAGE","default","=","None",",","help","=","\"\"\"Image name from Docker Hub, Google Container Repository, or other\n Docker image service. The task must have READ access to the\n image. (default: {})\"\"\"",".","format","(","DEFAULT_IMAGE",")",")","parser",".","add_argument","(","'--dry-run'",",","default","=","False",",","action","=","'store_true'",",","help","=","'Print the task(s) that would be run and then exit. 
(default: False)'",")","parser",".","add_argument","(","'--command'",",","help","=","\"\"\"Command to run inside the job\\'s Docker container. This argument\n or the --script argument must be provided.\"\"\"",",","metavar","=","'COMMAND'",")","parser",".","add_argument","(","'--script'",",","help","=","\"\"\"Path to a script that is located in the local file system or\n inside a Google Cloud Storage bucket. This script will be run inside\n the job\\'s Docker container. This argument or the --command\n argument must be provided.\"\"\"",",","metavar","=","'SCRIPT'",")","parser",".","add_argument","(","'--env'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","'Environment variables for the script\\'s execution environment'",",","metavar","=","'KEY=VALUE'",")","parser",".","add_argument","(","'--label'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","'Labels to associate to the job.'",",","metavar","=","'KEY=VALUE'",")","parser",".","add_argument","(","'--input'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","\"\"\"Input path arguments to localize into the script's execution\n environment\"\"\"",",","metavar","=","'KEY=REMOTE_PATH'",")","parser",".","add_argument","(","'--input-recursive'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","\"\"\"Input path arguments to localize recursively into the script\\'s\n execution environment\"\"\"",",","metavar","=","'KEY=REMOTE_PATH'",")","parser",".","add_argument","(","'--output'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","\"\"\"Output path arguments to de-localize from the script\\'s execution\n environment\"\"\"",",","metavar","=","'KEY=REMOTE_PATH'",")","parser",".","add_argument","(","'--output-recursive'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","\"\"\"Output path arguments to de-localize recursively from the script's\n execution environment\"\"\"",",","metavar","=","'KEY=REMOTE_PATH'",")","parser",".","add_argument","(","'--user'",",","'-u'",",","help","=","'User submitting the dsub job, defaults to the current OS user.'",")","parser",".","add_argument","(","'--user-project'",",","help","=","\"\"\"Specify a user project to be billed for all requests to Google\n Cloud Storage (logging, localization, delocalization). This flag exists\n to support accessing Requester Pays buckets (default: None)\"\"\"",")","parser",".","add_argument","(","'--mount'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","\"\"\"Mount a resource such as a bucket, disk, or directory into your\n Docker container\"\"\"",",","metavar","=","'KEY=PATH_SPEC'",")","# Add dsub job management arguments","parser",".","add_argument","(","'--wait'",",","action","=","'store_true'",",","help","=","'Wait for the job to finish all its tasks. (default: False)'",")","parser",".","add_argument","(","'--retries'",",","default","=","0",",","type","=","int",",","help","=","'Number of retries to perform on failed tasks. 
(default: 0)'",")","parser",".","add_argument","(","'--poll-interval'",",","default","=","10",",","type","=","int",",","help","=","'Polling interval (in seconds) for checking job status '","'when --wait or --after are set. (default: 10)'",")","parser",".","add_argument","(","'--after'",",","nargs","=","'+'",",","default","=","[","]",",","help","=","'Job ID(s) to wait for before starting this job.'",")","parser",".","add_argument","(","'--skip'",",","default","=","False",",","action","=","'store_true'",",","help","=","\"\"\"Do not submit the job if all output specified using the --output\n and --output-recursive parameters already exist. Note that wildcard\n and recursive outputs cannot be strictly verified. See the\n documentation for details. (default: False)\"\"\"",")","parser",".","add_argument","(","'--summary'",",","default","=","False",",","action","=","'store_true'",",","help","=","\"\"\"During the --wait loop, display a summary of the results,\n grouped by (job, status). (default: False)\"\"\"",")","# Add dsub resource requirement arguments","parser",".","add_argument","(","'--min-cores'",",","type","=","int",",","help","=","\"\"\"Minimum CPU cores for each job. The default is provider-specific.\n The google-v2 provider default is 1 core.\n The local provider does not allocate resources, but uses available\n resources of your machine.\"\"\"",")","parser",".","add_argument","(","'--min-ram'",",","type","=","float",",","help","=","\"\"\"Minimum RAM per job in GB. The default is provider-specific.\n The google-v2 provider default is 3.75 GB.\n The local provider does not allocate resources, but uses available\n resources of your machine.\"\"\"",")","parser",".","add_argument","(","'--disk-size'",",","default","=","job_model",".","DEFAULT_DISK_SIZE",",","type","=","int",",","help","=","'Size (in GB) of data disk to attach for each job (default: {})'",".","format","(","job_model",".","DEFAULT_DISK_SIZE",")",")","parser",".","add_argument","(","'--logging'",",","help","=","'Cloud Storage path to send logging output'","' (either a folder, or file ending in \".log\")'",")","# Add provider-specific arguments","# Shared between the \"google-cls-v2\" and \"google-v2\" providers","google_common","=","parser",".","add_argument_group","(","title","=","'google-common'",",","description","=","\"\"\"Options common to the \"google-cls-v2\" and \"google-v2\"\n providers\"\"\"",")","google_common",".","add_argument","(","'--project'",",","help","=","'Cloud project ID in which to run the job'",")","google_common",".","add_argument","(","'--boot-disk-size'",",","default","=","job_model",".","DEFAULT_BOOT_DISK_SIZE",",","type","=","int",",","help","=","'Size (in GB) of the boot disk (default: {})'",".","format","(","job_model",".","DEFAULT_BOOT_DISK_SIZE",")",")","google_common",".","add_argument","(","'--preemptible'",",","const","=","param_util",".","preemptile_param_type","(","True",")",",","default","=","param_util",".","preemptile_param_type","(","False",")",",","nargs","=","'?'",",","# Be careful if we ever add positional arguments","type","=","param_util",".","preemptile_param_type",",","help","=","\"\"\"If --preemptible is given without a number, enables preemptible\n VMs for all attempts for all tasks. 
If a number value N is used,\n enables preemptible VMs for up to N attempts for each task.\n Defaults to not using preemptible VMs.\"\"\"",")","google_common",".","add_argument","(","'--zones'",",","nargs","=","'+'",",","help","=","'List of Google Compute Engine zones.'",")","google_common",".","add_argument","(","'--scopes'",",","nargs","=","'+'",",","help","=","\"\"\"Space-separated scopes for Google Compute Engine instances.\n If unspecified, provider will use '%s'\"\"\"","%","','",".","join","(","google_base",".","DEFAULT_SCOPES",")",")","google_common",".","add_argument","(","'--accelerator-type'",",","help","=","\"\"\"The Compute Engine accelerator type. See\n https:\/\/cloud.google.com\/compute\/docs\/gpus\/ for supported GPU types.\n\n Only NVIDIA GPU accelerators are currently supported. If an NVIDIA GPU\n is attached, the required runtime libraries will be made available to\n all containers under \/usr\/local\/nvidia.\n\n Each version of Container-Optimized OS image (used by the Pipelines\n API) has a default supported NVIDIA GPU driver version. See\n https:\/\/cloud.google.com\/container-optimized-os\/docs\/how-to\/run-gpus#install\n\n Note that attaching a GPU increases the worker VM startup time by a\n few minutes. (default: None)\"\"\"",")","google_common",".","add_argument","(","'--accelerator-count'",",","type","=","int",",","default","=","0",",","help","=","\"\"\"The number of accelerators of the specified type to attach.\n By specifying this parameter, you will download and install the\n following third-party software onto your job's Compute Engine\n instances: NVIDIA(R) Tesla(R) drivers and NVIDIA(R) CUDA toolkit.\n (default: 0)\"\"\"",")","google_common",".","add_argument","(","'--credentials-file'",",","type","=","str",",","help","=","'Path to a local file with JSON credentials for a service account.'",")","google_common",".","add_argument","(","'--regions'",",","nargs","=","'+'",",","help","=","\"\"\"List of Google Compute Engine regions.\n Only one of --zones and --regions may be specified.\"\"\"",")","google_common",".","add_argument","(","'--machine-type'",",","help","=","'Provider-specific machine type (default: None)'",")","google_common",".","add_argument","(","'--cpu-platform'",",","help","=","\"\"\"The CPU platform to request. Supported values can be found at\n https:\/\/cloud.google.com\/compute\/docs\/instances\/specify-min-cpu-platform\n (default: None)\"\"\"",")","google_common",".","add_argument","(","'--network'",",","help","=","\"\"\"The Compute Engine VPC network name to attach the VM's network\n interface to. The value will be prefixed with global\/networks\/ unless\n it contains a \/, in which case it is assumed to be a fully specified\n network resource URL. (default: None)\"\"\"",")","google_common",".","add_argument","(","'--subnetwork'",",","help","=","\"\"\"The name of the Compute Engine subnetwork to attach the instance\n to. (default: None)\"\"\"",")","google_common",".","add_argument","(","'--use-private-address'",",","default","=","False",",","action","=","'store_true'",",","help","=","\"\"\"If set to true, do not attach a public IP address to the VM.\n (default: False)\"\"\"",")","google_common",".","add_argument","(","'--timeout'",",","help","=","\"\"\"The maximum amount of time to give the task to complete.\n This includes the time spent waiting for a worker to be allocated.\n Time can be listed using a number followed by a unit. Supported units\n are s (seconds), m (minutes), h (hours), d (days), w (weeks). 
The\n provider-specific default is 7 days. Example: '7d' (7 days).\"\"\"",")","google_common",".","add_argument","(","'--log-interval'",",","help","=","\"\"\"The amount of time to sleep between copies of log files from\n the task to the logging path.\n Time can be listed using a number followed by a unit. Supported units\n are s (seconds), m (minutes), h (hours).\n Example: '5m' (5 minutes). Default is '1m'.\"\"\"",")","google_common",".","add_argument","(","'--ssh'",",","default","=","False",",","action","=","'store_true'",",","help","=","\"\"\"If set to true, start an ssh container in the background\n to allow you to log in using SSH and debug in real time.\n (default: False)\"\"\"",")","google_common",".","add_argument","(","'--nvidia-driver-version'",",","help","=","argparse",".","SUPPRESS",")","google_common",".","add_argument","(","'--service-account'",",","type","=","str",",","help","=","\"\"\"Email address of the service account to be authorized on the\n Compute Engine VM for each job task. If not specified, the default\n Compute Engine service account for the project will be used.\"\"\"",")","google_common",".","add_argument","(","'--disk-type'",",","help","=","\"\"\"\n The disk type to use for the data disk. Valid values are pd-standard\n pd-ssd and local-ssd. The default value is pd-standard.\"\"\"",")","google_common",".","add_argument","(","'--enable-stackdriver-monitoring'",",","default","=","False",",","action","=","'store_true'",",","help","=","\"\"\"If set to true, enables Stackdriver monitoring on the VM.\n (default: False)\"\"\"",")","google_common",".","add_argument","(","'--block-external-network'",",","default","=","False",",","action","=","'store_true'",",","help","=","\"\"\"If set to true, prevents the container for the user's\n script\/command from accessing the external network.\n (default: False)\"\"\"",")","google_cls_v2","=","parser",".","add_argument_group","(","title","=","'\"google-cls-v2\" provider options'",",","description","=","'See also the \"google-common\" options listed above'",")","google_cls_v2",".","add_argument","(","'--location'",",","default","=","job_model",".","DEFAULT_LOCATION",",","help","=","\"\"\"Specifies the Google Cloud region to which the pipeline request\n will be sent and where operation metadata will be stored. The associated\n dsub task may be executed in another region if the --regions or --zones\n arguments are specified. 
(default: {})\"\"\"",".","format","(","job_model",".","DEFAULT_LOCATION",")",")","args","=","provider_base",".","parse_args","(","parser",",","{","'google-cls-v2'",":","[","'project'",",","'logging'","]",",","'google-v2'",":","[","'project'",",","'logging'","]",",","'test-fails'",":","[","]",",","'local'",":","[","'logging'","]",",","}",",","argv",")","if","args",".","provider","==","'google-cls-v2'",":","_google_cls_v2_parse_arguments","(","args",")","if","args",".","provider","==","'google-v2'",":","_google_v2_parse_arguments","(","args",")","return","args"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L238-L599"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_get_job_resources","parameters":"(args)","argument_list":"","return_statement":"return job_model.Resources(\n min_cores=args.min_cores,\n min_ram=args.min_ram,\n machine_type=args.machine_type,\n disk_size=args.disk_size,\n disk_type=args.disk_type,\n boot_disk_size=args.boot_disk_size,\n image=args.image,\n regions=args.regions,\n zones=args.zones,\n logging=logging,\n logging_path=None,\n service_account=args.service_account,\n scopes=args.scopes,\n cpu_platform=args.cpu_platform,\n network=args.network,\n subnetwork=args.subnetwork,\n use_private_address=args.use_private_address,\n accelerator_type=args.accelerator_type,\n accelerator_count=args.accelerator_count,\n nvidia_driver_version=None,\n timeout=timeout,\n log_interval=log_interval,\n ssh=args.ssh,\n enable_stackdriver_monitoring=args.enable_stackdriver_monitoring,\n max_retries=args.retries,\n max_preemptible_attempts=args.preemptible,\n block_external_network=args.block_external_network)","docstring":"Extract job-global resources requirements from input args.\n\n Args:\n args: parsed command-line arguments\n\n Returns:\n Resources object containing the requested resources for the job","docstring_summary":"Extract job-global resources requirements from input args.","docstring_tokens":["Extract","job","-","global","resources","requirements","from","input","args","."],"function":"def _get_job_resources(args):\n \"\"\"Extract job-global resources requirements from input args.\n\n Args:\n args: parsed command-line arguments\n\n Returns:\n Resources object containing the requested resources for the job\n \"\"\"\n logging = param_util.build_logging_param(\n args.logging) if args.logging else None\n timeout = param_util.timeout_in_seconds(args.timeout)\n log_interval = param_util.log_interval_in_seconds(args.log_interval)\n\n return job_model.Resources(\n min_cores=args.min_cores,\n min_ram=args.min_ram,\n machine_type=args.machine_type,\n disk_size=args.disk_size,\n disk_type=args.disk_type,\n boot_disk_size=args.boot_disk_size,\n image=args.image,\n regions=args.regions,\n zones=args.zones,\n logging=logging,\n logging_path=None,\n service_account=args.service_account,\n scopes=args.scopes,\n cpu_platform=args.cpu_platform,\n network=args.network,\n subnetwork=args.subnetwork,\n use_private_address=args.use_private_address,\n accelerator_type=args.accelerator_type,\n accelerator_count=args.accelerator_count,\n nvidia_driver_version=None,\n timeout=timeout,\n log_interval=log_interval,\n ssh=args.ssh,\n enable_stackdriver_monitoring=args.enable_stackdriver_monitoring,\n max_retries=args.retries,\n max_preemptible_attempts=args.preemptible,\n 
block_external_network=args.block_external_network)","function_tokens":["def","_get_job_resources","(","args",")",":","logging","=","param_util",".","build_logging_param","(","args",".","logging",")","if","args",".","logging","else","None","timeout","=","param_util",".","timeout_in_seconds","(","args",".","timeout",")","log_interval","=","param_util",".","log_interval_in_seconds","(","args",".","log_interval",")","return","job_model",".","Resources","(","min_cores","=","args",".","min_cores",",","min_ram","=","args",".","min_ram",",","machine_type","=","args",".","machine_type",",","disk_size","=","args",".","disk_size",",","disk_type","=","args",".","disk_type",",","boot_disk_size","=","args",".","boot_disk_size",",","image","=","args",".","image",",","regions","=","args",".","regions",",","zones","=","args",".","zones",",","logging","=","logging",",","logging_path","=","None",",","service_account","=","args",".","service_account",",","scopes","=","args",".","scopes",",","cpu_platform","=","args",".","cpu_platform",",","network","=","args",".","network",",","subnetwork","=","args",".","subnetwork",",","use_private_address","=","args",".","use_private_address",",","accelerator_type","=","args",".","accelerator_type",",","accelerator_count","=","args",".","accelerator_count",",","nvidia_driver_version","=","None",",","timeout","=","timeout",",","log_interval","=","log_interval",",","ssh","=","args",".","ssh",",","enable_stackdriver_monitoring","=","args",".","enable_stackdriver_monitoring",",","max_retries","=","args",".","retries",",","max_preemptible_attempts","=","args",".","preemptible",",","block_external_network","=","args",".","block_external_network",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L602-L643"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_get_job_metadata","parameters":"(provider, user_id, job_name, script, task_ids,\n user_project, unique_job_id)","argument_list":"","return_statement":"return job_metadata","docstring":"Allow provider to extract job-specific metadata from command-line args.\n\n Args:\n provider: job service provider\n user_id: user submitting the job\n job_name: name for the job\n script: the script to run\n task_ids: a set of the task-ids for all tasks in the job\n user_project: name of the project to be billed for the request\n unique_job_id: generate a unique job id\n\n Returns:\n A dictionary of job-specific metadata (such as job id, name, etc.)","docstring_summary":"Allow provider to extract job-specific metadata from command-line args.","docstring_tokens":["Allow","provider","to","extract","job","-","specific","metadata","from","command","-","line","args","."],"function":"def _get_job_metadata(provider, user_id, job_name, script, task_ids,\n user_project, unique_job_id):\n \"\"\"Allow provider to extract job-specific metadata from command-line args.\n\n Args:\n provider: job service provider\n user_id: user submitting the job\n job_name: name for the job\n script: the script to run\n task_ids: a set of the task-ids for all tasks in the job\n user_project: name of the project to be billed for the request\n unique_job_id: generate a unique job id\n\n Returns:\n A dictionary of job-specific metadata (such as job id, name, etc.)\n \"\"\"\n create_time = dsub_util.replace_timezone(datetime.datetime.now(),\n dateutil.tz.tzlocal())\n user_id = user_id or 
dsub_util.get_os_user()\n job_metadata = provider.prepare_job_metadata(script.name, job_name, user_id)\n if unique_job_id:\n job_metadata['job-id'] = uuid.uuid4().hex\n else:\n # Build the job-id. We want the job-id to be expressive while also\n # having a low-likelihood of collisions.\n #\n # For expressiveness, we:\n # * use the job name (truncated at 10 characters).\n # * insert the user-id\n # * add a datetime value\n # To have a high likelihood of uniqueness, the datetime value is out to\n # hundredths of a second.\n #\n # The full job-id is:\n # ----\n job_metadata['job-id'] = '%s--%s--%s' % (\n job_metadata['job-name'][:10], job_metadata['user-id'],\n create_time.strftime('%y%m%d-%H%M%S-%f')[:16])\n\n job_metadata['create-time'] = create_time\n job_metadata['script'] = script\n job_metadata['user-project'] = user_project\n if task_ids:\n job_metadata['task-ids'] = dsub_util.compact_interval_string(list(task_ids))\n\n return job_metadata","function_tokens":["def","_get_job_metadata","(","provider",",","user_id",",","job_name",",","script",",","task_ids",",","user_project",",","unique_job_id",")",":","create_time","=","dsub_util",".","replace_timezone","(","datetime",".","datetime",".","now","(",")",",","dateutil",".","tz",".","tzlocal","(",")",")","user_id","=","user_id","or","dsub_util",".","get_os_user","(",")","job_metadata","=","provider",".","prepare_job_metadata","(","script",".","name",",","job_name",",","user_id",")","if","unique_job_id",":","job_metadata","[","'job-id'","]","=","uuid",".","uuid4","(",")",".","hex","else",":","# Build the job-id. We want the job-id to be expressive while also","# having a low-likelihood of collisions.","#","# For expressiveness, we:","# * use the job name (truncated at 10 characters).","# * insert the user-id","# * add a datetime value","# To have a high likelihood of uniqueness, the datetime value is out to","# hundredths of a second.","#","# The full job-id is:","# ----","job_metadata","[","'job-id'","]","=","'%s--%s--%s'","%","(","job_metadata","[","'job-name'","]","[",":","10","]",",","job_metadata","[","'user-id'","]",",","create_time",".","strftime","(","'%y%m%d-%H%M%S-%f'",")","[",":","16","]",")","job_metadata","[","'create-time'","]","=","create_time","job_metadata","[","'script'","]","=","script","job_metadata","[","'user-project'","]","=","user_project","if","task_ids",":","job_metadata","[","'task-ids'","]","=","dsub_util",".","compact_interval_string","(","list","(","task_ids",")",")","return","job_metadata"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L646-L691"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_resolve_task_logging","parameters":"(job_metadata, job_resources, task_descriptors)","argument_list":"","return_statement":"","docstring":"Resolve the logging path from job and task properties.\n\n Args:\n job_metadata: Job metadata, such as job-id, job-name, and user-id.\n job_resources: Resources specified such as ram, cpu, and logging path.\n task_descriptors: Task metadata, parameters, and resources.\n\n Resolve the logging path, which may have substitution parameters such as\n job-id, task-id, user-id, and job-name.","docstring_summary":"Resolve the logging path from job and task properties.","docstring_tokens":["Resolve","the","logging","path","from","job","and","task","properties","."],"function":"def _resolve_task_logging(job_metadata, 
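The default job-id built in `_get_job_metadata` above combines the job name (truncated to 10 characters), the user id, and a timestamp truncated to hundredths of a second, which keeps the id expressive while making collisions unlikely. A self-contained sketch reproducing that format:

```python
import datetime

def make_job_id(job_name, user_id, create_time):
    """Build '<name[:10]>--<user>--<yymmdd-hhmmss-hh>' as described above."""
    return '%s--%s--%s' % (job_name[:10], user_id,
                           create_time.strftime('%y%m%d-%H%M%S-%f')[:16])

t = datetime.datetime(2024, 1, 2, 3, 4, 5, 678900)
print(make_job_id('my-long-job-name', 'alice', t))
# my-long-jo--alice--240102-030405-67
```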
job_resources, task_descriptors):\n \"\"\"Resolve the logging path from job and task properties.\n\n Args:\n job_metadata: Job metadata, such as job-id, job-name, and user-id.\n job_resources: Resources specified such as ram, cpu, and logging path.\n task_descriptors: Task metadata, parameters, and resources.\n\n Resolve the logging path, which may have substitution parameters such as\n job-id, task-id, user-id, and job-name.\n \"\"\"\n if not job_resources.logging:\n return\n\n for task_descriptor in task_descriptors:\n logging_uri = provider_base.format_logging_uri(\n job_resources.logging.uri, job_metadata, task_descriptor.task_metadata)\n logging_path = job_model.LoggingParam(logging_uri,\n job_resources.logging.file_provider)\n\n if task_descriptor.task_resources:\n task_descriptor.task_resources = task_descriptor.task_resources._replace(\n logging_path=logging_path)\n else:\n task_descriptor.task_resources = job_model.Resources(\n logging_path=logging_path)","function_tokens":["def","_resolve_task_logging","(","job_metadata",",","job_resources",",","task_descriptors",")",":","if","not","job_resources",".","logging",":","return","for","task_descriptor","in","task_descriptors",":","logging_uri","=","provider_base",".","format_logging_uri","(","job_resources",".","logging",".","uri",",","job_metadata",",","task_descriptor",".","task_metadata",")","logging_path","=","job_model",".","LoggingParam","(","logging_uri",",","job_resources",".","logging",".","file_provider",")","if","task_descriptor",".","task_resources",":","task_descriptor",".","task_resources","=","task_descriptor",".","task_resources",".","_replace","(","logging_path","=","logging_path",")","else",":","task_descriptor",".","task_resources","=","job_model",".","Resources","(","logging_path","=","logging_path",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L694-L719"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_resolve_preemptible","parameters":"(job_resources, task_descriptors)","argument_list":"","return_statement":"","docstring":"Resolve whether or not to use a preemptible machine.\n\n Args:\n job_resources: Resources specified such as max_preemptible_attempts.\n task_descriptors: Task metadata, parameters, and resources.","docstring_summary":"Resolve whether or not to use a preemptible machine.","docstring_tokens":["Resolve","whether","or","not","to","use","a","preemptible","machine","."],"function":"def _resolve_preemptible(job_resources, task_descriptors):\n \"\"\"Resolve whether or not to use a preemptible machine.\n\n Args:\n job_resources: Resources specified such as max_preemptible_attempts.\n task_descriptors: Task metadata, parameters, and resources.\n \"\"\"\n # Determine if the next attempt should be preemptible\n for task_descriptor in task_descriptors:\n # The original attempt is attempt number 1.\n # The first retry is attempt number 2.\n attempt_number = task_descriptor.task_metadata.get('task-attempt', 1)\n max_preemptible_attempts = job_resources.max_preemptible_attempts\n if max_preemptible_attempts:\n use_preemptible = max_preemptible_attempts.should_use_preemptible(\n attempt_number)\n else:\n use_preemptible = job_model.DEFAULT_PREEMPTIBLE\n task_descriptor.task_resources = task_descriptor.task_resources._replace(\n 
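`_resolve_task_logging` above updates task resources with `._replace`, the `namedtuple` method that returns a copy with selected fields changed rather than mutating in place. A minimal illustration (dsub's real `job_model.Resources` has many more fields):

```python
import collections

Resources = collections.namedtuple('Resources', ['logging_path', 'preemptible'])

r1 = Resources(logging_path=None, preemptible=False)
r2 = r1._replace(logging_path='gs://bucket/job-1.log')  # copy, not mutation
print(r1.logging_path)  # None
print(r2.logging_path)  # gs://bucket/job-1.log
```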
preemptible=use_preemptible)","function_tokens":["def","_resolve_preemptible","(","job_resources",",","task_descriptors",")",":","# Determine if the next attempt should be preemptible","for","task_descriptor","in","task_descriptors",":","# The original attempt is attempt number 1.","# The first retry is attempt number 2.","attempt_number","=","task_descriptor",".","task_metadata",".","get","(","'task-attempt'",",","1",")","max_preemptible_attempts","=","job_resources",".","max_preemptible_attempts","if","max_preemptible_attempts",":","use_preemptible","=","max_preemptible_attempts",".","should_use_preemptible","(","attempt_number",")","else",":","use_preemptible","=","job_model",".","DEFAULT_PREEMPTIBLE","task_descriptor",".","task_resources","=","task_descriptor",".","task_resources",".","_replace","(","preemptible","=","use_preemptible",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L722-L741"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_resolve_task_resources","parameters":"(job_metadata, job_resources, task_descriptors)","argument_list":"","return_statement":"","docstring":"Resolve task properties (such as the logging path) from job properties.\n\n Args:\n job_metadata: Job metadata, such as job-id, job-name, and user-id.\n job_resources: Resources specified such as ram, cpu, and logging path.\n task_descriptors: Task metadata, parameters, and resources. This function\n exists to be called at the point that all job properties have been\n validated and resolved. It is also called prior to re-trying a task.\n Right now we resolve two properties: 1) the logging path, which may have\n substitution parameters such as job-id, task-id, task-attempt, user-id, and\n job-name. and 2) preemptible, which depends on how many preemptible attempts\n we have done.","docstring_summary":"Resolve task properties (such as the logging path) from job properties.","docstring_tokens":["Resolve","task","properties","(","such","as","the","logging","path",")","from","job","properties","."],"function":"def _resolve_task_resources(job_metadata, job_resources, task_descriptors):\n \"\"\"Resolve task properties (such as the logging path) from job properties.\n\n Args:\n job_metadata: Job metadata, such as job-id, job-name, and user-id.\n job_resources: Resources specified such as ram, cpu, and logging path.\n task_descriptors: Task metadata, parameters, and resources. This function\n exists to be called at the point that all job properties have been\n validated and resolved. It is also called prior to re-trying a task.\n Right now we resolve two properties: 1) the logging path, which may have\n substitution parameters such as job-id, task-id, task-attempt, user-id, and\n job-name. 
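`_resolve_preemptible` above numbers attempts so that the original run is attempt 1 and the first retry is attempt 2. A self-contained restatement of the resulting rule for `--preemptible N`:

```python
def should_use_preemptible(attempt_number, max_preemptible_attempts):
    """Attempts 1..N run on preemptible VMs; later retries use standard VMs."""
    return attempt_number <= max_preemptible_attempts

assert should_use_preemptible(1, 1)        # original attempt: preemptible
assert not should_use_preemptible(2, 1)    # first retry: standard VM
```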
and 2) preemptible, which depends on how many preemptible attempts\n we have done.\n \"\"\"\n _resolve_task_logging(job_metadata, job_resources, task_descriptors)\n _resolve_preemptible(job_resources, task_descriptors)","function_tokens":["def","_resolve_task_resources","(","job_metadata",",","job_resources",",","task_descriptors",")",":","_resolve_task_logging","(","job_metadata",",","job_resources",",","task_descriptors",")","_resolve_preemptible","(","job_resources",",","task_descriptors",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L744-L759"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_wait_after","parameters":"(provider, job_ids, poll_interval, stop_on_failure, summary)","argument_list":"","return_statement":"return error_messages","docstring":"Print status info as we wait for those jobs.\n\n Blocks until either all of the listed jobs succeed,\n or one of them fails.\n\n Args:\n provider: job service provider\n job_ids: a set of job IDs (string) to wait for\n poll_interval: integer seconds to wait between iterations\n stop_on_failure: whether to stop waiting if one of the tasks fails.\n summary: whether to output summary messages\n\n Returns:\n Empty list if there was no error,\n a list of error messages from the failed tasks otherwise.","docstring_summary":"Print status info as we wait for those jobs.","docstring_tokens":["Print","status","info","as","we","wait","for","those","jobs","."],"function":"def _wait_after(provider, job_ids, poll_interval, stop_on_failure, summary):\n \"\"\"Print status info as we wait for those jobs.\n\n Blocks until either all of the listed jobs succeed,\n or one of them fails.\n\n Args:\n provider: job service provider\n job_ids: a set of job IDs (string) to wait for\n poll_interval: integer seconds to wait between iterations\n stop_on_failure: whether to stop waiting if one of the tasks fails.\n summary: whether to output summary messages\n\n Returns:\n Empty list if there was no error,\n a list of error messages from the failed tasks otherwise.\n \"\"\"\n\n # Each time through the loop, the job_set is re-set to the jobs remaining to\n # check. Jobs are removed from the list when they complete.\n #\n # We exit the loop when:\n # * No jobs remain are running, OR\n # * stop_on_failure is TRUE AND at least one job returned an error\n\n # remove NO_JOB\n job_ids_to_check = {j for j in job_ids if j != dsub_util.NO_JOB}\n error_messages = []\n while job_ids_to_check and (not error_messages or not stop_on_failure):\n print('Waiting for: %s.' % (', '.join(job_ids_to_check)))\n\n # Poll until any remaining jobs have completed\n jobs_left = _wait_for_any_job(provider, job_ids_to_check, poll_interval,\n summary)\n\n # Calculate which jobs just completed\n jobs_completed = job_ids_to_check.difference(jobs_left)\n\n # Get all tasks for the newly completed jobs\n tasks_completed = provider.lookup_job_tasks({'*'}, job_ids=jobs_completed)\n\n # We don't want to overwhelm the user with output when there are many\n # tasks per job. 
So we get a single \"dominant\" task for each of the\n # completed jobs (one that is representative of the job's fate).\n dominant_job_tasks = _dominant_task_for_jobs(tasks_completed)\n if len(dominant_job_tasks) != len(jobs_completed):\n # print info about the jobs we couldn't find\n # (should only occur for \"--after\" where the job ID is a typo).\n jobs_found = dsub_util.tasks_to_job_ids(dominant_job_tasks)\n jobs_not_found = jobs_completed.difference(jobs_found)\n for j in jobs_not_found:\n error = '%s: not found' % j\n dsub_util.print_error(' %s' % error)\n error_messages += [error]\n\n # Print the dominant task for the completed jobs\n for t in dominant_job_tasks:\n job_id = t.get_field('job-id')\n status = t.get_field('task-status')\n print(' %s: %s' % (str(job_id), str(status)))\n if status in ['FAILURE', 'CANCELED']:\n error_messages += [provider.get_tasks_completion_messages([t])]\n\n job_ids_to_check = jobs_left\n\n return error_messages","function_tokens":["def","_wait_after","(","provider",",","job_ids",",","poll_interval",",","stop_on_failure",",","summary",")",":","# Each time through the loop, the job_set is re-set to the jobs remaining to","# check. Jobs are removed from the list when they complete.","#","# We exit the loop when:","# * No jobs remain are running, OR","# * stop_on_failure is TRUE AND at least one job returned an error","# remove NO_JOB","job_ids_to_check","=","{","j","for","j","in","job_ids","if","j","!=","dsub_util",".","NO_JOB","}","error_messages","=","[","]","while","job_ids_to_check","and","(","not","error_messages","or","not","stop_on_failure",")",":","print","(","'Waiting for: %s.'","%","(","', '",".","join","(","job_ids_to_check",")",")",")","# Poll until any remaining jobs have completed","jobs_left","=","_wait_for_any_job","(","provider",",","job_ids_to_check",",","poll_interval",",","summary",")","# Calculate which jobs just completed","jobs_completed","=","job_ids_to_check",".","difference","(","jobs_left",")","# Get all tasks for the newly completed jobs","tasks_completed","=","provider",".","lookup_job_tasks","(","{","'*'","}",",","job_ids","=","jobs_completed",")","# We don't want to overwhelm the user with output when there are many","# tasks per job. 
So we get a single \"dominant\" task for each of the","# completed jobs (one that is representative of the job's fate).","dominant_job_tasks","=","_dominant_task_for_jobs","(","tasks_completed",")","if","len","(","dominant_job_tasks",")","!=","len","(","jobs_completed",")",":","# print info about the jobs we couldn't find","# (should only occur for \"--after\" where the job ID is a typo).","jobs_found","=","dsub_util",".","tasks_to_job_ids","(","dominant_job_tasks",")","jobs_not_found","=","jobs_completed",".","difference","(","jobs_found",")","for","j","in","jobs_not_found",":","error","=","'%s: not found'","%","j","dsub_util",".","print_error","(","' %s'","%","error",")","error_messages","+=","[","error","]","# Print the dominant task for the completed jobs","for","t","in","dominant_job_tasks",":","job_id","=","t",".","get_field","(","'job-id'",")","status","=","t",".","get_field","(","'task-status'",")","print","(","' %s: %s'","%","(","str","(","job_id",")",",","str","(","status",")",")",")","if","status","in","[","'FAILURE'",",","'CANCELED'","]",":","error_messages","+=","[","provider",".","get_tasks_completion_messages","(","[","t","]",")","]","job_ids_to_check","=","jobs_left","return","error_messages"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L762-L827"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_wait_and_retry","parameters":"(provider, job_id, poll_interval, retries, job_descriptor,\n summary)","argument_list":"","return_statement":"","docstring":"Wait for job and retry any tasks that fail.\n\n Stops retrying an individual task when: it succeeds, is canceled, or has been\n retried \"retries\" times.\n\n This function exits when there are no tasks running and there are no tasks\n eligible to be retried.\n\n Args:\n provider: job service provider\n job_id: a single job ID (string) to wait for\n poll_interval: integer seconds to wait between iterations\n retries: number of retries\n job_descriptor: job descriptor used to originally submit job\n summary: whether to output summary messages\n\n Returns:\n Empty list if there was no error,\n a list containing an error message from a failed task otherwise.","docstring_summary":"Wait for job and retry any tasks that fail.","docstring_tokens":["Wait","for","job","and","retry","any","tasks","that","fail","."],"function":"def _wait_and_retry(provider, job_id, poll_interval, retries, job_descriptor,\n summary):\n \"\"\"Wait for job and retry any tasks that fail.\n\n Stops retrying an individual task when: it succeeds, is canceled, or has been\n retried \"retries\" times.\n\n This function exits when there are no tasks running and there are no tasks\n eligible to be retried.\n\n Args:\n provider: job service provider\n job_id: a single job ID (string) to wait for\n poll_interval: integer seconds to wait between iterations\n retries: number of retries\n job_descriptor: job descriptor used to originally submit job\n summary: whether to output summary messages\n\n Returns:\n Empty list if there was no error,\n a list containing an error message from a failed task otherwise.\n \"\"\"\n\n while True:\n formatted_tasks = []\n tasks = provider.lookup_job_tasks({'*'}, job_ids=[job_id])\n\n running_tasks = set()\n completed_tasks = set()\n canceled_tasks = set()\n fully_failed_tasks = set()\n task_fail_count = dict()\n\n # This is an arbitrary task that is either fully 
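The `_wait_after` loop above follows a common polling shape: keep a set of outstanding job ids, subtract the ones that completed each round, and optionally stop early on failure. A skeleton of that shape with the provider poll stubbed out (details such as dominant-task reporting and NO_JOB filtering are omitted):

```python
import time

def wait_after(job_ids, poll_once, poll_interval=0, stop_on_failure=True):
    """Poll until all jobs finish, or any job fails when stop_on_failure is set."""
    remaining = set(job_ids)
    errors = []
    while remaining and (not errors or not stop_on_failure):
        still_running, newly_failed = poll_once(remaining)  # provider stub
        errors.extend(newly_failed)
        remaining = set(still_running)
        if remaining:
            time.sleep(poll_interval)
    return errors

# Stub provider: job-2 fails on the first poll, job-1 completes cleanly.
print(wait_after(['job-1', 'job-2'],
                 poll_once=lambda jobs: (set(), ['job-2: FAILURE'])))
# ['job-2: FAILURE']
```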
failed or canceled (with\n # preference for the former).\n message_task = None\n\n task_dict = dict()\n for t in tasks:\n task_id = t.get_field('task-id')\n if task_id is not None:\n task_id = int(task_id)\n\n task_dict[task_id] = t\n\n status = t.get_field('task-status')\n if status == 'FAILURE':\n # Could compute this from task-attempt as well.\n task_fail_count[task_id] = task_fail_count.get(task_id, 0) + 1\n if task_fail_count[task_id] > retries:\n fully_failed_tasks.add(task_id)\n message_task = t\n elif status == 'CANCELED':\n canceled_tasks.add(task_id)\n if not message_task:\n message_task = t\n elif status == 'SUCCESS':\n completed_tasks.add(task_id)\n elif status == 'RUNNING':\n running_tasks.add(task_id)\n\n if summary:\n formatted_tasks.append(\n output_formatter.prepare_row(t, full=False, summary=True))\n\n if summary:\n formatter = output_formatter.TextOutput(full=False)\n formatter.prepare_and_print_table(formatted_tasks, summary)\n\n retry_tasks = (\n set(task_fail_count).difference(fully_failed_tasks)\n .difference(running_tasks).difference(completed_tasks)\n .difference(canceled_tasks))\n\n # job completed.\n if not retry_tasks and not running_tasks:\n # If there are any fully failed tasks, return the completion message of an\n # arbitrary one.\n # If not, but there are canceled tasks, return the completion message of\n # an arbitrary one.\n if message_task:\n return [provider.get_tasks_completion_messages([message_task])]\n\n # Otherwise successful completion.\n return []\n\n for task_id in retry_tasks:\n identifier = '{}.{}'.format(job_id, task_id) if task_id else job_id\n print(' {} (attempt {}) failed. Retrying.'.format(\n identifier, task_fail_count[task_id]))\n msg = task_dict[task_id].get_field('status-message')\n print(' Failure message: ' + msg)\n\n _retry_task(provider, job_descriptor, task_id,\n task_fail_count[task_id] + 1)\n\n SLEEP_FUNCTION(poll_interval)","function_tokens":["def","_wait_and_retry","(","provider",",","job_id",",","poll_interval",",","retries",",","job_descriptor",",","summary",")",":","while","True",":","formatted_tasks","=","[","]","tasks","=","provider",".","lookup_job_tasks","(","{","'*'","}",",","job_ids","=","[","job_id","]",")","running_tasks","=","set","(",")","completed_tasks","=","set","(",")","canceled_tasks","=","set","(",")","fully_failed_tasks","=","set","(",")","task_fail_count","=","dict","(",")","# This is an arbitrary task that is either fully failed or canceled (with","# preference for the former).","message_task","=","None","task_dict","=","dict","(",")","for","t","in","tasks",":","task_id","=","t",".","get_field","(","'task-id'",")","if","task_id","is","not","None",":","task_id","=","int","(","task_id",")","task_dict","[","task_id","]","=","t","status","=","t",".","get_field","(","'task-status'",")","if","status","==","'FAILURE'",":","# Could compute this from task-attempt as 
well.","task_fail_count","[","task_id","]","=","task_fail_count",".","get","(","task_id",",","0",")","+","1","if","task_fail_count","[","task_id","]",">","retries",":","fully_failed_tasks",".","add","(","task_id",")","message_task","=","t","elif","status","==","'CANCELED'",":","canceled_tasks",".","add","(","task_id",")","if","not","message_task",":","message_task","=","t","elif","status","==","'SUCCESS'",":","completed_tasks",".","add","(","task_id",")","elif","status","==","'RUNNING'",":","running_tasks",".","add","(","task_id",")","if","summary",":","formatted_tasks",".","append","(","output_formatter",".","prepare_row","(","t",",","full","=","False",",","summary","=","True",")",")","if","summary",":","formatter","=","output_formatter",".","TextOutput","(","full","=","False",")","formatter",".","prepare_and_print_table","(","formatted_tasks",",","summary",")","retry_tasks","=","(","set","(","task_fail_count",")",".","difference","(","fully_failed_tasks",")",".","difference","(","running_tasks",")",".","difference","(","completed_tasks",")",".","difference","(","canceled_tasks",")",")","# job completed.","if","not","retry_tasks","and","not","running_tasks",":","# If there are any fully failed tasks, return the completion message of an","# arbitrary one.","# If not, but there are canceled tasks, return the completion message of","# an arbitrary one.","if","message_task",":","return","[","provider",".","get_tasks_completion_messages","(","[","message_task","]",")","]","# Otherwise successful completion.","return","[","]","for","task_id","in","retry_tasks",":","identifier","=","'{}.{}'",".","format","(","job_id",",","task_id",")","if","task_id","else","job_id","print","(","' {} (attempt {}) failed. Retrying.'",".","format","(","identifier",",","task_fail_count","[","task_id","]",")",")","msg","=","task_dict","[","task_id","]",".","get_field","(","'status-message'",")","print","(","' Failure message: '","+","msg",")","_retry_task","(","provider",",","job_descriptor",",","task_id",",","task_fail_count","[","task_id","]","+","1",")","SLEEP_FUNCTION","(","poll_interval",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L830-L926"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_retry_task","parameters":"(provider, job_descriptor, task_id, task_attempt)","argument_list":"","return_statement":"","docstring":"Retry task_id (numeric id) assigning it task_attempt.","docstring_summary":"Retry task_id (numeric id) assigning it task_attempt.","docstring_tokens":["Retry","task_id","(","numeric","id",")","assigning","it","task_attempt","."],"function":"def _retry_task(provider, job_descriptor, task_id, task_attempt):\n \"\"\"Retry task_id (numeric id) assigning it task_attempt.\"\"\"\n td_orig = job_descriptor.find_task_descriptor(task_id)\n\n new_task_descriptors = [\n job_model.TaskDescriptor({\n 'task-id': task_id,\n 'task-attempt': task_attempt\n }, td_orig.task_params, td_orig.task_resources)\n ]\n\n # Update the logging path and preemptible field.\n _resolve_task_resources(job_descriptor.job_metadata,\n job_descriptor.job_resources, new_task_descriptors)\n\n provider.submit_job(\n job_model.JobDescriptor(\n job_descriptor.job_metadata, job_descriptor.job_params,\n job_descriptor.job_resources, new_task_descriptors), 
False)","function_tokens":["def","_retry_task","(","provider",",","job_descriptor",",","task_id",",","task_attempt",")",":","td_orig","=","job_descriptor",".","find_task_descriptor","(","task_id",")","new_task_descriptors","=","[","job_model",".","TaskDescriptor","(","{","'task-id'",":","task_id",",","'task-attempt'",":","task_attempt","}",",","td_orig",".","task_params",",","td_orig",".","task_resources",")","]","# Update the logging path and preemptible field.","_resolve_task_resources","(","job_descriptor",".","job_metadata",",","job_descriptor",".","job_resources",",","new_task_descriptors",")","provider",".","submit_job","(","job_model",".","JobDescriptor","(","job_descriptor",".","job_metadata",",","job_descriptor",".","job_params",",","job_descriptor",".","job_resources",",","new_task_descriptors",")",",","False",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L929-L947"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_dominant_task_for_jobs","parameters":"(tasks)","argument_list":"","return_statement":"return ret","docstring":"A list with, for each job, its dominant task.\n\n The dominant task is the one that exemplifies its job's\n status. It is either:\n - the first (FAILURE or CANCELED) task, or if none\n - the first RUNNING task, or if none\n - the first SUCCESS task.\n\n Args:\n tasks: a list of tasks to consider\n\n Returns:\n A list with, for each job, its dominant task.","docstring_summary":"A list with, for each job, its dominant task.","docstring_tokens":["A","list","with","for","each","job","its","dominant","task","."],"function":"def _dominant_task_for_jobs(tasks):\n \"\"\"A list with, for each job, its dominant task.\n\n The dominant task is the one that exemplifies its job's\n status. 
It is either:\n - the first (FAILURE or CANCELED) task, or if none\n - the first RUNNING task, or if none\n - the first SUCCESS task.\n\n Args:\n tasks: a list of tasks to consider\n\n Returns:\n A list with, for each job, its dominant task.\n \"\"\"\n\n per_job = _group_tasks_by_jobid(tasks)\n\n ret = []\n for job_id in per_job.keys():\n tasks_in_salience_order = sorted(per_job[job_id], key=_importance_of_task)\n ret.append(tasks_in_salience_order[0])\n return ret","function_tokens":["def","_dominant_task_for_jobs","(","tasks",")",":","per_job","=","_group_tasks_by_jobid","(","tasks",")","ret","=","[","]","for","job_id","in","per_job",".","keys","(",")",":","tasks_in_salience_order","=","sorted","(","per_job","[","job_id","]",",","key","=","_importance_of_task",")","ret",".","append","(","tasks_in_salience_order","[","0","]",")","return","ret"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L950-L972"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_group_tasks_by_jobid","parameters":"(tasks)","argument_list":"","return_statement":"return ret","docstring":"A defaultdict with, for each job, a list of its tasks.","docstring_summary":"A defaultdict with, for each job, a list of its tasks.","docstring_tokens":["A","defaultdict","with","for","each","job","a","list","of","its","tasks","."],"function":"def _group_tasks_by_jobid(tasks):\n \"\"\"A defaultdict with, for each job, a list of its tasks.\"\"\"\n ret = collections.defaultdict(list)\n for t in tasks:\n ret[t.get_field('job-id')].append(t)\n return ret","function_tokens":["def","_group_tasks_by_jobid","(","tasks",")",":","ret","=","collections",".","defaultdict","(","list",")","for","t","in","tasks",":","ret","[","t",".","get_field","(","'job-id'",")","]",".","append","(","t",")","return","ret"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L975-L980"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_importance_of_task","parameters":"(task)","argument_list":"","return_statement":"return (importance[task.get_field('task-status')],\n task.get_field(\n 'end-time',\n dsub_util.replace_timezone(datetime.datetime.max,\n dateutil.tz.tzlocal())))","docstring":"Tuple (importance, end-time). Smaller values are more important.","docstring_summary":"Tuple (importance, end-time). Smaller values are more important.","docstring_tokens":["Tuple","(","importance","end","-","time",")",".","Smaller","values","are","more","important","."],"function":"def _importance_of_task(task):\n \"\"\"Tuple (importance, end-time). 
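`_dominant_task_for_jobs` combines `_group_tasks_by_jobid` with the `_importance_of_task` ordering defined in the next record. A self-contained sketch of the roll-up, where plain dicts stand in for provider task objects (hypothetical data, not the dsub API):

    import collections

    tasks = [
        {'job-id': 'job-1', 'task-status': 'SUCCESS'},
        {'job-id': 'job-1', 'task-status': 'FAILURE'},
        {'job-id': 'job-2', 'task-status': 'RUNNING'},
    ]
    importance = {'FAILURE': 0, 'CANCELED': 0, 'RUNNING': 1, 'SUCCESS': 2}

    per_job = collections.defaultdict(list)
    for t in tasks:
        per_job[t['job-id']].append(t)

    # min(...) over the importance key is equivalent to sorted(...)[0].
    dominant = {j: min(ts, key=lambda t: importance[t['task-status']])
                for j, ts in per_job.items()}
    assert dominant['job-1']['task-status'] == 'FAILURE'
    assert dominant['job-2']['task-status'] == 'RUNNING'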
Smaller values are more important.\"\"\"\n # The status of a job is going to be determined by the roll-up of its tasks.\n # A FAILURE or CANCELED task means the job has FAILED.\n # If none, then any RUNNING task, the job is still RUNNING.\n # If none, then the job status is SUCCESS.\n #\n # Thus the dominant task for each job is one that exemplifies its\n # status:\n #\n # 1- The first (FAILURE or CANCELED) task, or if none\n # 2- The first RUNNING task, or if none\n # 3- The first SUCCESS task.\n importance = {'FAILURE': 0, 'CANCELED': 0, 'RUNNING': 1, 'SUCCESS': 2}\n return (importance[task.get_field('task-status')],\n task.get_field(\n 'end-time',\n dsub_util.replace_timezone(datetime.datetime.max,\n dateutil.tz.tzlocal())))","function_tokens":["def","_importance_of_task","(","task",")",":","# The status of a job is going to be determined by the roll-up of its tasks.","# A FAILURE or CANCELED task means the job has FAILED.","# If none, then any RUNNING task, the job is still RUNNING.","# If none, then the job status is SUCCESS.","#","# Thus the dominant task for each job is one that exemplifies its","# status:","#","# 1- The first (FAILURE or CANCELED) task, or if none","# 2- The first RUNNING task, or if none","# 3- The first SUCCESS task.","importance","=","{","'FAILURE'",":","0",",","'CANCELED'",":","0",",","'RUNNING'",":","1",",","'SUCCESS'",":","2","}","return","(","importance","[","task",".","get_field","(","'task-status'",")","]",",","task",".","get_field","(","'end-time'",",","dsub_util",".","replace_timezone","(","datetime",".","datetime",".","max",",","dateutil",".","tz",".","tzlocal","(",")",")",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L983-L1001"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_wait_for_any_job","parameters":"(provider, job_ids, poll_interval, summary)","argument_list":"","return_statement":"","docstring":"Waits until any of the listed jobs is not running.\n\n In particular, if any of the jobs sees one of its tasks fail,\n we count the whole job as failing (but do not terminate the remaining\n tasks ourselves).\n\n Args:\n provider: job service provider\n job_ids: a list of job IDs (string) to wait for\n poll_interval: integer seconds to wait between iterations\n summary: whether to output summary messages\n\n Returns:\n A set of the jobIDs with still at least one running task.","docstring_summary":"Waits until any of the listed jobs is not running.","docstring_tokens":["Waits","until","any","of","the","listed","jobs","is","not","running","."],"function":"def _wait_for_any_job(provider, job_ids, poll_interval, summary):\n \"\"\"Waits until any of the listed jobs is not running.\n\n In particular, if any of the jobs sees one of its tasks fail,\n we count the whole job as failing (but do not terminate the remaining\n tasks ourselves).\n\n Args:\n provider: job service provider\n job_ids: a list of job IDs (string) to wait for\n poll_interval: integer seconds to wait between iterations\n summary: whether to output summary messages\n\n Returns:\n A set of the jobIDs with still at least one running task.\n \"\"\"\n if not job_ids:\n return\n while True:\n formatted_tasks = []\n tasks = provider.lookup_job_tasks({'*'}, job_ids=job_ids)\n running_jobs = set()\n failed_jobs = set()\n for t in tasks:\n status = t.get_field('task-status')\n job_id = t.get_field('job-id')\n if status 
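`_importance_of_task` breaks ties within a status tier by end-time, substituting a timezone-aware `datetime.max` when a task has no end-time so that such tasks sort last. A quick check of that tuple ordering; `datetime.replace` here stands in for `dsub_util.replace_timezone`:

    import datetime
    import dateutil.tz

    tz = dateutil.tz.tzlocal()
    sentinel = datetime.datetime.max.replace(tzinfo=tz)  # no end-time yet
    done = datetime.datetime(2021, 1, 1, tzinfo=tz)

    # Two FAILURE-tier keys: the finished task outranks the unfinished one.
    assert sorted([(0, sentinel), (0, done)])[0] == (0, done)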
in ['FAILURE', 'CANCELED']:\n failed_jobs.add(job_id)\n if status == 'RUNNING':\n running_jobs.add(job_id)\n\n if summary:\n formatted_tasks.append(\n output_formatter.prepare_row(t, full=False, summary=True))\n\n if summary:\n formatter = output_formatter.TextOutput(full=False)\n formatter.prepare_and_print_table(formatted_tasks, summary)\n\n remaining_jobs = running_jobs.difference(failed_jobs)\n if failed_jobs or len(remaining_jobs) != len(job_ids):\n return remaining_jobs\n SLEEP_FUNCTION(poll_interval)","function_tokens":["def","_wait_for_any_job","(","provider",",","job_ids",",","poll_interval",",","summary",")",":","if","not","job_ids",":","return","while","True",":","formatted_tasks","=","[","]","tasks","=","provider",".","lookup_job_tasks","(","{","'*'","}",",","job_ids","=","job_ids",")","running_jobs","=","set","(",")","failed_jobs","=","set","(",")","for","t","in","tasks",":","status","=","t",".","get_field","(","'task-status'",")","job_id","=","t",".","get_field","(","'job-id'",")","if","status","in","[","'FAILURE'",",","'CANCELED'","]",":","failed_jobs",".","add","(","job_id",")","if","status","==","'RUNNING'",":","running_jobs",".","add","(","job_id",")","if","summary",":","formatted_tasks",".","append","(","output_formatter",".","prepare_row","(","t",",","full","=","False",",","summary","=","True",")",")","if","summary",":","formatter","=","output_formatter",".","TextOutput","(","full","=","False",")","formatter",".","prepare_and_print_table","(","formatted_tasks",",","summary",")","remaining_jobs","=","running_jobs",".","difference","(","failed_jobs",")","if","failed_jobs","or","len","(","remaining_jobs",")","!=","len","(","job_ids",")",":","return","remaining_jobs","SLEEP_FUNCTION","(","poll_interval",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L1004-L1046"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_validate_job_and_task_arguments","parameters":"(job_params, task_descriptors)","argument_list":"","return_statement":"","docstring":"Validates that job and task argument names do not overlap.","docstring_summary":"Validates that job and task argument names do not overlap.","docstring_tokens":["Validates","that","job","and","task","argument","names","do","not","overlap","."],"function":"def _validate_job_and_task_arguments(job_params, task_descriptors):\n \"\"\"Validates that job and task argument names do not overlap.\"\"\"\n\n if not task_descriptors:\n return\n\n task_params = task_descriptors[0].task_params\n\n # The use case for specifying a label or env\/input\/output parameter on\n # the command-line and also including it in the --tasks file is not obvious.\n # Should the command-line override the --tasks file? 
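The exit condition in `_wait_for_any_job` is easy to misread: the loop returns as soon as any watched job has a failed or canceled task, or has simply stopped appearing among the running jobs. With hypothetical sets:

    job_ids = ['job-a', 'job-b']
    running_jobs = {'job-a', 'job-b'}
    failed_jobs = {'job-b'}  # one of job-b's tasks hit FAILURE or CANCELED

    remaining_jobs = running_jobs.difference(failed_jobs)
    if failed_jobs or len(remaining_jobs) != len(job_ids):
        print(remaining_jobs)  # {'job-a'}: only job-a still has running tasks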
Why?\n # Until this use is articulated, generate an error on overlapping names.\n\n # Check labels\n from_jobs = {label.name for label in job_params['labels']}\n from_tasks = {label.name for label in task_params['labels']}\n\n intersect = from_jobs & from_tasks\n if intersect:\n raise ValueError(\n 'Names for labels on the command-line and in the --tasks file must not '\n 'be repeated: {}'.format(','.join(intersect)))\n\n # Check envs, inputs, and outputs, all of which must not overlap each other\n from_jobs = {\n item.name\n for item in job_params['envs'] | job_params['inputs']\n | job_params['outputs']\n }\n from_tasks = {\n item.name\n for item in task_params['envs'] | task_params['inputs']\n | task_params['outputs']\n }\n\n intersect = from_jobs & from_tasks\n if intersect:\n raise ValueError(\n 'Names for envs, inputs, and outputs on the command-line and in the '\n '--tasks file must not be repeated: {}'.format(','.join(intersect)))","function_tokens":["def","_validate_job_and_task_arguments","(","job_params",",","task_descriptors",")",":","if","not","task_descriptors",":","return","task_params","=","task_descriptors","[","0","]",".","task_params","# The use case for specifying a label or env\/input\/output parameter on","# the command-line and also including it in the --tasks file is not obvious.","# Should the command-line override the --tasks file? Why?","# Until this use is articulated, generate an error on overlapping names.","# Check labels","from_jobs","=","{","label",".","name","for","label","in","job_params","[","'labels'","]","}","from_tasks","=","{","label",".","name","for","label","in","task_params","[","'labels'","]","}","intersect","=","from_jobs","&","from_tasks","if","intersect",":","raise","ValueError","(","'Names for labels on the command-line and in the --tasks file must not '","'be repeated: {}'",".","format","(","','",".","join","(","intersect",")",")",")","# Check envs, inputs, and outputs, all of which must not overlap each other","from_jobs","=","{","item",".","name","for","item","in","job_params","[","'envs'","]","|","job_params","[","'inputs'","]","|","job_params","[","'outputs'","]","}","from_tasks","=","{","item",".","name","for","item","in","task_params","[","'envs'","]","|","task_params","[","'inputs'","]","|","task_params","[","'outputs'","]","}","intersect","=","from_jobs","&","from_tasks","if","intersect",":","raise","ValueError","(","'Names for envs, inputs, and outputs on the command-line and in the '","'--tasks file must not be repeated: {}'",".","format","(","','",".","join","(","intersect",")",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L1049-L1088"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"dsub_main","parameters":"(prog, argv)","argument_list":"","return_statement":"return launched_job","docstring":"Main entry point for dsub.","docstring_summary":"Main entry point for dsub.","docstring_tokens":["Main","entry","point","for","dsub","."],"function":"def dsub_main(prog, argv):\n \"\"\"Main entry point for dsub.\"\"\"\n # Parse args and validate\n args = _parse_arguments(prog, argv)\n # intent:\n # * dsub tightly controls the output to stdout.\n # * wrap the main body such that output goes to stderr.\n # * only emit the job-id to stdout (which can then be used programmatically).\n with dsub_util.replace_print():\n launched_job = run_main(args)\n 
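`_validate_job_and_task_arguments` reduces to two disjointness checks over name sets (one for labels, one for envs/inputs/outputs). A sketch of the core check with hypothetical parameter names:

    # Names from command-line flags vs. names from the --tasks file header.
    from_jobs = {'SAMPLE_ID', 'REF'}
    from_tasks = {'REF', 'BAM'}

    intersect = from_jobs & from_tasks
    if intersect:
        print('must not be repeated: ' + ','.join(sorted(intersect)))  # REF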
print(launched_job.get('job-id', ''))\n return launched_job","function_tokens":["def","dsub_main","(","prog",",","argv",")",":","# Parse args and validate","args","=","_parse_arguments","(","prog",",","argv",")","# intent:","# * dsub tightly controls the output to stdout.","# * wrap the main body such that output goes to stderr.","# * only emit the job-id to stdout (which can then be used programmatically).","with","dsub_util",".","replace_print","(",")",":","launched_job","=","run_main","(","args",")","print","(","launched_job",".","get","(","'job-id'",",","''",")",")","return","launched_job"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L1091-L1102"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"run_main","parameters":"(args)","argument_list":"","return_statement":"return run(\n provider_base.get_provider(\n args, resources, credentials_fn=get_credentials),\n _get_job_resources(args),\n job_params,\n task_descriptors,\n name=args.name,\n dry_run=args.dry_run,\n command=args.command,\n script=args.script,\n user=args.user,\n user_project=args.user_project,\n wait=args.wait,\n retries=args.retries,\n max_preemptible_attempts=args.preemptible,\n poll_interval=args.poll_interval,\n after=args.after,\n skip=args.skip,\n project=args.project,\n location=args.location,\n disable_warning=True,\n unique_job_id=args.unique_job_id,\n summary=args.summary)","docstring":"Execute job\/task submission from command-line arguments.","docstring_summary":"Execute job\/task submission from command-line arguments.","docstring_tokens":["Execute","job","\/","task","submission","from","command","-","line","arguments","."],"function":"def run_main(args):\n \"\"\"Execute job\/task submission from command-line arguments.\"\"\"\n\n if args.command and args.script:\n raise ValueError('Cannot supply both a --command and --script flag')\n\n provider_base.check_for_unsupported_flag(args)\n\n # Set up job parameters and job data from a tasks file or flags.\n input_file_param_util = param_util.InputFileParamUtil(\n DEFAULT_INPUT_LOCAL_PATH)\n output_file_param_util = param_util.OutputFileParamUtil(\n DEFAULT_OUTPUT_LOCAL_PATH)\n mount_param_util = param_util.MountParamUtil(DEFAULT_MOUNT_LOCAL_PATH)\n\n # Get job arguments from the command line\n job_params = param_util.args_to_job_params(\n args.env, args.label, args.input, args.input_recursive, args.output,\n args.output_recursive, args.mount, input_file_param_util,\n output_file_param_util, mount_param_util)\n # If --tasks is on the command-line, then get task-specific data\n if args.tasks:\n task_descriptors = param_util.tasks_file_to_task_descriptors(\n args.tasks, args.retries, input_file_param_util, output_file_param_util)\n\n # Validate job data + task data\n _validate_job_and_task_arguments(job_params, task_descriptors)\n else:\n # Create the implicit task\n task_metadata = {'task-id': None}\n if args.retries:\n task_metadata['task-attempt'] = 1\n task_descriptors = [\n job_model.TaskDescriptor(task_metadata, {\n 'labels': set(),\n 'envs': set(),\n 'inputs': set(),\n 'outputs': set()\n }, job_model.Resources())\n ]\n\n # Emit a warning if default image is used\n if args.image is None:\n print('***WARNING: No Docker image specified. 
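The stdout discipline in `dsub_main` (only the job-id reaches stdout; all other output goes to stderr) can be reproduced with the standard library; `contextlib.redirect_stdout` below is a stand-in for `dsub_util.replace_print`, and the job-id is hypothetical:

    import contextlib
    import sys

    with contextlib.redirect_stdout(sys.stderr):
        print('status chatter lands on stderr')   # safe for shell pipelines
    print('job--user--201231-120000-abcd')        # hypothetical job-id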
The default, '\n '`{}` will be used.'.format(DEFAULT_IMAGE))\n print('***WARNING: For reproducible pipelines, specify an image with the '\n '`--image` flag.')\n args.image = DEFAULT_IMAGE\n\n return run(\n provider_base.get_provider(\n args, resources, credentials_fn=get_credentials),\n _get_job_resources(args),\n job_params,\n task_descriptors,\n name=args.name,\n dry_run=args.dry_run,\n command=args.command,\n script=args.script,\n user=args.user,\n user_project=args.user_project,\n wait=args.wait,\n retries=args.retries,\n max_preemptible_attempts=args.preemptible,\n poll_interval=args.poll_interval,\n after=args.after,\n skip=args.skip,\n project=args.project,\n location=args.location,\n disable_warning=True,\n unique_job_id=args.unique_job_id,\n summary=args.summary)","function_tokens":["def","run_main","(","args",")",":","if","args",".","command","and","args",".","script",":","raise","ValueError","(","'Cannot supply both a --command and --script flag'",")","provider_base",".","check_for_unsupported_flag","(","args",")","# Set up job parameters and job data from a tasks file or flags.","input_file_param_util","=","param_util",".","InputFileParamUtil","(","DEFAULT_INPUT_LOCAL_PATH",")","output_file_param_util","=","param_util",".","OutputFileParamUtil","(","DEFAULT_OUTPUT_LOCAL_PATH",")","mount_param_util","=","param_util",".","MountParamUtil","(","DEFAULT_MOUNT_LOCAL_PATH",")","# Get job arguments from the command line","job_params","=","param_util",".","args_to_job_params","(","args",".","env",",","args",".","label",",","args",".","input",",","args",".","input_recursive",",","args",".","output",",","args",".","output_recursive",",","args",".","mount",",","input_file_param_util",",","output_file_param_util",",","mount_param_util",")","# If --tasks is on the command-line, then get task-specific data","if","args",".","tasks",":","task_descriptors","=","param_util",".","tasks_file_to_task_descriptors","(","args",".","tasks",",","args",".","retries",",","input_file_param_util",",","output_file_param_util",")","# Validate job data + task data","_validate_job_and_task_arguments","(","job_params",",","task_descriptors",")","else",":","# Create the implicit task","task_metadata","=","{","'task-id'",":","None","}","if","args",".","retries",":","task_metadata","[","'task-attempt'","]","=","1","task_descriptors","=","[","job_model",".","TaskDescriptor","(","task_metadata",",","{","'labels'",":","set","(",")",",","'envs'",":","set","(",")",",","'inputs'",":","set","(",")",",","'outputs'",":","set","(",")","}",",","job_model",".","Resources","(",")",")","]","# Emit a warning if default image is used","if","args",".","image","is","None",":","print","(","'***WARNING: No Docker image specified. 
The default, '","'`{}` will be used.'",".","format","(","DEFAULT_IMAGE",")",")","print","(","'***WARNING: For reproducible pipelines, specify an image with the '","'`--image` flag.'",")","args",".","image","=","DEFAULT_IMAGE","return","run","(","provider_base",".","get_provider","(","args",",","resources",",","credentials_fn","=","get_credentials",")",",","_get_job_resources","(","args",")",",","job_params",",","task_descriptors",",","name","=","args",".","name",",","dry_run","=","args",".","dry_run",",","command","=","args",".","command",",","script","=","args",".","script",",","user","=","args",".","user",",","user_project","=","args",".","user_project",",","wait","=","args",".","wait",",","retries","=","args",".","retries",",","max_preemptible_attempts","=","args",".","preemptible",",","poll_interval","=","args",".","poll_interval",",","after","=","args",".","after",",","skip","=","args",".","skip",",","project","=","args",".","project",",","location","=","args",".","location",",","disable_warning","=","True",",","unique_job_id","=","args",".","unique_job_id",",","summary","=","args",".","summary",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L1128-L1199"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"run","parameters":"(provider,\n job_resources,\n job_params,\n task_descriptors,\n name=None,\n dry_run=False,\n command=None,\n script=None,\n user=None,\n user_project=None,\n wait=False,\n retries=0,\n max_preemptible_attempts=None,\n poll_interval=10,\n after=None,\n skip=False,\n project=None,\n location=None,\n disable_warning=False,\n unique_job_id=False,\n summary=False)","argument_list":"","return_statement":"return launched_job","docstring":"Actual dsub body, post-stdout-redirection.","docstring_summary":"Actual dsub body, post-stdout-redirection.","docstring_tokens":["Actual","dsub","body","post","-","stdout","-","redirection","."],"function":"def run(provider,\n job_resources,\n job_params,\n task_descriptors,\n name=None,\n dry_run=False,\n command=None,\n script=None,\n user=None,\n user_project=None,\n wait=False,\n retries=0,\n max_preemptible_attempts=None,\n poll_interval=10,\n after=None,\n skip=False,\n project=None,\n location=None,\n disable_warning=False,\n unique_job_id=False,\n summary=False):\n \"\"\"Actual dsub body, post-stdout-redirection.\"\"\"\n if not dry_run:\n provider_base.emit_provider_message(provider)\n\n if not disable_warning:\n raise ValueError('Do not use this unstable API component!')\n\n if command and script:\n raise ValueError('Cannot supply both a command and script value.')\n\n if command:\n if name:\n command_name = name\n else:\n command_name = _name_for_command(command)\n\n # Add the shebang line to ensure the command is treated as Bash\n script = job_model.Script(command_name, '#!\/usr\/bin\/env bash\\n' + command)\n elif script:\n # Read the script file\n script_file_contents = dsub_util.load_file(script)\n script = job_model.Script(os.path.basename(script), script_file_contents)\n else:\n raise ValueError('One of --command or a script name must be supplied')\n\n if retries and not wait:\n raise ValueError('Requesting retries requires requesting wait')\n\n if summary and not wait:\n raise ValueError('Requesting summary requires requesting wait')\n\n if max_preemptible_attempts:\n max_preemptible_attempts.validate(retries)\n\n # The contract with providers and 
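Two of `run_main`'s guard rails are simple enough to test in isolation. In this sketch, `args` is a hypothetical namespace and 'ubuntu:14.04' is only a stand-in for `DEFAULT_IMAGE`, whose real value these records do not show:

    import argparse

    args = argparse.Namespace(command='echo hi', script=None, image=None)
    if args.command and args.script:
        raise ValueError('Cannot supply both a --command and --script flag')
    if args.image is None:
        print('***WARNING: No Docker image specified.')
        args.image = 'ubuntu:14.04'  # stand-in for DEFAULT_IMAGE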
downstream code is that the job_params\n # and task_params contain 'labels', 'envs', 'inputs', and 'outputs'.\n job_model.ensure_job_params_are_complete(job_params)\n job_model.ensure_task_params_are_complete(task_descriptors)\n\n task_ids = {\n task_descriptor.task_metadata.get('task-id')\n for task_descriptor in task_descriptors\n if task_descriptor.task_metadata.get('task-id') is not None\n }\n\n # Job and task parameters from the user have been validated.\n # We can now compute some job and task properties, including:\n # job_metadata such as the job-id, create-time, user-id, etc.\n # task_resources such as the logging_path (which may include job-id, task-id)\n job_metadata = _get_job_metadata(provider, user, name, script, task_ids,\n user_project, unique_job_id)\n _resolve_task_resources(job_metadata, job_resources, task_descriptors)\n\n # Job and task properties are now all resolved. Begin execution!\n if not dry_run:\n print('Job properties:')\n print(' job-id: %s' % job_metadata['job-id'])\n print(' job-name: %s' % job_metadata['job-name'])\n print(' user-id: %s' % job_metadata['user-id'])\n\n # Wait for predecessor jobs (if any)\n if after:\n if dry_run:\n print('(Pretend) waiting for: %s.' % after)\n else:\n print('Waiting for predecessor jobs to complete...')\n error_messages = _wait_after(provider, after, poll_interval, True,\n summary)\n if error_messages:\n for msg in error_messages:\n dsub_util.print_error(msg)\n raise dsub_errors.PredecessorJobFailureError(\n 'One or more predecessor jobs completed but did not succeed.',\n error_messages, None)\n\n # Launch all the job tasks!\n job_descriptor = job_model.JobDescriptor(job_metadata, job_params,\n job_resources, task_descriptors)\n launched_job = provider.submit_job(job_descriptor, skip)\n\n if not dry_run:\n if launched_job['job-id'] == dsub_util.NO_JOB:\n print('Job output already present, skipping new job submission.')\n return {'job-id': dsub_util.NO_JOB}\n print('Launched job-id: %s' % launched_job['job-id'])\n if launched_job.get('task-id'):\n print('%s task(s)' % len(launched_job['task-id']))\n print('To check the status, run:')\n print(\" dstat %s --jobs '%s' --users '%s' --status '*'\" %\n (provider_base.get_dstat_provider_args(provider, project, location),\n launched_job['job-id'], launched_job['user-id']))\n print('To cancel the job, run:')\n print(\" ddel %s --jobs '%s' --users '%s'\" %\n (provider_base.get_ddel_provider_args(provider, project, location),\n launched_job['job-id'], launched_job['user-id']))\n\n # Poll for job completion\n if wait:\n print('Waiting for job to complete...')\n\n if retries:\n print('Monitoring for failed tasks to retry...')\n print(\n '*** This dsub process must continue running to retry failed tasks.')\n error_messages = _wait_and_retry(provider, job_metadata['job-id'],\n poll_interval, retries, job_descriptor,\n summary)\n else:\n error_messages = _wait_after(provider, [job_metadata['job-id']],\n poll_interval, False, summary)\n if error_messages:\n for msg in error_messages:\n dsub_util.print_error(msg)\n raise dsub_errors.JobExecutionError(\n 'One or more jobs finished with status FAILURE or CANCELED'\n ' during wait.', error_messages, launched_job)\n\n return 
launched_job","function_tokens":["def","run","(","provider",",","job_resources",",","job_params",",","task_descriptors",",","name","=","None",",","dry_run","=","False",",","command","=","None",",","script","=","None",",","user","=","None",",","user_project","=","None",",","wait","=","False",",","retries","=","0",",","max_preemptible_attempts","=","None",",","poll_interval","=","10",",","after","=","None",",","skip","=","False",",","project","=","None",",","location","=","None",",","disable_warning","=","False",",","unique_job_id","=","False",",","summary","=","False",")",":","if","not","dry_run",":","provider_base",".","emit_provider_message","(","provider",")","if","not","disable_warning",":","raise","ValueError","(","'Do not use this unstable API component!'",")","if","command","and","script",":","raise","ValueError","(","'Cannot supply both a command and script value.'",")","if","command",":","if","name",":","command_name","=","name","else",":","command_name","=","_name_for_command","(","command",")","# Add the shebang line to ensure the command is treated as Bash","script","=","job_model",".","Script","(","command_name",",","'#!\/usr\/bin\/env bash\\n'","+","command",")","elif","script",":","# Read the script file","script_file_contents","=","dsub_util",".","load_file","(","script",")","script","=","job_model",".","Script","(","os",".","path",".","basename","(","script",")",",","script_file_contents",")","else",":","raise","ValueError","(","'One of --command or a script name must be supplied'",")","if","retries","and","not","wait",":","raise","ValueError","(","'Requesting retries requires requesting wait'",")","if","summary","and","not","wait",":","raise","ValueError","(","'Requesting summary requires requesting wait'",")","if","max_preemptible_attempts",":","max_preemptible_attempts",".","validate","(","retries",")","# The contract with providers and downstream code is that the job_params","# and task_params contain 'labels', 'envs', 'inputs', and 'outputs'.","job_model",".","ensure_job_params_are_complete","(","job_params",")","job_model",".","ensure_task_params_are_complete","(","task_descriptors",")","task_ids","=","{","task_descriptor",".","task_metadata",".","get","(","'task-id'",")","for","task_descriptor","in","task_descriptors","if","task_descriptor",".","task_metadata",".","get","(","'task-id'",")","is","not","None","}","# Job and task parameters from the user have been validated.","# We can now compute some job and task properties, including:","# job_metadata such as the job-id, create-time, user-id, etc.","# task_resources such as the logging_path (which may include job-id, task-id)","job_metadata","=","_get_job_metadata","(","provider",",","user",",","name",",","script",",","task_ids",",","user_project",",","unique_job_id",")","_resolve_task_resources","(","job_metadata",",","job_resources",",","task_descriptors",")","# Job and task properties are now all resolved. 
Begin execution!","if","not","dry_run",":","print","(","'Job properties:'",")","print","(","' job-id: %s'","%","job_metadata","[","'job-id'","]",")","print","(","' job-name: %s'","%","job_metadata","[","'job-name'","]",")","print","(","' user-id: %s'","%","job_metadata","[","'user-id'","]",")","# Wait for predecessor jobs (if any)","if","after",":","if","dry_run",":","print","(","'(Pretend) waiting for: %s.'","%","after",")","else",":","print","(","'Waiting for predecessor jobs to complete...'",")","error_messages","=","_wait_after","(","provider",",","after",",","poll_interval",",","True",",","summary",")","if","error_messages",":","for","msg","in","error_messages",":","dsub_util",".","print_error","(","msg",")","raise","dsub_errors",".","PredecessorJobFailureError","(","'One or more predecessor jobs completed but did not succeed.'",",","error_messages",",","None",")","# Launch all the job tasks!","job_descriptor","=","job_model",".","JobDescriptor","(","job_metadata",",","job_params",",","job_resources",",","task_descriptors",")","launched_job","=","provider",".","submit_job","(","job_descriptor",",","skip",")","if","not","dry_run",":","if","launched_job","[","'job-id'","]","==","dsub_util",".","NO_JOB",":","print","(","'Job output already present, skipping new job submission.'",")","return","{","'job-id'",":","dsub_util",".","NO_JOB","}","print","(","'Launched job-id: %s'","%","launched_job","[","'job-id'","]",")","if","launched_job",".","get","(","'task-id'",")",":","print","(","'%s task(s)'","%","len","(","launched_job","[","'task-id'","]",")",")","print","(","'To check the status, run:'",")","print","(","\" dstat %s --jobs '%s' --users '%s' --status '*'\"","%","(","provider_base",".","get_dstat_provider_args","(","provider",",","project",",","location",")",",","launched_job","[","'job-id'","]",",","launched_job","[","'user-id'","]",")",")","print","(","'To cancel the job, run:'",")","print","(","\" ddel %s --jobs '%s' --users '%s'\"","%","(","provider_base",".","get_ddel_provider_args","(","provider",",","project",",","location",")",",","launched_job","[","'job-id'","]",",","launched_job","[","'user-id'","]",")",")","# Poll for job completion","if","wait",":","print","(","'Waiting for job to complete...'",")","if","retries",":","print","(","'Monitoring for failed tasks to retry...'",")","print","(","'*** This dsub process must continue running to retry failed tasks.'",")","error_messages","=","_wait_and_retry","(","provider",",","job_metadata","[","'job-id'","]",",","poll_interval",",","retries",",","job_descriptor",",","summary",")","else",":","error_messages","=","_wait_after","(","provider",",","[","job_metadata","[","'job-id'","]","]",",","poll_interval",",","False",",","summary",")","if","error_messages",":","for","msg","in","error_messages",":","dsub_util",".","print_error","(","msg",")","raise","dsub_errors",".","JobExecutionError","(","'One or more jobs finished with status FAILURE or CANCELED'","' during wait.'",",","error_messages",",","launched_job",")","return","launched_job"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L1202-L1340"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/dsub.py","language":"python","identifier":"_name_for_command","parameters":"(command)","argument_list":"","return_statement":"return 'command'","docstring":"r\"\"\"Craft a simple command name from the command.\n\n The best command strings for this are going to be 
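Inside `run`, a bare `--command` is promoted to a Script whose body gets a bash shebang prefixed, so providers always execute it as bash. A namedtuple stands in for `job_model.Script` in this sketch:

    import collections

    Script = collections.namedtuple('Script', ['name', 'value'])
    command = 'echo "${GREETING}"'
    script = Script('echo', '#!/usr/bin/env bash\n' + command)
    assert script.value.startswith('#!/usr/bin/env bash')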
those where a simple\n command was given; we will use the command to derive the name.\n\n We won't always be able to figure something out and the caller should just\n specify a \"--name\" on the command-line.\n\n For example, commands like \"export VAR=val\\necho ${VAR}\", this function would\n return \"export\".\n\n If the command starts space or a comment, then we'll skip to the first code\n we can find.\n\n If we find nothing, just return \"command\".\n\n >>> _name_for_command('samtools index \"${BAM}\"')\n 'samtools'\n >>> _name_for_command('\/usr\/bin\/sort \"${INFILE}\" > \"${OUTFILE}\"')\n 'sort'\n >>> _name_for_command('# This should be ignored')\n 'command'\n >>> _name_for_command('\\\\\\n\\\\\\n# Bad continuations, but ignore.\\necho hello.')\n 'echo'\n >>> _name_for_command('(uname -a && pwd) # Command begins with non-letter.')\n 'uname'\n >>> _name_for_command('my-program.sh # Command with hyphens.')\n 'my-program.sh'\n >>> _name_for_command('\/home\/user\/bin\/-my-sort # Path with hyphen.')\n 'my-sort'\n\n Arguments:\n command: the user-provided command\n Returns:\n a proposed name for the task.","docstring_summary":"r\"\"\"Craft a simple command name from the command.","docstring_tokens":["r","Craft","a","simple","command","name","from","the","command","."],"function":"def _name_for_command(command):\n r\"\"\"Craft a simple command name from the command.\n\n The best command strings for this are going to be those where a simple\n command was given; we will use the command to derive the name.\n\n We won't always be able to figure something out and the caller should just\n specify a \"--name\" on the command-line.\n\n For example, commands like \"export VAR=val\\necho ${VAR}\", this function would\n return \"export\".\n\n If the command starts space or a comment, then we'll skip to the first code\n we can find.\n\n If we find nothing, just return \"command\".\n\n >>> _name_for_command('samtools index \"${BAM}\"')\n 'samtools'\n >>> _name_for_command('\/usr\/bin\/sort \"${INFILE}\" > \"${OUTFILE}\"')\n 'sort'\n >>> _name_for_command('# This should be ignored')\n 'command'\n >>> _name_for_command('\\\\\\n\\\\\\n# Bad continuations, but ignore.\\necho hello.')\n 'echo'\n >>> _name_for_command('(uname -a && pwd) # Command begins with non-letter.')\n 'uname'\n >>> _name_for_command('my-program.sh # Command with hyphens.')\n 'my-program.sh'\n >>> _name_for_command('\/home\/user\/bin\/-my-sort # Path with hyphen.')\n 'my-sort'\n\n Arguments:\n command: the user-provided command\n Returns:\n a proposed name for the task.\n \"\"\"\n\n lines = command.splitlines()\n for line in lines:\n line = line.strip()\n if line and not line.startswith('#') and line != '\\\\':\n # Tokenize on whitespace [ \\t\\n\\r\\f\\v]\n names = re.split(r'\\s', line)\n for name in names:\n # Make sure the first character is a letter, number, or underscore\n # Get basename so something like \"\/usr\/bin\/sort\" becomes just \"sort\"\n name = re.sub(r'^[^a-zA-Z0-9_]*', '', os.path.basename(name))\n if name:\n return name\n\n return 'command'","function_tokens":["def","_name_for_command","(","command",")",":","lines","=","command",".","splitlines","(",")","for","line","in","lines",":","line","=","line",".","strip","(",")","if","line","and","not","line",".","startswith","(","'#'",")","and","line","!=","'\\\\'",":","# Tokenize on whitespace [ \\t\\n\\r\\f\\v]","names","=","re",".","split","(","r'\\s'",",","line",")","for","name","in","names",":","# Make sure the first character is a letter, number, or underscore","# 
Get basename so something like \"\/usr\/bin\/sort\" becomes just \"sort\"","name","=","re",".","sub","(","r'^[^a-zA-Z0-9_]*'",",","''",",","os",".","path",".","basename","(","name",")",")","if","name",":","return","name","return","'command'"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/dsub.py#L1343-L1394"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/ddel.py","language":"python","identifier":"get_credentials","parameters":"(args)","argument_list":"","return_statement":"return provider_base.credentials_from_args(args)","docstring":"Returns credentials for API requests.","docstring_summary":"Returns credentials for API requests.","docstring_tokens":["Returns","credentials","for","API","requests","."],"function":"def get_credentials(args):\n \"\"\"Returns credentials for API requests.\"\"\"\n\n # Across dsub, dstat, ddel, defer to the provider for credentials handling\n return provider_base.credentials_from_args(args)","function_tokens":["def","get_credentials","(","args",")",":","# Across dsub, dstat, ddel, defer to the provider for credentials handling","return","provider_base",".","credentials_from_args","(","args",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/ddel.py#L29-L33"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/ddel.py","language":"python","identifier":"_parse_arguments","parameters":"()","argument_list":"","return_statement":"return provider_base.parse_args(\n parser, {\n 'google-cls-v2': ['project'],\n 'google-v2': ['project'],\n 'test-fails': [],\n 'local': [],\n }, sys.argv[1:])","docstring":"Parses command line arguments.\n\n Returns:\n A Namespace of parsed arguments.","docstring_summary":"Parses command line arguments.","docstring_tokens":["Parses","command","line","arguments","."],"function":"def _parse_arguments():\n \"\"\"Parses command line arguments.\n\n Returns:\n A Namespace of parsed arguments.\n \"\"\"\n # Handle version flag and exit if it was passed.\n param_util.handle_version_flag()\n\n parser = provider_base.create_parser(sys.argv[0])\n\n parser.add_argument(\n '--version', '-v', default=False, help='Print the dsub version and exit.')\n\n parser.add_argument(\n '--jobs',\n '-j',\n required=True,\n nargs='*',\n help='List of job-ids to delete. Use \"*\" to delete all running jobs.')\n parser.add_argument(\n '--tasks',\n '-t',\n nargs='*',\n help='List of tasks in an array job to delete.')\n parser.add_argument(\n '--users',\n '-u',\n nargs='*',\n default=[],\n help=\"\"\"Deletes only those jobs which were submitted by the list of users.\n Use \"*\" to delete jobs of any user.\"\"\")\n parser.add_argument(\n '--age',\n help=\"\"\"Deletes only those jobs newer than the specified age. Ages can be\n listed using a number followed by a unit. Supported units are\n s (seconds), m (minutes), h (hours), d (days), w (weeks).\n For example: '7d' (7 days). Bare numbers are treated as UTC.\"\"\")\n parser.add_argument(\n '--label',\n nargs='*',\n action=param_util.ListParamAction,\n default=[],\n help='User labels to match. 
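The doctests embedded in `_name_for_command` double as a specification; the same tokenize, basename, and strip-leading-punctuation idea fits in a few lines. This is a simplified re-implementation for illustration, not the dsub function itself:

    import os
    import re

    def name_for(command):
        for line in command.splitlines():
            line = line.strip()
            if line and not line.startswith('#') and line != '\\':
                for tok in re.split(r'\s', line):
                    tok = re.sub(r'^[^a-zA-Z0-9_]*', '',
                                 os.path.basename(tok))
                    if tok:
                        return tok
        return 'command'

    assert name_for('/usr/bin/sort "${INFILE}"') == 'sort'
    assert name_for('# only a comment') == 'command'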
Tasks returned must match all labels.',\n metavar='KEY=VALUE')\n\n # Shared between the \"google-cls-v2\" and \"google-v2\" providers\n google_common = parser.add_argument_group(\n title='google-common',\n description=\"\"\"Options common to the \"google\", \"google-cls-v2\", and\n \"google-v2\" providers\"\"\")\n google_common.add_argument(\n '--project',\n help='Cloud project ID in which to find and delete the job(s)')\n\n google_cls_v2 = parser.add_argument_group(\n title='\"google-cls-v2\" provider options',\n description='See also the \"google-common\" options listed')\n google_cls_v2.add_argument(\n '--location',\n default=job_model.DEFAULT_LOCATION,\n help=\"\"\"Specifies the Google Cloud region to which the dsub job was\n submitted. (default: {})\"\"\".format(job_model.DEFAULT_LOCATION))\n\n return provider_base.parse_args(\n parser, {\n 'google-cls-v2': ['project'],\n 'google-v2': ['project'],\n 'test-fails': [],\n 'local': [],\n }, sys.argv[1:])","function_tokens":["def","_parse_arguments","(",")",":","# Handle version flag and exit if it was passed.","param_util",".","handle_version_flag","(",")","parser","=","provider_base",".","create_parser","(","sys",".","argv","[","0","]",")","parser",".","add_argument","(","'--version'",",","'-v'",",","default","=","False",",","help","=","'Print the dsub version and exit.'",")","parser",".","add_argument","(","'--jobs'",",","'-j'",",","required","=","True",",","nargs","=","'*'",",","help","=","'List of job-ids to delete. Use \"*\" to delete all running jobs.'",")","parser",".","add_argument","(","'--tasks'",",","'-t'",",","nargs","=","'*'",",","help","=","'List of tasks in an array job to delete.'",")","parser",".","add_argument","(","'--users'",",","'-u'",",","nargs","=","'*'",",","default","=","[","]",",","help","=","\"\"\"Deletes only those jobs which were submitted by the list of users.\n Use \"*\" to delete jobs of any user.\"\"\"",")","parser",".","add_argument","(","'--age'",",","help","=","\"\"\"Deletes only those jobs newer than the specified age. Ages can be\n listed using a number followed by a unit. Supported units are\n s (seconds), m (minutes), h (hours), d (days), w (weeks).\n For example: '7d' (7 days). Bare numbers are treated as UTC.\"\"\"",")","parser",".","add_argument","(","'--label'",",","nargs","=","'*'",",","action","=","param_util",".","ListParamAction",",","default","=","[","]",",","help","=","'User labels to match. Tasks returned must match all labels.'",",","metavar","=","'KEY=VALUE'",")","# Shared between the \"google-cls-v2\" and \"google-v2\" providers","google_common","=","parser",".","add_argument_group","(","title","=","'google-common'",",","description","=","\"\"\"Options common to the \"google\", \"google-cls-v2\", and\n \"google-v2\" providers\"\"\"",")","google_common",".","add_argument","(","'--project'",",","help","=","'Cloud project ID in which to find and delete the job(s)'",")","google_cls_v2","=","parser",".","add_argument_group","(","title","=","'\"google-cls-v2\" provider options'",",","description","=","'See also the \"google-common\" options listed'",")","google_cls_v2",".","add_argument","(","'--location'",",","default","=","job_model",".","DEFAULT_LOCATION",",","help","=","\"\"\"Specifies the Google Cloud region to which the dsub job was\n submitted. 
(default: {})\"\"\"",".","format","(","job_model",".","DEFAULT_LOCATION",")",")","return","provider_base",".","parse_args","(","parser",",","{","'google-cls-v2'",":","[","'project'","]",",","'google-v2'",":","[","'project'","]",",","'test-fails'",":","[","]",",","'local'",":","[","]",",","}",",","sys",".","argv","[","1",":","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/ddel.py#L36-L106"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/ddel.py","language":"python","identifier":"_emit_search_criteria","parameters":"(user_ids, job_ids, task_ids, labels)","argument_list":"","return_statement":"","docstring":"Print the filters used to delete tasks. Use raw flags as arguments.","docstring_summary":"Print the filters used to delete tasks. Use raw flags as arguments.","docstring_tokens":["Print","the","filters","used","to","delete","tasks",".","Use","raw","flags","as","arguments","."],"function":"def _emit_search_criteria(user_ids, job_ids, task_ids, labels):\n \"\"\"Print the filters used to delete tasks. Use raw flags as arguments.\"\"\"\n print('Delete running jobs:')\n print(' user:')\n print(' %s\\n' % user_ids)\n print(' job-id:')\n print(' %s\\n' % job_ids)\n if task_ids:\n print(' task-id:')\n print(' %s\\n' % task_ids)\n # Labels are in a LabelParam namedtuple and must be reformated for printing.\n if labels:\n print(' labels:')\n print(' %s\\n' % repr(labels))","function_tokens":["def","_emit_search_criteria","(","user_ids",",","job_ids",",","task_ids",",","labels",")",":","print","(","'Delete running jobs:'",")","print","(","' user:'",")","print","(","' %s\\n'","%","user_ids",")","print","(","' job-id:'",")","print","(","' %s\\n'","%","job_ids",")","if","task_ids",":","print","(","' task-id:'",")","print","(","' %s\\n'","%","task_ids",")","# Labels are in a LabelParam namedtuple and must be reformated for printing.","if","labels",":","print","(","' labels:'",")","print","(","' %s\\n'","%","repr","(","labels",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/ddel.py#L109-L122"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/commands\/ddel.py","language":"python","identifier":"ddel_tasks","parameters":"(provider,\n user_ids=None,\n job_ids=None,\n task_ids=None,\n labels=None,\n create_time_min=None,\n create_time_max=None)","argument_list":"","return_statement":"return deleted_tasks","docstring":"Kill jobs or job tasks.\n\n This function separates ddel logic from flag parsing and user output. 
Users\n of ddel who intend to access the data programmatically should use this.\n\n Args:\n provider: an instantiated dsub provider.\n user_ids: a set of user ids who \"own\" the job(s) to delete.\n job_ids: a set of job ids to delete.\n task_ids: a set of task ids to delete.\n labels: a set of LabelParam, each must match the job(s) to be cancelled.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent create\n time of a task, inclusive.\n\n Returns:\n list of job ids which were deleted.","docstring_summary":"Kill jobs or job tasks.","docstring_tokens":["Kill","jobs","or","job","tasks","."],"function":"def ddel_tasks(provider,\n user_ids=None,\n job_ids=None,\n task_ids=None,\n labels=None,\n create_time_min=None,\n create_time_max=None):\n \"\"\"Kill jobs or job tasks.\n\n This function separates ddel logic from flag parsing and user output. Users\n of ddel who intend to access the data programmatically should use this.\n\n Args:\n provider: an instantiated dsub provider.\n user_ids: a set of user ids who \"own\" the job(s) to delete.\n job_ids: a set of job ids to delete.\n task_ids: a set of task ids to delete.\n labels: a set of LabelParam, each must match the job(s) to be cancelled.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent create\n time of a task, inclusive.\n\n Returns:\n list of job ids which were deleted.\n \"\"\"\n # Delete the requested jobs\n deleted_tasks, error_messages = provider.delete_jobs(\n user_ids, job_ids, task_ids, labels, create_time_min, create_time_max)\n\n # Emit any errors canceling jobs\n for msg in error_messages:\n print(msg)\n\n return deleted_tasks","function_tokens":["def","ddel_tasks","(","provider",",","user_ids","=","None",",","job_ids","=","None",",","task_ids","=","None",",","labels","=","None",",","create_time_min","=","None",",","create_time_max","=","None",")",":","# Delete the requested jobs","deleted_tasks",",","error_messages","=","provider",".","delete_jobs","(","user_ids",",","job_ids",",","task_ids",",","labels",",","create_time_min",",","create_time_max",")","# Emit any errors canceling jobs","for","msg","in","error_messages",":","print","(","msg",")","return","deleted_tasks"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/commands\/ddel.py#L175-L209"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_pipelines.py","language":"python","identifier":"build_machine","parameters":"(network=None,\n machine_type=None,\n preemptible=None,\n service_account=None,\n boot_disk_size_gb=None,\n disks=None,\n accelerators=None,\n labels=None,\n cpu_platform=None,\n nvidia_driver_version=None,\n enable_stackdriver_monitoring=None)","argument_list":"","return_statement":"return {\n 'network': network,\n 'machineType': machine_type,\n 'preemptible': preemptible,\n 'serviceAccount': service_account,\n 'bootDiskSizeGb': boot_disk_size_gb,\n 'disks': disks,\n 'accelerators': accelerators,\n 'labels': labels,\n 'cpuPlatform': cpu_platform,\n 'nvidiaDriverVersion': nvidia_driver_version,\n 'enableStackdriverMonitoring': enable_stackdriver_monitoring,\n }","docstring":"Build a VirtualMachine object for a Pipeline request.\n\n Args:\n network (dict): Network details for the pipeline to run in.\n 
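`ddel_tasks` is a thin shim over `provider.delete_jobs`: the contract is a `(deleted_tasks, error_messages)` pair, with errors printed and the deletions returned. A stub provider makes the shape concrete (`FakeProvider` is hypothetical):

    class FakeProvider(object):
        def delete_jobs(self, user_ids, job_ids, task_ids, labels,
                        create_time_min, create_time_max):
            return ['job--abc123'], []  # (deleted_tasks, error_messages)

    deleted, errors = FakeProvider().delete_jobs(
        {'me'}, {'job--abc123'}, None, None, None, None)
    for msg in errors:
        print(msg)
    assert deleted == ['job--abc123']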
machine_type (str): GCE Machine Type string for the pipeline.\n preemptible (bool): Use a preemptible VM for the job.\n service_account (dict): Service account configuration for the VM.\n boot_disk_size_gb (int): Boot disk size in GB.\n disks (list[dict]): List of disks to mount.\n accelerators (list[dict]): List of accelerators to attach to the VM.\n labels (dict[string, string]): Labels for the VM.\n cpu_platform (str): The CPU platform to request.\n nvidia_driver_version (str): The NVIDIA driver version to use when attaching\n an NVIDIA GPU accelerator.\n enable_stackdriver_monitoring (bool): Enable stackdriver monitoring\n on the VM.\n\n Returns:\n An object representing a VirtualMachine.","docstring_summary":"Build a VirtualMachine object for a Pipeline request.","docstring_tokens":["Build","a","VirtualMachine","object","for","a","Pipeline","request","."],"function":"def build_machine(network=None,\n machine_type=None,\n preemptible=None,\n service_account=None,\n boot_disk_size_gb=None,\n disks=None,\n accelerators=None,\n labels=None,\n cpu_platform=None,\n nvidia_driver_version=None,\n enable_stackdriver_monitoring=None):\n \"\"\"Build a VirtualMachine object for a Pipeline request.\n\n Args:\n network (dict): Network details for the pipeline to run in.\n machine_type (str): GCE Machine Type string for the pipeline.\n preemptible (bool): Use a preemptible VM for the job.\n service_account (dict): Service account configuration for the VM.\n boot_disk_size_gb (int): Boot disk size in GB.\n disks (list[dict]): List of disks to mount.\n accelerators (list[dict]): List of accelerators to attach to the VM.\n labels (dict[string, string]): Labels for the VM.\n cpu_platform (str): The CPU platform to request.\n nvidia_driver_version (str): The NVIDIA driver version to use when attaching\n an NVIDIA GPU accelerator.\n enable_stackdriver_monitoring (bool): Enable stackdriver monitoring\n on the VM.\n\n Returns:\n An object representing a VirtualMachine.\n \"\"\"\n return {\n 'network': network,\n 'machineType': machine_type,\n 'preemptible': preemptible,\n 'serviceAccount': service_account,\n 'bootDiskSizeGb': boot_disk_size_gb,\n 'disks': disks,\n 'accelerators': accelerators,\n 'labels': labels,\n 'cpuPlatform': cpu_platform,\n 'nvidiaDriverVersion': nvidia_driver_version,\n 'enableStackdriverMonitoring': enable_stackdriver_monitoring,\n }","function_tokens":["def","build_machine","(","network","=","None",",","machine_type","=","None",",","preemptible","=","None",",","service_account","=","None",",","boot_disk_size_gb","=","None",",","disks","=","None",",","accelerators","=","None",",","labels","=","None",",","cpu_platform","=","None",",","nvidia_driver_version","=","None",",","enable_stackdriver_monitoring","=","None",")",":","return","{","'network'",":","network",",","'machineType'",":","machine_type",",","'preemptible'",":","preemptible",",","'serviceAccount'",":","service_account",",","'bootDiskSizeGb'",":","boot_disk_size_gb",",","'disks'",":","disks",",","'accelerators'",":","accelerators",",","'labels'",":","labels",",","'cpuPlatform'",":","cpu_platform",",","'nvidiaDriverVersion'",":","nvidia_driver_version",",","'enableStackdriverMonitoring'",":","enable_stackdriver_monitoring",",","}"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_pipelines.py#L65-L108"} 
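A usage sketch for `build_machine`, importing per the module path in the record's URL (the import line is an assumption about how the package is laid out and installed):

    from dsub.providers import google_v2_pipelines

    vm = google_v2_pipelines.build_machine(
        machine_type='n1-standard-1', preemptible=True, boot_disk_size_gb=10)
    assert vm['machineType'] == 'n1-standard-1'
    assert vm['network'] is None  # unset keyword arguments stay None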
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_pipelines.py","language":"python","identifier":"build_resources","parameters":"(project=None,\n regions=None,\n zones=None,\n virtual_machine=None)","argument_list":"","return_statement":"return resources","docstring":"Build a Resources object for a Pipeline request.\n\n Args:\n project (str): Cloud project for the Pipeline to run in.\n regions (List[str]): List of regions for the pipeline to run in.\n zones (List[str]): List of zones for the pipeline to run in.\n virtual_machine(str): Virtual machine type string.\n\n Returns:\n An object representing a Resource.","docstring_summary":"Build a Resources object for a Pipeline request.","docstring_tokens":["Build","a","Resources","object","for","a","Pipeline","request","."],"function":"def build_resources(project=None,\n regions=None,\n zones=None,\n virtual_machine=None):\n \"\"\"Build a Resources object for a Pipeline request.\n\n Args:\n project (str): Cloud project for the Pipeline to run in.\n regions (List[str]): List of regions for the pipeline to run in.\n zones (List[str]): List of zones for the pipeline to run in.\n virtual_machine(str): Virtual machine type string.\n\n Returns:\n An object representing a Resource.\n \"\"\"\n\n resources = {\n 'regions': regions,\n 'zones': zones,\n 'virtualMachine': virtual_machine,\n }\n\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n resources['projectId'] = project\n\n return resources","function_tokens":["def","build_resources","(","project","=","None",",","regions","=","None",",","zones","=","None",",","virtual_machine","=","None",")",":","resources","=","{","'regions'",":","regions",",","'zones'",":","zones",",","'virtualMachine'",":","virtual_machine",",","}","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","resources","[","'projectId'","]","=","project","return","resources"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_pipelines.py#L111-L136"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_pipelines.py","language":"python","identifier":"build_mount","parameters":"(disk, path, read_only)","argument_list":"","return_statement":"return {\n 'disk': disk,\n 'path': path,\n 'readOnly': read_only,\n }","docstring":"Build a Mount object for a Pipeline request.\n\n Args:\n disk (str): Name of disk to mount, as specified in the resources section.\n path (str): Path to mount the disk at inside the container.\n read_only (boolean): If true, disk is mounted read only in the container.\n\n Returns:\n An object representing a Mount.","docstring_summary":"Build a Mount object for a Pipeline request.","docstring_tokens":["Build","a","Mount","object","for","a","Pipeline","request","."],"function":"def build_mount(disk, path, read_only):\n \"\"\"Build a Mount object for a Pipeline request.\n\n Args:\n disk (str): Name of disk to mount, as specified in the resources section.\n path (str): Path to mount the disk at inside the container.\n read_only (boolean): If true, disk is mounted read only in the container.\n\n Returns:\n An object representing a Mount.\n \"\"\"\n\n return {\n 'disk': disk,\n 'path': path,\n 'readOnly': read_only,\n 
}","function_tokens":["def","build_mount","(","disk",",","path",",","read_only",")",":","return","{","'disk'",":","disk",",","'path'",":","path",",","'readOnly'",":","read_only",",","}"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_pipelines.py#L139-L155"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_pipelines.py","language":"python","identifier":"build_action","parameters":"(name=None,\n image_uri=None,\n commands=None,\n entrypoint=None,\n environment=None,\n pid_namespace=None,\n port_mappings=None,\n mounts=None,\n labels=None,\n always_run=None,\n enable_fuse=None,\n run_in_background=None,\n block_external_network=None)","argument_list":"","return_statement":"return action","docstring":"Build an Action object for a Pipeline request.\n\n Args:\n name (str): An optional name for the container.\n image_uri (str): The URI to pull the container image from.\n commands (List[str]): commands and arguments to run inside the container.\n entrypoint (str): overrides the ENTRYPOINT specified in the container.\n environment (dict[str,str]): The environment to pass into the container.\n pid_namespace (str): The PID namespace to run the action inside.\n port_mappings (dict[int, int]): A map of container to host port mappings for\n this container.\n mounts (List): A list of mounts to make available to the action.\n labels (dict[str]): Labels to associate with the action.\n always_run (bool): Action must run even if pipeline has already failed.\n enable_fuse (bool): Enable access to the FUSE device for this action.\n run_in_background (bool): Allow the action to run in the background.\n block_external_network (bool): Prevents the container from accessing the\n external network.\n\n Returns:\n An object representing an Action resource.","docstring_summary":"Build an Action object for a Pipeline request.","docstring_tokens":["Build","an","Action","object","for","a","Pipeline","request","."],"function":"def build_action(name=None,\n image_uri=None,\n commands=None,\n entrypoint=None,\n environment=None,\n pid_namespace=None,\n port_mappings=None,\n mounts=None,\n labels=None,\n always_run=None,\n enable_fuse=None,\n run_in_background=None,\n block_external_network=None):\n \"\"\"Build an Action object for a Pipeline request.\n\n Args:\n name (str): An optional name for the container.\n image_uri (str): The URI to pull the container image from.\n commands (List[str]): commands and arguments to run inside the container.\n entrypoint (str): overrides the ENTRYPOINT specified in the container.\n environment (dict[str,str]): The environment to pass into the container.\n pid_namespace (str): The PID namespace to run the action inside.\n port_mappings (dict[int, int]): A map of container to host port mappings for\n this container.\n mounts (List): A list of mounts to make available to the action.\n labels (dict[str]): Labels to associate with the action.\n always_run (bool): Action must run even if pipeline has already failed.\n enable_fuse (bool): Enable access to the FUSE device for this action.\n run_in_background (bool): Allow the action to run in the background.\n block_external_network (bool): Prevents the container from accessing the\n external network.\n\n Returns:\n An object representing an Action resource.\n \"\"\"\n\n action = {\n 'imageUri': image_uri,\n 'commands': commands,\n 'entrypoint': entrypoint,\n 'environment': environment,\n 'pidNamespace': 
pid_namespace,\n 'portMappings': port_mappings,\n 'mounts': mounts,\n 'labels': labels,\n }\n\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n action['name'] = name\n\n # In v2alpha1, the flags are passed as a list of strings\n flags = []\n if always_run:\n flags.append('ALWAYS_RUN')\n if enable_fuse:\n flags.append('ENABLE_FUSE')\n if run_in_background:\n flags.append('RUN_IN_BACKGROUND')\n if block_external_network:\n flags.append('BLOCK_EXTERNAL_NETWORK')\n\n if flags:\n action['flags'] = flags\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n action['containerName'] = name\n\n # In v2beta, the flags are direct members of the action\n action['alwaysRun'] = always_run\n action['enableFuse'] = enable_fuse\n action['runInBackground'] = run_in_background\n action['blockExternalNetwork'] = block_external_network\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)\n\n return action","function_tokens":["def","build_action","(","name","=","None",",","image_uri","=","None",",","commands","=","None",",","entrypoint","=","None",",","environment","=","None",",","pid_namespace","=","None",",","port_mappings","=","None",",","mounts","=","None",",","labels","=","None",",","always_run","=","None",",","enable_fuse","=","None",",","run_in_background","=","None",",","block_external_network","=","None",")",":","action","=","{","'imageUri'",":","image_uri",",","'commands'",":","commands",",","'entrypoint'",":","entrypoint",",","'environment'",":","environment",",","'pidNamespace'",":","pid_namespace",",","'portMappings'",":","port_mappings",",","'mounts'",":","mounts",",","'labels'",":","labels",",","}","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","action","[","'name'","]","=","name","# In v2alpha1, the flags are passed as a list of strings","flags","=","[","]","if","always_run",":","flags",".","append","(","'ALWAYS_RUN'",")","if","enable_fuse",":","flags",".","append","(","'ENABLE_FUSE'",")","if","run_in_background",":","flags",".","append","(","'RUN_IN_BACKGROUND'",")","if","block_external_network",":","flags",".","append","(","'BLOCK_EXTERNAL_NETWORK'",")","if","flags",":","action","[","'flags'","]","=","flags","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","action","[","'containerName'","]","=","name","# In v2beta, the flags are direct members of the action","action","[","'alwaysRun'","]","=","always_run","action","[","'enableFuse'","]","=","enable_fuse","action","[","'runInBackground'","]","=","run_in_background","action","[","'blockExternalNetwork'","]","=","block_external_network","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")","return","action"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_pipelines.py#L158-L234"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_pipelines.py","language":"python","identifier":"build_pipeline","parameters":"(actions, resources, environment, timeout)","argument_list":"","return_statement":"return {\n 'actions': actions,\n 'resources': resources,\n 'environment': environment,\n 'timeout': timeout,\n }","docstring":"Build an Pipeline argument for a Pipeline request.\n\n Args:\n actions (List): A list of actions to execute.\n resources (dict): An object indicating pipeline resources.\n environment (dict[str,str]): The environment to pass into the container.\n timeout (str): A duration in seconds with up to nine 
fractional digits,\n terminated by 's'.\n\n Returns:\n An object representing a Pipelines Resource.","docstring_summary":"Build an Pipeline argument for a Pipeline request.","docstring_tokens":["Build","an","Pipeline","argument","for","a","Pipeline","request","."],"function":"def build_pipeline(actions, resources, environment, timeout):\n \"\"\"Build an Pipeline argument for a Pipeline request.\n\n Args:\n actions (List): A list of actions to execute.\n resources (dict): An object indicating pipeline resources.\n environment (dict[str,str]): The environment to pass into the container.\n timeout (str): A duration in seconds with up to nine fractional digits,\n terminated by 's'.\n\n Returns:\n An object representing a Pipelines Resource.\n \"\"\"\n\n return {\n 'actions': actions,\n 'resources': resources,\n 'environment': environment,\n 'timeout': timeout,\n }","function_tokens":["def","build_pipeline","(","actions",",","resources",",","environment",",","timeout",")",":","return","{","'actions'",":","actions",",","'resources'",":","resources",",","'environment'",":","environment",",","'timeout'",":","timeout",",","}"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_pipelines.py#L237-L256"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/base.py","language":"python","identifier":"JobProvider.prepare_job_metadata","parameters":"(self, script, job_name, user_id)","argument_list":"","return_statement":"","docstring":"Returns a dictionary of metadata fields for the job.\n\n Call this before calling submit_job.\n\n The job metadata is a dictionary of values relevant to the job as a whole,\n such as:\n\n * job-name\n * job-id\n * user-id\n * script\n * dsub-version\n\n The creation of the job metadata is done by the provider, as the rules\n around these items are provider-specific. For example, one job provider\n might have an 8 character restriction on job names, while another may be\n restricted to lower-case characters.\n\n The provider *must* set the job-name, job-id, and user-id. They may be used\n by the dsub infrastructure for display and direct return to callers.\n\n The provider is free to set any other key that it will need in\n submit_job().\n\n The \"job_name\" passed in need not be the same 'job-name' value set in the\n returned job metadata, as provider-specific character restrictions may need\n to be applied.\n\n Args:\n script: path to the job script\n job_name: user-supplied job name, if any\n user_id: user whose jobs to look for","docstring_summary":"Returns a dictionary of metadata fields for the job.","docstring_tokens":["Returns","a","dictionary","of","metadata","fields","for","the","job","."],"function":"def prepare_job_metadata(self, script, job_name, user_id):\n \"\"\"Returns a dictionary of metadata fields for the job.\n\n Call this before calling submit_job.\n\n The job metadata is a dictionary of values relevant to the job as a whole,\n such as:\n\n * job-name\n * job-id\n * user-id\n * script\n * dsub-version\n\n The creation of the job metadata is done by the provider, as the rules\n around these items are provider-specific. For example, one job provider\n might have an 8 character restriction on job names, while another may be\n restricted to lower-case characters.\n\n The provider *must* set the job-name, job-id, and user-id. 
They may be used\n by the dsub infrastructure for display and direct return to callers.\n\n The provider is free to set any other key that it will need in\n submit_job().\n\n The \"job_name\" passed in need not be the same 'job-name' value set in the\n returned job metadata, as provider-specific character restrictions may need\n to be applied.\n\n Args:\n script: path to the job script\n job_name: user-supplied job name, if any\n user_id: user whose jobs to look for\n \"\"\"\n raise NotImplementedError()","function_tokens":["def","prepare_job_metadata","(","self",",","script",",","job_name",",","user_id",")",":","raise","NotImplementedError","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/base.py#L45-L79"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/base.py","language":"python","identifier":"JobProvider.submit_job","parameters":"(self, job_descriptor, skip_if_output_present)","argument_list":"","return_statement":"","docstring":"Submit the job to be executed.\n\n Args:\n job_descriptor (job_model.JobDescriptor): parameters needed to launch all\n job tasks\n skip_if_output_present: (boolean) if true, skip tasks whose output\n is present (see --skip flag for more explanation).\n\n\n Returns:\n A dictionary containing the 'user-id', 'job-id', and 'task-id' list.\n For jobs that are not task array jobs, the task-id list should be empty.\n If all tasks were skipped, then the job-id is dsub_lib.NO_JOB.\n\n\n Raises:\n ValueError: submit_job may validate any of the parameters and raise\n a value error if any parameter (or specific combination of parameters)\n is not supported by the provider.","docstring_summary":"Submit the job to be executed.","docstring_tokens":["Submit","the","job","to","be","executed","."],"function":"def submit_job(self, job_descriptor, skip_if_output_present):\n \"\"\"Submit the job to be executed.\n\n Args:\n job_descriptor (job_model.JobDescriptor): parameters needed to launch all\n job tasks\n skip_if_output_present: (boolean) if true, skip tasks whose output\n is present (see --skip flag for more explanation).\n\n\n Returns:\n A dictionary containing the 'user-id', 'job-id', and 'task-id' list.\n For jobs that are not task array jobs, the task-id list should be empty.\n If all tasks were skipped, then the job-id is dsub_lib.NO_JOB.\n\n\n Raises:\n ValueError: submit_job may validate any of the parameters and raise\n a value error if any parameter (or specific combination of parameters)\n is not supported by the provider.\n \"\"\"\n raise NotImplementedError()","function_tokens":["def","submit_job","(","self",",","job_descriptor",",","skip_if_output_present",")",":","raise","NotImplementedError","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/base.py#L82-L103"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/base.py","language":"python","identifier":"JobProvider.delete_jobs","parameters":"(self,\n user_ids,\n job_ids,\n task_ids,\n labels,\n create_time_min=None,\n create_time_max=None)","argument_list":"","return_statement":"","docstring":"Kills the operations associated with the specified job or job.task.\n\n Some providers may provide only a \"cancel\" operation, which terminates the\n task but does not truly \"delete\" it from the \"task list\".\n\n Args:\n user_ids: a set of user ids who \"own\" the 
job(s) to delete.\n job_ids: a set of job ids to delete.\n task_ids: a set of task ids to delete.\n labels: a set of LabelParam, each must match the job(s) to be cancelled.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent\n create time of a task, inclusive.\n\n Returns:\n (list of tasks canceled,\n for each task that couldn't be canceled, the error message).\n\n Only tasks that were running are included in the return value.","docstring_summary":"Kills the operations associated with the specified job or job.task.","docstring_tokens":["Kills","the","operations","associated","with","the","specified","job","or","job",".","task","."],"function":"def delete_jobs(self,\n user_ids,\n job_ids,\n task_ids,\n labels,\n create_time_min=None,\n create_time_max=None):\n \"\"\"Kills the operations associated with the specified job or job.task.\n\n Some providers may provide only a \"cancel\" operation, which terminates the\n task but does not truly \"delete\" it from the \"task list\".\n\n Args:\n user_ids: a set of user ids who \"own\" the job(s) to delete.\n job_ids: a set of job ids to delete.\n task_ids: a set of task ids to delete.\n labels: a set of LabelParam, each must match the job(s) to be cancelled.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent\n create time of a task, inclusive.\n\n Returns:\n (list of tasks canceled,\n for each task that couldn't be canceled, the error message).\n\n Only tasks that were running are included in the return value.\n \"\"\"\n raise NotImplementedError()","function_tokens":["def","delete_jobs","(","self",",","user_ids",",","job_ids",",","task_ids",",","labels",",","create_time_min","=","None",",","create_time_max","=","None",")",":","raise","NotImplementedError","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/base.py#L106-L134"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/base.py","language":"python","identifier":"JobProvider.lookup_job_tasks","parameters":"(self,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0,\n page_size=0)","argument_list":"","return_statement":"","docstring":"Return a list of tasks based on the search criteria.\n\n If any of the filters are empty or {'*'}, then no filtering is performed on\n that field. Filtering by both a job id list and job name list is\n unsupported.\n\n Args:\n statuses: {'*'}, or a set of job status strings to return. 
Valid\n status strings are 'RUNNING', 'SUCCESS', 'FAILURE', or 'CANCELED'.\n user_ids: a set of ids for the user(s) who launched the job.\n job_ids: a set of job ids to return.\n job_names: a set of job names to return.\n task_ids: a set of specific tasks within the specified job(s) to return.\n task_attempts: a set of specific task attempts within the specified\n tasks(s) to return.\n labels: a list of LabelParam, each must match the job(s) returned.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent\n create time of a task, inclusive.\n max_tasks: the maximum number of job tasks to return or 0 for no limit.\n page_size: the page size to use for each query to the backend. May be\n ignored by provider implementations.\n\n Returns:\n A list of Task objects.\n\n Raises:\n ValueError: if both a job id list and a job name list are provided","docstring_summary":"Return a list of tasks based on the search criteria.","docstring_tokens":["Return","a","list","of","tasks","based","on","the","search","criteria","."],"function":"def lookup_job_tasks(self,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0,\n page_size=0):\n \"\"\"Return a list of tasks based on the search criteria.\n\n If any of the filters are empty or {'*'}, then no filtering is performed on\n that field. Filtering by both a job id list and job name list is\n unsupported.\n\n Args:\n statuses: {'*'}, or a set of job status strings to return. Valid\n status strings are 'RUNNING', 'SUCCESS', 'FAILURE', or 'CANCELED'.\n user_ids: a set of ids for the user(s) who launched the job.\n job_ids: a set of job ids to return.\n job_names: a set of job names to return.\n task_ids: a set of specific tasks within the specified job(s) to return.\n task_attempts: a set of specific task attempts within the specified\n tasks(s) to return.\n labels: a list of LabelParam, each must match the job(s) returned.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent\n create time of a task, inclusive.\n max_tasks: the maximum number of job tasks to return or 0 for no limit.\n page_size: the page size to use for each query to the backend. 
May be\n ignored by provider implementations.\n\n Returns:\n A list of Task objects.\n\n Raises:\n ValueError: if both a job id list and a job name list are provided\n \"\"\"\n raise NotImplementedError()","function_tokens":["def","lookup_job_tasks","(","self",",","statuses",",","user_ids","=","None",",","job_ids","=","None",",","job_names","=","None",",","task_ids","=","None",",","task_attempts","=","None",",","labels","=","None",",","create_time_min","=","None",",","create_time_max","=","None",",","max_tasks","=","0",",","page_size","=","0",")",":","raise","NotImplementedError","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/base.py#L137-L179"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/base.py","language":"python","identifier":"JobProvider.get_tasks_completion_messages","parameters":"(self, tasks)","argument_list":"","return_statement":"","docstring":"List of the error message of each given task.","docstring_summary":"List of the error message of each given task.","docstring_tokens":["List","of","the","error","message","of","each","given","task","."],"function":"def get_tasks_completion_messages(self, tasks):\n \"\"\"List of the error message of each given task.\"\"\"\n raise NotImplementedError()","function_tokens":["def","get_tasks_completion_messages","(","self",",","tasks",")",":","raise","NotImplementedError","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/base.py#L182-L184"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/base.py","language":"python","identifier":"Task.raw_task_data","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Return a provider-specific representation of task data.\n\n Returns:\n dictionary of task data from the provider.","docstring_summary":"Return a provider-specific representation of task data.","docstring_tokens":["Return","a","provider","-","specific","representation","of","task","data","."],"function":"def raw_task_data(self):\n \"\"\"Return a provider-specific representation of task data.\n\n Returns:\n dictionary of task data from the provider.\n \"\"\"\n raise NotImplementedError()","function_tokens":["def","raw_task_data","(","self",")",":","raise","NotImplementedError","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/base.py#L191-L197"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/base.py","language":"python","identifier":"Task.get_field","parameters":"(self, field, default=None)","argument_list":"","return_statement":"","docstring":"Return a metadata-field for the task.\n\n Not all fields need to be supported by all providers.\n Field identifiers include:\n\n 'job-name', 'job-id', 'task-id', 'task-attempt', 'user-id', 'task-status',\n 'error-message', 'create-time', 'start-time', 'end-time', 'inputs',\n 'outputs', 'events'\n\n The following are required by dstat:\n - status: The task status ('RUNNING', 'CANCELED', 'FAILED', 'SUCCESS')\n - status-message: A short message that is displayed in the default\n dstat output. This should be as concise and useful as\n possible (\"Pending\", \"Running\", \"Error: invalid...\")\n - status-detail: A longer status message that is displayed in full dstat\n output. 
Ideally, this is the last few lines of a log which\n gives the user enough information that they do not need\n to go to the log files.\n\n dstat's short output shows status-message.\n dstat's full output shows status and status-detail\n\n Args:\n field: one of the choices listed above.\n default: the value to return if no value if found.","docstring_summary":"Return a metadata-field for the task.","docstring_tokens":["Return","a","metadata","-","field","for","the","task","."],"function":"def get_field(self, field, default=None):\n \"\"\"Return a metadata-field for the task.\n\n Not all fields need to be supported by all providers.\n Field identifiers include:\n\n 'job-name', 'job-id', 'task-id', 'task-attempt', 'user-id', 'task-status',\n 'error-message', 'create-time', 'start-time', 'end-time', 'inputs',\n 'outputs', 'events'\n\n The following are required by dstat:\n - status: The task status ('RUNNING', 'CANCELED', 'FAILED', 'SUCCESS')\n - status-message: A short message that is displayed in the default\n dstat output. This should be as concise and useful as\n possible (\"Pending\", \"Running\", \"Error: invalid...\")\n - status-detail: A longer status message that is displayed in full dstat\n output. Ideally, this is the last few lines of a log which\n gives the user enough information that they do not need\n to go to the log files.\n\n dstat's short output shows status-message.\n dstat's full output shows status and status-detail\n\n Args:\n field: one of the choices listed above.\n default: the value to return if no value if found.\n \"\"\"\n raise NotImplementedError()","function_tokens":["def","get_field","(","self",",","field",",","default","=","None",")",":","raise","NotImplementedError","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/base.py#L200-L227"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/stub.py","language":"python","identifier":"StubJobProvider.set_operations","parameters":"(self, ops)","argument_list":"","return_statement":"","docstring":"Set the state of the fictional world.\n\n Args:\n ops: a list of dict, each representing an operation.\n\n Operations can have the following fields:\n - status: tuple (string,date)\n - user: string\n - job-id: string\n - job-name: string\n - task-id: string\n - task-attempt: integer\n - labels: list\n - status-message: string\n - error-messages : list of string","docstring_summary":"Set the state of the fictional world.","docstring_tokens":["Set","the","state","of","the","fictional","world","."],"function":"def set_operations(self, ops):\n \"\"\"Set the state of the fictional world.\n\n Args:\n ops: a list of dict, each representing an operation.\n\n Operations can have the following fields:\n - status: tuple (string,date)\n - user: string\n - job-id: string\n - job-name: string\n - task-id: string\n - task-attempt: integer\n - labels: list\n - status-message: string\n - error-messages : list of string\n \"\"\"\n self._operations = [StubTask(o) for o in ops]","function_tokens":["def","set_operations","(","self",",","ops",")",":","self",".","_operations","=","[","StubTask","(","o",")","for","o","in","ops","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/stub.py#L45-L62"} 
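set_operations above seeds the stub provider's "fictional world" with plain dicts wrapped in StubTask objects, and the stub's lookup_job_tasks (next record) filters them field by field, treating a {'*'} argument as "do not filter on this field". A sketch of how a unit test might combine the two; it assumes StubJobProvider takes no constructor arguments and that StubTask.get_field reads the seeded dict keys, with field shapes following the set_operations docstring (status is a (string, date) tuple):

import datetime

# Assumed no-argument constructor; field shapes follow the docstring above.
provider = StubJobProvider()
provider.set_operations([
    {'status': ('SUCCESS', datetime.date(2020, 1, 1)),
     'user': 'alice', 'job-id': 'job-1'},
    {'status': ('RUNNING', datetime.date(2020, 1, 2)),
     'user': 'bob', 'job-id': 'job-2'},
])
# {'*'} disables filtering on that field; other sets filter by membership.
running = provider.lookup_job_tasks(statuses={'RUNNING'}, user_ids={'*'})
assert [t.get_field('job-id') for t in running] == ['job-2']
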
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/stub.py","language":"python","identifier":"StubJobProvider.lookup_job_tasks","parameters":"(self,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0)","argument_list":"","return_statement":"return operations","docstring":"Return a list of operations. See base.py for additional detail.","docstring_summary":"Return a list of operations. See base.py for additional detail.","docstring_tokens":["Return","a","list","of","operations",".","See","base",".","py","for","additional","detail","."],"function":"def lookup_job_tasks(self,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0):\n \"\"\"Return a list of operations. See base.py for additional detail.\"\"\"\n statuses = None if statuses == {'*'} else statuses\n user_ids = None if user_ids == {'*'} else user_ids\n job_ids = None if job_ids == {'*'} else job_ids\n job_names = None if job_names == {'*'} else job_names\n task_ids = None if task_ids == {'*'} else task_ids\n task_attempts = None if task_attempts == {'*'} else task_attempts\n\n if labels or create_time_min or create_time_max:\n raise NotImplementedError(\n 'Lookup by labels and create_time not yet supported by stub.')\n\n # pylint: disable=g-complex-comprehension\n operations = [\n x for x in self._operations\n if ((not statuses or x.get_field('status', (None, None))[0] in statuses\n ) and (not user_ids or x.get_field('user', None) in user_ids) and\n (not job_ids or x.get_field('job-id', None) in job_ids) and\n (not job_names or x.get_field('job-name', None) in job_names) and\n (not task_ids or x.get_field('task-id', None) in task_ids) and\n (not task_attempts or\n x.get_field('task-attempt', None) in task_attempts))\n ]\n # pylint: enable=g-complex-comprehension\n\n if max_tasks > 0:\n operations = operations[:max_tasks]\n return operations","function_tokens":["def","lookup_job_tasks","(","self",",","statuses",",","user_ids","=","None",",","job_ids","=","None",",","job_names","=","None",",","task_ids","=","None",",","task_attempts","=","None",",","labels","=","None",",","create_time_min","=","None",",","create_time_max","=","None",",","max_tasks","=","0",")",":","statuses","=","None","if","statuses","==","{","'*'","}","else","statuses","user_ids","=","None","if","user_ids","==","{","'*'","}","else","user_ids","job_ids","=","None","if","job_ids","==","{","'*'","}","else","job_ids","job_names","=","None","if","job_names","==","{","'*'","}","else","job_names","task_ids","=","None","if","task_ids","==","{","'*'","}","else","task_ids","task_attempts","=","None","if","task_attempts","==","{","'*'","}","else","task_attempts","if","labels","or","create_time_min","or","create_time_max",":","raise","NotImplementedError","(","'Lookup by labels and create_time not yet supported by stub.'",")","# pylint: 
disable=g-complex-comprehension","operations","=","[","x","for","x","in","self",".","_operations","if","(","(","not","statuses","or","x",".","get_field","(","'status'",",","(","None",",","None",")",")","[","0","]","in","statuses",")","and","(","not","user_ids","or","x",".","get_field","(","'user'",",","None",")","in","user_ids",")","and","(","not","job_ids","or","x",".","get_field","(","'job-id'",",","None",")","in","job_ids",")","and","(","not","job_names","or","x",".","get_field","(","'job-name'",",","None",")","in","job_names",")","and","(","not","task_ids","or","x",".","get_field","(","'task-id'",",","None",")","in","task_ids",")","and","(","not","task_attempts","or","x",".","get_field","(","'task-attempt'",",","None",")","in","task_attempts",")",")","]","# pylint: enable=g-complex-comprehension","if","max_tasks",">","0",":","operations","=","operations","[",":","max_tasks","]","return","operations"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/stub.py#L75-L113"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"get_zones","parameters":"(input_list)","argument_list":"","return_statement":"return output_list","docstring":"Returns a list of zones based on any wildcard input.\n\n This function is intended to provide an easy method for producing a list\n of desired zones for a pipeline to run in.\n\n The Pipelines API default zone list is \"any zone\". The problem with\n \"any zone\" is that it can lead to incurring Cloud Storage egress charges\n if the GCE zone selected is in a different region than the GCS bucket.\n See https:\/\/cloud.google.com\/storage\/pricing#network-egress.\n\n A user with a multi-region US bucket would want to pipelines to run in\n a \"us-*\" zone.\n A user with a regional bucket in US would want to restrict pipelines to\n run in a zone in that region.\n\n Rarely does the specific zone matter for a pipeline.\n\n This function allows for a simple short-hand such as:\n [ \"us-*\" ]\n [ \"us-central1-*\" ]\n These examples will expand out to the full list of US and us-central1 zones\n respectively.\n\n Args:\n input_list: list of zone names\/patterns\n\n Returns:\n A list of zones, with any wildcard zone specifications expanded.","docstring_summary":"Returns a list of zones based on any wildcard input.","docstring_tokens":["Returns","a","list","of","zones","based","on","any","wildcard","input","."],"function":"def get_zones(input_list):\n \"\"\"Returns a list of zones based on any wildcard input.\n\n This function is intended to provide an easy method for producing a list\n of desired zones for a pipeline to run in.\n\n The Pipelines API default zone list is \"any zone\". 
The problem with\n \"any zone\" is that it can lead to incurring Cloud Storage egress charges\n if the GCE zone selected is in a different region than the GCS bucket.\n See https:\/\/cloud.google.com\/storage\/pricing#network-egress.\n\n A user with a multi-region US bucket would want to pipelines to run in\n a \"us-*\" zone.\n A user with a regional bucket in US would want to restrict pipelines to\n run in a zone in that region.\n\n Rarely does the specific zone matter for a pipeline.\n\n This function allows for a simple short-hand such as:\n [ \"us-*\" ]\n [ \"us-central1-*\" ]\n These examples will expand out to the full list of US and us-central1 zones\n respectively.\n\n Args:\n input_list: list of zone names\/patterns\n\n Returns:\n A list of zones, with any wildcard zone specifications expanded.\n \"\"\"\n if not input_list:\n return []\n\n output_list = []\n\n for zone in input_list:\n if zone.endswith('*'):\n prefix = zone[:-1]\n output_list.extend([z for z in _ZONES if z.startswith(prefix)])\n else:\n output_list.append(zone)\n\n return output_list","function_tokens":["def","get_zones","(","input_list",")",":","if","not","input_list",":","return","[","]","output_list","=","[","]","for","zone","in","input_list",":","if","zone",".","endswith","(","'*'",")",":","prefix","=","zone","[",":","-","1","]","output_list",".","extend","(","[","z","for","z","in","_ZONES","if","z",".","startswith","(","prefix",")","]",")","else",":","output_list",".","append","(","zone",")","return","output_list"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L152-L194"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"build_pipeline_labels","parameters":"(job_metadata, task_metadata, task_id_pattern=None)","argument_list":"","return_statement":"return labels","docstring":"Build a set() of standard job and task labels.\n\n Args:\n job_metadata: Job metadata, such as job-id, job-name, and user-id.\n task_metadata: Task metadata, such as the task-id.\n task_id_pattern: A pattern for the task-id value, such as \"task-%d\"; the\n original google label values could not be strictly numeric, so \"task-\"\n was prepended.\n\n Returns:\n A set of standard dsub Label() objects to attach to a pipeline.","docstring_summary":"Build a set() of standard job and task labels.","docstring_tokens":["Build","a","set","()","of","standard","job","and","task","labels","."],"function":"def build_pipeline_labels(job_metadata, task_metadata, task_id_pattern=None):\n \"\"\"Build a set() of standard job and task labels.\n\n Args:\n job_metadata: Job metadata, such as job-id, job-name, and user-id.\n task_metadata: Task metadata, such as the task-id.\n task_id_pattern: A pattern for the task-id value, such as \"task-%d\"; the\n original google label values could not be strictly numeric, so \"task-\"\n was prepended.\n\n Returns:\n A set of standard dsub Label() objects to attach to a pipeline.\n \"\"\"\n labels = {\n Label(name, job_metadata[name])\n for name in ['job-name', 'job-id', 'user-id', 'dsub-version']\n }\n\n task_id = task_metadata.get('task-id')\n if task_id is not None: # Check for None (as 0 is conceivably valid)\n if task_id_pattern:\n task_id = task_id_pattern % task_id\n labels.add(Label('task-id', str(task_id)))\n\n task_attempt = task_metadata.get('task-attempt')\n if task_attempt is not None:\n labels.add(Label('task-attempt', 
str(task_attempt)))\n\n return labels","function_tokens":["def","build_pipeline_labels","(","job_metadata",",","task_metadata",",","task_id_pattern","=","None",")",":","labels","=","{","Label","(","name",",","job_metadata","[","name","]",")","for","name","in","[","'job-name'",",","'job-id'",",","'user-id'",",","'dsub-version'","]","}","task_id","=","task_metadata",".","get","(","'task-id'",")","if","task_id","is","not","None",":","# Check for None (as 0 is conceivably valid)","if","task_id_pattern",":","task_id","=","task_id_pattern","%","task_id","labels",".","add","(","Label","(","'task-id'",",","str","(","task_id",")",")",")","task_attempt","=","task_metadata",".","get","(","'task-attempt'",")","if","task_attempt","is","not","None",":","labels",".","add","(","Label","(","'task-attempt'",",","str","(","task_attempt",")",")",")","return","labels"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L208-L236"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"prepare_query_label_value","parameters":"(labels)","argument_list":"","return_statement":"return [job_model.convert_to_label_chars(label) for label in labels]","docstring":"Converts the label strings to contain label-appropriate characters.\n\n Args:\n labels: A set of strings to be converted.\n\n Returns:\n A list of converted strings.","docstring_summary":"Converts the label strings to contain label-appropriate characters.","docstring_tokens":["Converts","the","label","strings","to","contain","label","-","appropriate","characters","."],"function":"def prepare_query_label_value(labels):\n \"\"\"Converts the label strings to contain label-appropriate characters.\n\n Args:\n labels: A set of strings to be converted.\n\n Returns:\n A list of converted strings.\n \"\"\"\n if not labels:\n return None\n return [job_model.convert_to_label_chars(label) for label in labels]","function_tokens":["def","prepare_query_label_value","(","labels",")",":","if","not","labels",":","return","None","return","[","job_model",".","convert_to_label_chars","(","label",")","for","label","in","labels","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L239-L250"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"parse_rfc3339_utc_string","parameters":"(rfc3339_utc_string)","argument_list":"","return_statement":"","docstring":"Converts a datestamp from RFC3339 UTC to a datetime.\n\n Args:\n rfc3339_utc_string: a datetime string in RFC3339 UTC \"Zulu\" format\n\n Returns:\n A datetime.","docstring_summary":"Converts a datestamp from RFC3339 UTC to a datetime.","docstring_tokens":["Converts","a","datestamp","from","RFC3339","UTC","to","a","datetime","."],"function":"def parse_rfc3339_utc_string(rfc3339_utc_string):\n \"\"\"Converts a datestamp from RFC3339 UTC to a datetime.\n\n Args:\n rfc3339_utc_string: a datetime string in RFC3339 UTC \"Zulu\" format\n\n Returns:\n A datetime.\n \"\"\"\n\n # The timestamp from the Google Operations are all in RFC3339 format, but\n # they are sometimes formatted to millisconds, microseconds, sometimes\n # nanoseconds, and sometimes only seconds:\n # * 2016-11-14T23:05:56Z\n # * 2016-11-14T23:05:56.010Z\n # * 2016-11-14T23:05:56.010429Z\n # * 
2016-11-14T23:05:56.010429380Z\n m = re.match(r'(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}).?(\\d*)Z',\n rfc3339_utc_string)\n\n # It would be unexpected to get a different date format back from Google.\n # If we raise an exception here, we can break people completely.\n # Instead, let's just return None and people can report that some dates\n # are not showing up.\n # We might reconsider this approach in the future; it was originally\n # established when dates were only used for display.\n if not m:\n return None\n\n groups = m.groups()\n if len(groups[6]) not in (0, 3, 6, 9):\n return None\n\n # Create a UTC datestamp from parsed components\n # 1- Turn components 0-5 from strings to integers\n # 2- If the last component does not exist, set it to 0.\n # If it does exist, make sure to interpret it as milliseconds.\n g = [int(val) for val in groups[:6]]\n\n fraction = groups[6]\n if not fraction:\n micros = 0\n elif len(fraction) == 3:\n micros = int(fraction) * 1000\n elif len(fraction) == 6:\n micros = int(fraction)\n elif len(fraction) == 9:\n # When nanoseconds are provided, we round\n micros = int(round(int(fraction) \/\/ 1000))\n else:\n assert False, 'Fraction length not 0, 6, or 9: {}'.format(len(fraction))\n\n try:\n return datetime.datetime(\n g[0], g[1], g[2], g[3], g[4], g[5], micros, tzinfo=pytz.utc)\n except ValueError as e:\n assert False, 'Could not parse RFC3339 datestring: {} exception: {}'.format(\n rfc3339_utc_string, e)","function_tokens":["def","parse_rfc3339_utc_string","(","rfc3339_utc_string",")",":","# The timestamp from the Google Operations are all in RFC3339 format, but","# they are sometimes formatted to millisconds, microseconds, sometimes","# nanoseconds, and sometimes only seconds:","# * 2016-11-14T23:05:56Z","# * 2016-11-14T23:05:56.010Z","# * 2016-11-14T23:05:56.010429Z","# * 2016-11-14T23:05:56.010429380Z","m","=","re",".","match","(","r'(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2}).?(\\d*)Z'",",","rfc3339_utc_string",")","# It would be unexpected to get a different date format back from Google.","# If we raise an exception here, we can break people completely.","# Instead, let's just return None and people can report that some dates","# are not showing up.","# We might reconsider this approach in the future; it was originally","# established when dates were only used for display.","if","not","m",":","return","None","groups","=","m",".","groups","(",")","if","len","(","groups","[","6","]",")","not","in","(","0",",","3",",","6",",","9",")",":","return","None","# Create a UTC datestamp from parsed components","# 1- Turn components 0-5 from strings to integers","# 2- If the last component does not exist, set it to 0.","# If it does exist, make sure to interpret it as milliseconds.","g","=","[","int","(","val",")","for","val","in","groups","[",":","6","]","]","fraction","=","groups","[","6","]","if","not","fraction",":","micros","=","0","elif","len","(","fraction",")","==","3",":","micros","=","int","(","fraction",")","*","1000","elif","len","(","fraction",")","==","6",":","micros","=","int","(","fraction",")","elif","len","(","fraction",")","==","9",":","# When nanoseconds are provided, we round","micros","=","int","(","round","(","int","(","fraction",")","\/\/","1000",")",")","else",":","assert","False",",","'Fraction length not 0, 6, or 9: 
{}'",".","format","(","len","(","fraction",")",")","try",":","return","datetime",".","datetime","(","g","[","0","]",",","g","[","1","]",",","g","[","2","]",",","g","[","3","]",",","g","[","4","]",",","g","[","5","]",",","micros",",","tzinfo","=","pytz",".","utc",")","except","ValueError","as","e",":","assert","False",",","'Could not parse RFC3339 datestring: {} exception: {}'",".","format","(","rfc3339_utc_string",",","e",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L253-L310"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"get_operation_full_job_id","parameters":"(op)","argument_list":"","return_statement":"","docstring":"Returns the job-id or job-id.task-id for the operation.","docstring_summary":"Returns the job-id or job-id.task-id for the operation.","docstring_tokens":["Returns","the","job","-","id","or","job","-","id",".","task","-","id","for","the","operation","."],"function":"def get_operation_full_job_id(op):\n \"\"\"Returns the job-id or job-id.task-id for the operation.\"\"\"\n job_id = op.get_field('job-id')\n task_id = op.get_field('task-id')\n if task_id:\n return '%s.%s' % (job_id, task_id)\n else:\n return job_id","function_tokens":["def","get_operation_full_job_id","(","op",")",":","job_id","=","op",".","get_field","(","'job-id'",")","task_id","=","op",".","get_field","(","'task-id'",")","if","task_id",":","return","'%s.%s'","%","(","job_id",",","task_id",")","else",":","return","job_id"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L313-L320"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"_cancel_batch","parameters":"(batch_fn, cancel_fn, ops)","argument_list":"","return_statement":"return canceled_ops, error_messages","docstring":"Cancel a batch of operations.\n\n Args:\n batch_fn: API-specific batch function.\n cancel_fn: API-specific cancel function.\n ops: A list of operations to cancel.\n\n Returns:\n A list of operations canceled and a list of error messages.","docstring_summary":"Cancel a batch of operations.","docstring_tokens":["Cancel","a","batch","of","operations","."],"function":"def _cancel_batch(batch_fn, cancel_fn, ops):\n \"\"\"Cancel a batch of operations.\n\n Args:\n batch_fn: API-specific batch function.\n cancel_fn: API-specific cancel function.\n ops: A list of operations to cancel.\n\n Returns:\n A list of operations canceled and a list of error messages.\n \"\"\"\n\n # We define an inline callback which will populate a list of\n # successfully canceled operations as well as a list of operations\n # which were not successfully canceled.\n\n canceled = []\n failed = []\n\n def handle_cancel_response(request_id, response, exception):\n \"\"\"Callback for the cancel response.\"\"\"\n del response # unused\n\n if exception:\n # We don't generally expect any failures here, except possibly trying\n # to cancel an operation that is already canceled or finished.\n #\n # If the operation is already finished, provide a clearer message than\n # \"error 400: Bad Request\".\n\n msg = 'error %s: %s' % (exception.resp.status, exception.resp.reason)\n if exception.resp.status == FAILED_PRECONDITION_CODE:\n detail = json.loads(exception.content)\n status = detail.get('error', 
{}).get('status')\n if status == FAILED_PRECONDITION_STATUS:\n msg = 'Not running'\n\n failed.append({'name': request_id, 'msg': msg})\n else:\n canceled.append({'name': request_id})\n\n return\n\n # Set up the batch object\n batch = batch_fn(callback=handle_cancel_response)\n\n # The callback gets a \"request_id\" which is the operation name.\n # Build a dict such that after the callback, we can lookup the operation\n # objects by name\n ops_by_name = {}\n for op in ops:\n op_name = op.get_field('internal-id')\n ops_by_name[op_name] = op\n batch.add(cancel_fn(name=op_name, body={}), request_id=op_name)\n\n # Cancel the operations\n batch.execute()\n\n # Iterate through the canceled and failed lists to build our return lists\n canceled_ops = [ops_by_name[op['name']] for op in canceled]\n error_messages = []\n for fail in failed:\n op = ops_by_name[fail['name']]\n error_messages.append(\"Error canceling '%s': %s\" %\n (get_operation_full_job_id(op), fail['msg']))\n\n return canceled_ops, error_messages","function_tokens":["def","_cancel_batch","(","batch_fn",",","cancel_fn",",","ops",")",":","# We define an inline callback which will populate a list of","# successfully canceled operations as well as a list of operations","# which were not successfully canceled.","canceled","=","[","]","failed","=","[","]","def","handle_cancel_response","(","request_id",",","response",",","exception",")",":","\"\"\"Callback for the cancel response.\"\"\"","del","response","# unused","if","exception",":","# We don't generally expect any failures here, except possibly trying","# to cancel an operation that is already canceled or finished.","#","# If the operation is already finished, provide a clearer message than","# \"error 400: Bad Request\".","msg","=","'error %s: %s'","%","(","exception",".","resp",".","status",",","exception",".","resp",".","reason",")","if","exception",".","resp",".","status","==","FAILED_PRECONDITION_CODE",":","detail","=","json",".","loads","(","exception",".","content",")","status","=","detail",".","get","(","'error'",",","{","}",")",".","get","(","'status'",")","if","status","==","FAILED_PRECONDITION_STATUS",":","msg","=","'Not running'","failed",".","append","(","{","'name'",":","request_id",",","'msg'",":","msg","}",")","else",":","canceled",".","append","(","{","'name'",":","request_id","}",")","return","# Set up the batch object","batch","=","batch_fn","(","callback","=","handle_cancel_response",")","# The callback gets a \"request_id\" which is the operation name.","# Build a dict such that after the callback, we can lookup the operation","# objects by name","ops_by_name","=","{","}","for","op","in","ops",":","op_name","=","op",".","get_field","(","'internal-id'",")","ops_by_name","[","op_name","]","=","op","batch",".","add","(","cancel_fn","(","name","=","op_name",",","body","=","{","}",")",",","request_id","=","op_name",")","# Cancel the operations","batch",".","execute","(",")","# Iterate through the canceled and failed lists to build our return lists","canceled_ops","=","[","ops_by_name","[","op","[","'name'","]","]","for","op","in","canceled","]","error_messages","=","[","]","for","fail","in","failed",":","op","=","ops_by_name","[","fail","[","'name'","]","]","error_messages",".","append","(","\"Error canceling '%s': 
%s\"","%","(","get_operation_full_job_id","(","op",")",",","fail","[","'msg'","]",")",")","return","canceled_ops",",","error_messages"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L323-L389"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"cancel","parameters":"(batch_fn, cancel_fn, ops)","argument_list":"","return_statement":"return canceled_ops, error_messages","docstring":"Cancel operations.\n\n Args:\n batch_fn: API-specific batch function.\n cancel_fn: API-specific cancel function.\n ops: A list of operations to cancel.\n\n Returns:\n A list of operations canceled and a list of error messages.","docstring_summary":"Cancel operations.","docstring_tokens":["Cancel","operations","."],"function":"def cancel(batch_fn, cancel_fn, ops):\n \"\"\"Cancel operations.\n\n Args:\n batch_fn: API-specific batch function.\n cancel_fn: API-specific cancel function.\n ops: A list of operations to cancel.\n\n Returns:\n A list of operations canceled and a list of error messages.\n \"\"\"\n\n # Canceling many operations one-by-one can be slow.\n # The Pipelines API doesn't directly support a list of operations to cancel,\n # but the requests can be performed in batch.\n\n canceled_ops = []\n error_messages = []\n\n max_batch = 256\n total_ops = len(ops)\n for first_op in range(0, total_ops, max_batch):\n batch_canceled, batch_messages = _cancel_batch(\n batch_fn, cancel_fn, ops[first_op:first_op + max_batch])\n canceled_ops.extend(batch_canceled)\n error_messages.extend(batch_messages)\n\n return canceled_ops, error_messages","function_tokens":["def","cancel","(","batch_fn",",","cancel_fn",",","ops",")",":","# Canceling many operations one-by-one can be slow.","# The Pipelines API doesn't directly support a list of operations to cancel,","# but the requests can be performed in batch.","canceled_ops","=","[","]","error_messages","=","[","]","max_batch","=","256","total_ops","=","len","(","ops",")","for","first_op","in","range","(","0",",","total_ops",",","max_batch",")",":","batch_canceled",",","batch_messages","=","_cancel_batch","(","batch_fn",",","cancel_fn",",","ops","[","first_op",":","first_op","+","max_batch","]",")","canceled_ops",".","extend","(","batch_canceled",")","error_messages",".","extend","(","batch_messages",")","return","canceled_ops",",","error_messages"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L392-L419"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"setup_service","parameters":"(api_name, api_version, credentials=None)","argument_list":"","return_statement":"return googleapiclient.discovery.build(\n api_name, api_version, cache_discovery=False, credentials=credentials)","docstring":"Configures genomics API client.\n\n Args:\n api_name: Name of the Google API (for example: \"genomics\")\n api_version: Version of the API (for example: \"v2alpha1\")\n credentials: Credentials to be used for the gcloud API calls.\n\n Returns:\n A configured Google Genomics API client with appropriate credentials.","docstring_summary":"Configures genomics API client.","docstring_tokens":["Configures","genomics","API","client","."],"function":"def setup_service(api_name, api_version, credentials=None):\n \"\"\"Configures 
genomics API client.\n\n Args:\n api_name: Name of the Google API (for example: \"genomics\")\n api_version: Version of the API (for example: \"v2alpha1\")\n credentials: Credentials to be used for the gcloud API calls.\n\n Returns:\n A configured Google Genomics API client with appropriate credentials.\n \"\"\"\n # dsub is not a server application, so it is ok to filter this warning.\n warnings.filterwarnings(\n 'ignore', 'Your application has authenticated using end user credentials')\n if not credentials:\n credentials, _ = google.auth.default()\n # Set cache_discovery to False because we use google-auth\n # See https:\/\/github.com\/googleapis\/google-api-python-client\/issues\/299\n return googleapiclient.discovery.build(\n api_name, api_version, cache_discovery=False, credentials=credentials)","function_tokens":["def","setup_service","(","api_name",",","api_version",",","credentials","=","None",")",":","# dsub is not a server application, so it is ok to filter this warning.","warnings",".","filterwarnings","(","'ignore'",",","'Your application has authenticated using end user credentials'",")","if","not","credentials",":","credentials",",","_","=","google",".","auth",".","default","(",")","# Set cache_discovery to False because we use google-auth","# See https:\/\/github.com\/googleapis\/google-api-python-client\/issues\/299","return","googleapiclient",".","discovery",".","build","(","api_name",",","api_version",",","cache_discovery","=","False",",","credentials","=","credentials",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L436-L455"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_base.py","language":"python","identifier":"Api.execute","parameters":"(self, api)","argument_list":"","return_statement":"return api.execute()","docstring":"Executes operation.\n\n Args:\n api: The base API object\n\n Returns:\n A response body object","docstring_summary":"Executes operation.","docstring_tokens":["Executes","operation","."],"function":"def execute(self, api):\n \"\"\"Executes operation.\n\n Args:\n api: The base API object\n\n Returns:\n A response body object\n \"\"\"\n return api.execute()","function_tokens":["def","execute","(","self",",","api",")",":","return","api",".","execute","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_base.py#L481-L490"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"_format_task_name","parameters":"(job_id, task_id, task_attempt)","argument_list":"","return_statement":"return 'dsub-{}'.format(_convert_suffix_to_docker_chars(docker_name))","docstring":"Create a task name from a job-id, task-id, and task-attempt.\n\n Task names are used internally by dsub as well as by the docker task runner.\n The name is formatted as \".[.task-attempt]\". 
Task names\n follow formatting conventions allowing them to be safely used as a docker\n name.\n\n Args:\n job_id: (str) the job ID.\n task_id: (str) the task ID.\n task_attempt: (int) the task attempt.\n\n Returns:\n a task name string.","docstring_summary":"Create a task name from a job-id, task-id, and task-attempt.","docstring_tokens":["Create","a","task","name","from","a","job","-","id","task","-","id","and","task","-","attempt","."],"function":"def _format_task_name(job_id, task_id, task_attempt):\n \"\"\"Create a task name from a job-id, task-id, and task-attempt.\n\n Task names are used internally by dsub as well as by the docker task runner.\n The name is formatted as \".[.task-attempt]\". Task names\n follow formatting conventions allowing them to be safely used as a docker\n name.\n\n Args:\n job_id: (str) the job ID.\n task_id: (str) the task ID.\n task_attempt: (int) the task attempt.\n\n Returns:\n a task name string.\n \"\"\"\n docker_name = '%s.%s' % (job_id, 'task' if task_id is None else task_id)\n\n if task_attempt is not None:\n docker_name += '.' + str(task_attempt)\n\n # Docker container names must match: [a-zA-Z0-9][a-zA-Z0-9_.-]\n # So 1) prefix it with \"dsub-\" and 2) change all invalid characters to \"-\".\n return 'dsub-{}'.format(_convert_suffix_to_docker_chars(docker_name))","function_tokens":["def","_format_task_name","(","job_id",",","task_id",",","task_attempt",")",":","docker_name","=","'%s.%s'","%","(","job_id",",","'task'","if","task_id","is","None","else","task_id",")","if","task_attempt","is","not","None",":","docker_name","+=","'.'","+","str","(","task_attempt",")","# Docker container names must match: [a-zA-Z0-9][a-zA-Z0-9_.-]","# So 1) prefix it with \"dsub-\" and 2) change all invalid characters to \"-\".","return","'dsub-{}'",".","format","(","_convert_suffix_to_docker_chars","(","docker_name",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L117-L140"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"_convert_suffix_to_docker_chars","parameters":"(suffix)","argument_list":"","return_statement":"return ''.join(label_char_transform(c) for c in suffix)","docstring":"Rewrite string so that all characters are valid in a docker name suffix.","docstring_summary":"Rewrite string so that all characters are valid in a docker name suffix.","docstring_tokens":["Rewrite","string","so","that","all","characters","are","valid","in","a","docker","name","suffix","."],"function":"def _convert_suffix_to_docker_chars(suffix):\n \"\"\"Rewrite string so that all characters are valid in a docker name suffix.\"\"\"\n # Docker container names must match: [a-zA-Z0-9][a-zA-Z0-9_.-]\n accepted_characters = string.ascii_letters + string.digits + '_.-'\n\n def label_char_transform(char):\n if char in accepted_characters:\n return char\n return '-'\n\n return ''.join(label_char_transform(c) for c in suffix)","function_tokens":["def","_convert_suffix_to_docker_chars","(","suffix",")",":","# Docker container names must match: 
[a-zA-Z0-9][a-zA-Z0-9_.-]","accepted_characters","=","string",".","ascii_letters","+","string",".","digits","+","'_.-'","def","label_char_transform","(","char",")",":","if","char","in","accepted_characters",":","return","char","return","'-'","return","''",".","join","(","label_char_transform","(","c",")","for","c","in","suffix",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L143-L153"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"_task_sort_function","parameters":"(task)","argument_list":"","return_statement":"return (task.get_field('create-time'), int(task.get_field('task-id', 0)),\n int(task.get_field('task-attempt', 0)))","docstring":"Return a tuple for sorting 'most recent first'.","docstring_summary":"Return a tuple for sorting 'most recent first'.","docstring_tokens":["Return","a","tuple","for","sorting","most","recent","first","."],"function":"def _task_sort_function(task):\n \"\"\"Return a tuple for sorting 'most recent first'.\"\"\"\n return (task.get_field('create-time'), int(task.get_field('task-id', 0)),\n int(task.get_field('task-attempt', 0)))","function_tokens":["def","_task_sort_function","(","task",")",":","return","(","task",".","get_field","(","'create-time'",")",",","int","(","task",".","get_field","(","'task-id'",",","0",")",")",",","int","(","task",".","get_field","(","'task-attempt'",",","0",")",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L156-L159"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"_sort_tasks","parameters":"(tasks)","argument_list":"","return_statement":"","docstring":"Sort tasks by 'most recent first'.","docstring_summary":"Sort tasks by 'most recent first'.","docstring_tokens":["Sort","tasks","by","most","recent","first","."],"function":"def _sort_tasks(tasks):\n \"\"\"Sort tasks by 'most recent first'.\"\"\"\n\n # The local provider can launch tasks quickly enough that they end up with\n # the same timestamp.\n #\n # lookup_job_tasks needs to return tasks sorted: \"most recent first\".\n # Sort the list of tasks by:\n # - create time (descending)\n # - task-id (descending, if any)\n\n tasks.sort(key=_task_sort_function, reverse=True)","function_tokens":["def","_sort_tasks","(","tasks",")",":","# The local provider can launch tasks quickly enough that they end up with","# the same timestamp.","#","# lookup_job_tasks needs to return tasks sorted: \"most recent first\".","# Sort the list of tasks by:","# - create time (descending)","# - task-id (descending, if any)","tasks",".","sort","(","key","=","_task_sort_function",",","reverse","=","True",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L162-L173"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider.__init__","parameters":"(self, resources)","argument_list":"","return_statement":"","docstring":"Run jobs on your local machine.\n\n Args:\n resources: module providing access to files packaged with dsub\n (See dsub\/libs\/resources.py)","docstring_summary":"Run jobs on your local 
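The _format_task_name and _convert_suffix_to_docker_chars records above combine into one small, testable sanitizer: a fixed "dsub-" prefix guarantees a valid first character, and each disallowed character in the suffix becomes "-". A sketch under the same Docker naming rule (task_name is a hypothetical stand-in, not dsub's API):

```python
import string

# A docker container name must match [a-zA-Z0-9][a-zA-Z0-9_.-]*; the fixed
# "dsub-" prefix guarantees a valid first character, and the suffix is
# sanitized character-by-character.
_ACCEPTED = set(string.ascii_letters + string.digits + '_.-')


def task_name(job_id, task_id=None, task_attempt=None):
    # "task" stands in for the task-id of non --tasks jobs.
    name = '{}.{}'.format(job_id, 'task' if task_id is None else task_id)
    if task_attempt is not None:
        name += '.' + str(task_attempt)
    return 'dsub-' + ''.join(c if c in _ACCEPTED else '-' for c in name)


# The space in the job-id is rewritten to "-".
assert task_name('my job', '7', 2) == 'dsub-my-job.7.2'
```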
machine.","docstring_tokens":["Run","jobs","on","your","local","machine","."],"function":"def __init__(self, resources):\n \"\"\"Run jobs on your local machine.\n\n Args:\n resources: module providing access to files packaged with dsub\n (See dsub\/libs\/resources.py)\n \"\"\"\n self._operations = []\n self._resources = resources","function_tokens":["def","__init__","(","self",",","resources",")",":","self",".","_operations","=","[","]","self",".","_resources","=","resources"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L179-L187"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._datetime_in_range","parameters":"(self, dt, dt_min=None, dt_max=None)","argument_list":"","return_statement":"return dt_min <= dt <= dt_max","docstring":"Determine if the provided time is within the range, inclusive.","docstring_summary":"Determine if the provided time is within the range, inclusive.","docstring_tokens":["Determine","if","the","provided","time","is","within","the","range","inclusive","."],"function":"def _datetime_in_range(self, dt, dt_min=None, dt_max=None):\n \"\"\"Determine if the provided time is within the range, inclusive.\"\"\"\n # The pipelines API stores operation create-time with second granularity.\n # We mimic this behavior in the local provider by truncating to seconds.\n dt = dt.replace(microsecond=0)\n if dt_min:\n dt_min = dt_min.replace(microsecond=0)\n else:\n dt_min = dsub_util.replace_timezone(datetime.datetime.min, pytz.utc)\n if dt_max:\n dt_max = dt_max.replace(microsecond=0)\n else:\n dt_max = dsub_util.replace_timezone(datetime.datetime.max, pytz.utc)\n\n return dt_min <= dt <= dt_max","function_tokens":["def","_datetime_in_range","(","self",",","dt",",","dt_min","=","None",",","dt_max","=","None",")",":","# The pipelines API stores operation create-time with second granularity.","# We mimic this behavior in the local provider by truncating to seconds.","dt","=","dt",".","replace","(","microsecond","=","0",")","if","dt_min",":","dt_min","=","dt_min",".","replace","(","microsecond","=","0",")","else",":","dt_min","=","dsub_util",".","replace_timezone","(","datetime",".","datetime",".","min",",","pytz",".","utc",")","if","dt_max",":","dt_max","=","dt_max",".","replace","(","microsecond","=","0",")","else",":","dt_max","=","dsub_util",".","replace_timezone","(","datetime",".","datetime",".","max",",","pytz",".","utc",")","return","dt_min","<=","dt","<=","dt_max"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L555-L569"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._get_task_from_task_dir","parameters":"(self, job_id, user_id, task_id, task_attempt)","argument_list":"","return_statement":"return LocalTask(\n task_status=status,\n events=events,\n log_detail=log_detail,\n job_descriptor=job_descriptor,\n end_time=end_time,\n last_update=last_update,\n pid=pid,\n script=script)","docstring":"Return a Task object with this task's info.","docstring_summary":"Return a Task object with this task's info.","docstring_tokens":["Return","a","Task","object","with","this","task","s","info","."],"function":"def _get_task_from_task_dir(self, job_id, user_id, task_id, task_attempt):\n \"\"\"Return a Task 
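The _datetime_in_range record above truncates to whole seconds before comparing, so local-provider filters behave like the pipelines API's second-granularity timestamps, and open-ended bounds become timezone-aware extremes. A hedged sketch of the same check (in_range is hypothetical; replace(tzinfo=pytz.utc) substitutes for dsub's replace_timezone helper, and dt is assumed timezone-aware):

```python
import datetime

import pytz


def in_range(dt, dt_min=None, dt_max=None):
    # Mimic the pipelines API's second granularity by dropping microseconds.
    dt = dt.replace(microsecond=0)
    # Open-ended bounds become timezone-aware extremes so the comparison
    # between aware datetimes is always legal.
    dt_min = (dt_min.replace(microsecond=0) if dt_min
              else datetime.datetime.min.replace(tzinfo=pytz.utc))
    dt_max = (dt_max.replace(microsecond=0) if dt_max
              else datetime.datetime.max.replace(tzinfo=pytz.utc))
    return dt_min <= dt <= dt_max
```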
object with this task's info.\"\"\"\n\n # We need to be very careful about how we read and interpret the contents\n # of the task directory. The directory could be changing because a new\n # task is being created. The directory could be changing because a task\n # is ending.\n #\n # If the meta.yaml does not exist, the task does not yet exist.\n # If the meta.yaml exists, it means the task is scheduled. It does not mean\n # it is yet running.\n # If the task.pid file exists, it means that the runner.sh was started.\n\n task_dir = self._task_directory(job_id, task_id, task_attempt)\n\n job_descriptor = self._read_task_metadata(task_dir)\n if not job_descriptor:\n return None\n\n # If we read up an old task, the user-id will not be in the job_descriptor.\n if not job_descriptor.job_metadata.get('user-id'):\n job_descriptor.job_metadata['user-id'] = user_id\n\n # Get the pid of the runner\n pid = -1\n try:\n with open(os.path.join(task_dir, 'task.pid'), 'r') as f:\n pid = int(f.readline().strip())\n except (IOError, OSError):\n pass\n\n # Get the script contents\n script = None\n script_name = job_descriptor.job_metadata.get('script-name')\n if script_name:\n script = self._read_script(task_dir, script_name)\n\n # Read the files written by the runner.sh.\n # For new tasks, these may not have been written yet.\n end_time = self._get_end_time_from_task_dir(task_dir)\n last_update = self._get_last_update_time_from_task_dir(task_dir)\n events = self._get_events_from_task_dir(task_dir)\n status = self._get_status_from_task_dir(task_dir)\n log_detail = self._get_log_detail_from_task_dir(task_dir)\n\n # If the status file is not yet written, then mark the task as pending\n if not status:\n status = 'RUNNING'\n log_detail = ['Pending']\n\n return LocalTask(\n task_status=status,\n events=events,\n log_detail=log_detail,\n job_descriptor=job_descriptor,\n end_time=end_time,\n last_update=last_update,\n pid=pid,\n script=script)","function_tokens":["def","_get_task_from_task_dir","(","self",",","job_id",",","user_id",",","task_id",",","task_attempt",")",":","# We need to be very careful about how we read and interpret the contents","# of the task directory. The directory could be changing because a new","# task is being created. The directory could be changing because a task","# is ending.","#","# If the meta.yaml does not exist, the task does not yet exist.","# If the meta.yaml exists, it means the task is scheduled. 
It does not mean","# it is yet running.","# If the task.pid file exists, it means that the runner.sh was started.","task_dir","=","self",".","_task_directory","(","job_id",",","task_id",",","task_attempt",")","job_descriptor","=","self",".","_read_task_metadata","(","task_dir",")","if","not","job_descriptor",":","return","None","# If we read up an old task, the user-id will not be in the job_descriptor.","if","not","job_descriptor",".","job_metadata",".","get","(","'user-id'",")",":","job_descriptor",".","job_metadata","[","'user-id'","]","=","user_id","# Get the pid of the runner","pid","=","-","1","try",":","with","open","(","os",".","path",".","join","(","task_dir",",","'task.pid'",")",",","'r'",")","as","f",":","pid","=","int","(","f",".","readline","(",")",".","strip","(",")",")","except","(","IOError",",","OSError",")",":","pass","# Get the script contents","script","=","None","script_name","=","job_descriptor",".","job_metadata",".","get","(","'script-name'",")","if","script_name",":","script","=","self",".","_read_script","(","task_dir",",","script_name",")","# Read the files written by the runner.sh.","# For new tasks, these may not have been written yet.","end_time","=","self",".","_get_end_time_from_task_dir","(","task_dir",")","last_update","=","self",".","_get_last_update_time_from_task_dir","(","task_dir",")","events","=","self",".","_get_events_from_task_dir","(","task_dir",")","status","=","self",".","_get_status_from_task_dir","(","task_dir",")","log_detail","=","self",".","_get_log_detail_from_task_dir","(","task_dir",")","# If the status file is not yet written, then mark the task as pending","if","not","status",":","status","=","'RUNNING'","log_detail","=","[","'Pending'","]","return","LocalTask","(","task_status","=","status",",","events","=","events",",","log_detail","=","log_detail",",","job_descriptor","=","job_descriptor",",","end_time","=","end_time",",","last_update","=","last_update",",","pid","=","pid",",","script","=","script",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L630-L688"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._delocalize_logging_command","parameters":"(self, logging_path, user_project)","argument_list":"","return_statement":"return body","docstring":"Returns a command to delocalize logs.\n\n Args:\n logging_path: location of log files.\n user_project: name of the project to be billed for the request.\n\n Returns:\n eg. 'gs:\/\/bucket\/path\/myfile' or 'gs:\/\/bucket\/script-foobar-12'","docstring_summary":"Returns a command to delocalize logs.","docstring_tokens":["Returns","a","command","to","delocalize","logs","."],"function":"def _delocalize_logging_command(self, logging_path, user_project):\n \"\"\"Returns a command to delocalize logs.\n\n Args:\n logging_path: location of log files.\n user_project: name of the project to be billed for the request.\n\n Returns:\n eg. 
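The _get_task_from_task_dir record above reads a task directory that may be changing while a task starts or ends, so every read must tolerate files that do not exist yet. A sketch of just the pid read under that ordering (read_runner_pid is a hypothetical helper; meta.yaml means "scheduled", task.pid means "runner started"):

```python
import os


def read_runner_pid(task_dir):
    # Missing or partially written file means the runner has not started;
    # -1 is the sentinel for "no pid yet".
    try:
        with open(os.path.join(task_dir, 'task.pid')) as f:
            return int(f.readline().strip())
    except (IOError, OSError, ValueError):
        return -1
```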
'gs:\/\/bucket\/path\/myfile' or 'gs:\/\/bucket\/script-foobar-12'\n \"\"\"\n\n # Get the logging prefix (everything up to \".log\")\n logging_prefix = os.path.splitext(logging_path.uri)[0]\n\n # Set the provider-specific mkdir and file copy commands\n if logging_path.file_provider == job_model.P_LOCAL:\n mkdir_cmd = 'mkdir -p \"%s\"\\n' % os.path.dirname(logging_prefix)\n cp_cmd = 'cp'\n elif logging_path.file_provider == job_model.P_GCS:\n mkdir_cmd = ''\n if user_project:\n cp_cmd = 'gsutil -u {} -mq cp'.format(user_project)\n else:\n cp_cmd = 'gsutil -mq cp'\n else:\n assert False\n\n # Construct the copy command\n copy_logs_cmd = textwrap.dedent(\"\"\"\\\n local cp_cmd=\"{cp_cmd}\"\n local prefix=\"{prefix}\"\n \"\"\").format(\n cp_cmd=cp_cmd, prefix=logging_prefix)\n\n # Build up the command\n body = textwrap.dedent(\"\"\"\\\n {mkdir_cmd}\n {copy_logs_cmd}\n \"\"\").format(\n mkdir_cmd=mkdir_cmd, copy_logs_cmd=copy_logs_cmd)\n\n return body","function_tokens":["def","_delocalize_logging_command","(","self",",","logging_path",",","user_project",")",":","# Get the logging prefix (everything up to \".log\")","logging_prefix","=","os",".","path",".","splitext","(","logging_path",".","uri",")","[","0","]","# Set the provider-specific mkdir and file copy commands","if","logging_path",".","file_provider","==","job_model",".","P_LOCAL",":","mkdir_cmd","=","'mkdir -p \"%s\"\\n'","%","os",".","path",".","dirname","(","logging_prefix",")","cp_cmd","=","'cp'","elif","logging_path",".","file_provider","==","job_model",".","P_GCS",":","mkdir_cmd","=","''","if","user_project",":","cp_cmd","=","'gsutil -u {} -mq cp'",".","format","(","user_project",")","else",":","cp_cmd","=","'gsutil -mq cp'","else",":","assert","False","# Construct the copy command","copy_logs_cmd","=","textwrap",".","dedent","(","\"\"\"\\\n local cp_cmd=\"{cp_cmd}\"\n local prefix=\"{prefix}\"\n \"\"\"",")",".","format","(","cp_cmd","=","cp_cmd",",","prefix","=","logging_prefix",")","# Build up the command","body","=","textwrap",".","dedent","(","\"\"\"\\\n {mkdir_cmd}\n {copy_logs_cmd}\n \"\"\"",")",".","format","(","mkdir_cmd","=","mkdir_cmd",",","copy_logs_cmd","=","copy_logs_cmd",")","return","body"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L693-L734"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._task_directory","parameters":"(self, job_id, task_id, task_attempt)","argument_list":"","return_statement":"return self._provider_root() + '\/' + job_id + '\/' + dir_name","docstring":"The local dir for staging files for that particular task.","docstring_summary":"The local dir for staging files for that particular task.","docstring_tokens":["The","local","dir","for","staging","files","for","that","particular","task","."],"function":"def _task_directory(self, job_id, task_id, task_attempt):\n \"\"\"The local dir for staging files for that particular task.\"\"\"\n dir_name = 'task' if task_id is None else str(task_id)\n if task_attempt:\n dir_name = '%s.%s' % (dir_name, task_attempt)\n return self._provider_root() + '\/' + job_id + '\/' + 
dir_name","function_tokens":["def","_task_directory","(","self",",","job_id",",","task_id",",","task_attempt",")",":","dir_name","=","'task'","if","task_id","is","None","else","str","(","task_id",")","if","task_attempt",":","dir_name","=","'%s.%s'","%","(","dir_name",",","task_attempt",")","return","self",".","_provider_root","(",")","+","'\/'","+","job_id","+","'\/'","+","dir_name"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L736-L741"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._split_task_directory","parameters":"(self, task_dir)","argument_list":"","return_statement":"","docstring":"Return task_id and task_attempt from dir_name.","docstring_summary":"Return task_id and task_attempt from dir_name.","docstring_tokens":["Return","task_id","and","task_attempt","from","dir_name","."],"function":"def _split_task_directory(self, task_dir):\n \"\"\"Return task_id and task_attempt from dir_name.\"\"\"\n if '.' in task_dir:\n return task_dir.split('.')\n else:\n return task_dir, None","function_tokens":["def","_split_task_directory","(","self",",","task_dir",")",":","if","'.'","in","task_dir",":","return","task_dir",".","split","(","'.'",")","else",":","return","task_dir",",","None"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L743-L748"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._make_environment","parameters":"(self, inputs, outputs, mounts)","argument_list":"","return_statement":"return env","docstring":"Return a dictionary of environment variables for the container.","docstring_summary":"Return a dictionary of environment variables for the container.","docstring_tokens":["Return","a","dictionary","of","environment","variables","for","the","container","."],"function":"def _make_environment(self, inputs, outputs, mounts):\n \"\"\"Return a dictionary of environment variables for the container.\"\"\"\n env = {}\n env.update(providers_util.get_file_environment_variables(inputs))\n env.update(providers_util.get_file_environment_variables(outputs))\n env.update(providers_util.get_file_environment_variables(mounts))\n return env","function_tokens":["def","_make_environment","(","self",",","inputs",",","outputs",",","mounts",")",":","env","=","{","}","env",".","update","(","providers_util",".","get_file_environment_variables","(","inputs",")",")","env",".","update","(","providers_util",".","get_file_environment_variables","(","outputs",")",")","env",".","update","(","providers_util",".","get_file_environment_variables","(","mounts",")",")","return","env"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L750-L756"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._localize_inputs_recursive_command","parameters":"(self, task_dir, inputs)","argument_list":"","return_statement":"return '\\n'.join(provider_commands)","docstring":"Returns a command that will stage recursive inputs.","docstring_summary":"Returns a command that will stage recursive 
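The _task_directory and _split_task_directory records above encode "task-id[.task-attempt]" into a directory name and decode it again when listing tasks. A round-trip sketch with hypothetical names (note both halves come back as strings, as in the source):

```python
def task_dir_name(task_id, task_attempt=None):
    # "task" stands in for the task-id of non --tasks jobs.
    name = 'task' if task_id is None else str(task_id)
    return '{}.{}'.format(name, task_attempt) if task_attempt else name


def split_task_dir_name(dir_name):
    # Inverse of task_dir_name for directory names read back from disk.
    if '.' in dir_name:
        task_id, task_attempt = dir_name.split('.')
        return task_id, task_attempt
    return dir_name, None


assert split_task_dir_name(task_dir_name('3', 2)) == ('3', '2')
```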
inputs.","docstring_tokens":["Returns","a","command","that","will","stage","recursive","inputs","."],"function":"def _localize_inputs_recursive_command(self, task_dir, inputs):\n \"\"\"Returns a command that will stage recursive inputs.\"\"\"\n data_dir = os.path.join(task_dir, _DATA_SUBDIR)\n provider_commands = [\n providers_util.build_recursive_localize_command(data_dir, inputs,\n file_provider)\n for file_provider in _SUPPORTED_INPUT_PROVIDERS\n ]\n return '\\n'.join(provider_commands)","function_tokens":["def","_localize_inputs_recursive_command","(","self",",","task_dir",",","inputs",")",":","data_dir","=","os",".","path",".","join","(","task_dir",",","_DATA_SUBDIR",")","provider_commands","=","[","providers_util",".","build_recursive_localize_command","(","data_dir",",","inputs",",","file_provider",")","for","file_provider","in","_SUPPORTED_INPUT_PROVIDERS","]","return","'\\n'",".","join","(","provider_commands",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L758-L766"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._get_input_target_path","parameters":"(self, local_file_path)","argument_list":"","return_statement":"","docstring":"Returns a directory or file path to be the target for \"gsutil cp\".\n\n If the filename contains a wildcard, then the target path must\n be a directory in order to ensure consistency whether the source pattern\n contains one or multiple files.\n\n\n Args:\n local_file_path: A full path terminating in a file or a file wildcard.\n\n Returns:\n The path to use as the \"gsutil cp\" target.","docstring_summary":"Returns a directory or file path to be the target for \"gsutil cp\".","docstring_tokens":["Returns","a","directory","or","file","path","to","be","the","target","for","gsutil","cp","."],"function":"def _get_input_target_path(self, local_file_path):\n \"\"\"Returns a directory or file path to be the target for \"gsutil cp\".\n\n If the filename contains a wildcard, then the target path must\n be a directory in order to ensure consistency whether the source pattern\n contains one or multiple files.\n\n\n Args:\n local_file_path: A full path terminating in a file or a file wildcard.\n\n Returns:\n The path to use as the \"gsutil cp\" target.\n \"\"\"\n\n path, filename = os.path.split(local_file_path)\n if '*' in filename:\n return path + '\/'\n else:\n return local_file_path","function_tokens":["def","_get_input_target_path","(","self",",","local_file_path",")",":","path",",","filename","=","os",".","path",".","split","(","local_file_path",")","if","'*'","in","filename",":","return","path","+","'\/'","else",":","return","local_file_path"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L768-L787"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._localize_inputs_command","parameters":"(self, task_dir, inputs, user_project)","argument_list":"","return_statement":"return '\\n'.join(commands)","docstring":"Returns a command that will stage inputs.","docstring_summary":"Returns a command that will stage inputs.","docstring_tokens":["Returns","a","command","that","will","stage","inputs","."],"function":"def _localize_inputs_command(self, task_dir, inputs, user_project):\n 
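The _get_input_target_path record above turns a wildcard source into a directory target so that "gsutil cp" behaves the same whether the pattern matches one file or many. The same rule as a standalone sketch (cp_target is a hypothetical name):

```python
import os


def cp_target(local_file_path):
    # A wildcard source must copy into a directory (trailing slash) so the
    # result is consistent for one match or many.
    path, filename = os.path.split(local_file_path)
    return path + '/' if '*' in filename else local_file_path


assert cp_target('/mnt/data/input/*.bam') == '/mnt/data/input/'
assert cp_target('/mnt/data/input/a.bam') == '/mnt/data/input/a.bam'
```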
\"\"\"Returns a command that will stage inputs.\"\"\"\n commands = []\n for i in inputs:\n if i.recursive or not i.value:\n continue\n\n source_file_path = i.uri\n local_file_path = task_dir + '\/' + _DATA_SUBDIR + '\/' + i.docker_path\n dest_file_path = self._get_input_target_path(local_file_path)\n\n commands.append('mkdir -p \"%s\"' % os.path.dirname(local_file_path))\n\n if i.file_provider in [job_model.P_LOCAL, job_model.P_GCS]:\n # The semantics that we expect here are implemented consistently in\n # \"gsutil cp\", and are a bit different than \"cp\" when it comes to\n # wildcard handling, so use it for both local and GCS:\n #\n # - `cp path\/* dest\/` will error if \"path\" has subdirectories.\n # - `cp \"path\/*\" \"dest\/\"` will fail (it expects wildcard expansion\n # to come from shell).\n if user_project:\n command = 'gsutil -u %s -mq cp \"%s\" \"%s\"' % (\n user_project, source_file_path, dest_file_path)\n else:\n command = 'gsutil -mq cp \"%s\" \"%s\"' % (source_file_path,\n dest_file_path)\n commands.append(command)\n\n return '\\n'.join(commands)","function_tokens":["def","_localize_inputs_command","(","self",",","task_dir",",","inputs",",","user_project",")",":","commands","=","[","]","for","i","in","inputs",":","if","i",".","recursive","or","not","i",".","value",":","continue","source_file_path","=","i",".","uri","local_file_path","=","task_dir","+","'\/'","+","_DATA_SUBDIR","+","'\/'","+","i",".","docker_path","dest_file_path","=","self",".","_get_input_target_path","(","local_file_path",")","commands",".","append","(","'mkdir -p \"%s\"'","%","os",".","path",".","dirname","(","local_file_path",")",")","if","i",".","file_provider","in","[","job_model",".","P_LOCAL",",","job_model",".","P_GCS","]",":","# The semantics that we expect here are implemented consistently in","# \"gsutil cp\", and are a bit different than \"cp\" when it comes to","# wildcard handling, so use it for both local and GCS:","#","# - `cp path\/* dest\/` will error if \"path\" has subdirectories.","# - `cp \"path\/*\" \"dest\/\"` will fail (it expects wildcard expansion","# to come from shell).","if","user_project",":","command","=","'gsutil -u %s -mq cp \"%s\" \"%s\"'","%","(","user_project",",","source_file_path",",","dest_file_path",")","else",":","command","=","'gsutil -mq cp \"%s\" \"%s\"'","%","(","source_file_path",",","dest_file_path",")","commands",".","append","(","command",")","return","'\\n'",".","join","(","commands",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L789-L818"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalJobProvider._delocalize_outputs_commands","parameters":"(self, task_dir, outputs, user_project)","argument_list":"","return_statement":"return '\\n'.join(commands)","docstring":"Copy outputs from local disk to GCS.","docstring_summary":"Copy outputs from local disk to GCS.","docstring_tokens":["Copy","outputs","from","local","disk","to","GCS","."],"function":"def _delocalize_outputs_commands(self, task_dir, outputs, user_project):\n \"\"\"Copy outputs from local disk to GCS.\"\"\"\n commands = []\n for o in outputs:\n if o.recursive or not o.value:\n continue\n\n # The destination path is o.uri.path, which is the target directory\n # (rather than o.uri, which includes the filename or wildcard).\n dest_path = o.uri.path\n local_path = task_dir + '\/' + _DATA_SUBDIR + '\/' + 
o.docker_path\n\n if o.file_provider == job_model.P_LOCAL:\n commands.append('mkdir -p \"%s\"' % dest_path)\n\n # Use gsutil even for local files (explained in _localize_inputs_command).\n if o.file_provider in [job_model.P_LOCAL, job_model.P_GCS]:\n if user_project:\n command = 'gsutil -u %s -mq cp \"%s\" \"%s\"' % (user_project, local_path,\n dest_path)\n else:\n command = 'gsutil -mq cp \"%s\" \"%s\"' % (local_path, dest_path)\n commands.append(command)\n\n return '\\n'.join(commands)","function_tokens":["def","_delocalize_outputs_commands","(","self",",","task_dir",",","outputs",",","user_project",")",":","commands","=","[","]","for","o","in","outputs",":","if","o",".","recursive","or","not","o",".","value",":","continue","# The destination path is o.uri.path, which is the target directory","# (rather than o.uri, which includes the filename or wildcard).","dest_path","=","o",".","uri",".","path","local_path","=","task_dir","+","'\/'","+","_DATA_SUBDIR","+","'\/'","+","o",".","docker_path","if","o",".","file_provider","==","job_model",".","P_LOCAL",":","commands",".","append","(","'mkdir -p \"%s\"'","%","dest_path",")","# Use gsutil even for local files (explained in _localize_inputs_command).","if","o",".","file_provider","in","[","job_model",".","P_LOCAL",",","job_model",".","P_GCS","]",":","if","user_project",":","command","=","'gsutil -u %s -mq cp \"%s\" \"%s\"'","%","(","user_project",",","local_path",",","dest_path",")","else",":","command","=","'gsutil -mq cp \"%s\" \"%s\"'","%","(","local_path",",","dest_path",")","commands",".","append","(","command",")","return","'\\n'",".","join","(","commands",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L846-L870"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalTask.raw_task_data","parameters":"(self)","argument_list":"","return_statement":"return self._raw._asdict()","docstring":"Return a provider-specific representation of task data.\n\n Returns:\n string of task data from the provider.","docstring_summary":"Return a provider-specific representation of task data.","docstring_tokens":["Return","a","provider","-","specific","representation","of","task","data","."],"function":"def raw_task_data(self):\n \"\"\"Return a provider-specific representation of task data.\n\n Returns:\n string of task data from the provider.\n \"\"\"\n return self._raw._asdict()","function_tokens":["def","raw_task_data","(","self",")",":","return","self",".","_raw",".","_asdict","(",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L915-L921"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/local.py","language":"python","identifier":"LocalTask._last_lines","parameters":"(value, count)","argument_list":"","return_statement":"return '\\n'.join(value[-count:])","docstring":"Return the last line(s) as a single (newline delimited) string.","docstring_summary":"Return the last line(s) as a single (newline delimited) string.","docstring_tokens":["Return","the","last","line","(","s",")","as","a","single","(","newline","delimited",")","string","."],"function":"def _last_lines(value, count):\n \"\"\"Return the last line(s) as a single (newline delimited) string.\"\"\"\n if not value:\n return ''\n\n return 
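The _localize_inputs_command and _delocalize_outputs_commands records above both emit shell lines, using gsutil for local and GCS paths alike because gsutil's quoted-wildcard semantics are consistent where plain cp's are not. A simplified sketch of the staging direction only, skipping recursive inputs (localize_commands, the "data" subdirectory constant, and the input objects' .uri/.docker_path attributes are assumptions):

```python
import os


def localize_commands(task_dir, inputs, user_project=None):
    # "-u <project>" bills requester-pays buckets to the given project.
    gsutil = ('gsutil -u {} -mq cp'.format(user_project)
              if user_project else 'gsutil -mq cp')
    commands = []
    for i in inputs:
        dest = os.path.join(task_dir, 'data', i.docker_path)
        # One mkdir plus one copy per non-recursive input.
        commands.append('mkdir -p "{}"'.format(os.path.dirname(dest)))
        # gsutil (unlike plain cp) handles quoted wildcards itself, so it
        # is used for local and GCS sources alike.
        commands.append('{} "{}" "{}"'.format(gsutil, i.uri, dest))
    return '\n'.join(commands)
```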
'\\n'.join(value[-count:])","function_tokens":["def","_last_lines","(","value",",","count",")",":","if","not","value",":","return","''","return","'\\n'",".","join","(","value","[","-","count",":","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/local.py#L1009-L1014"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2.py","language":"python","identifier":"GoogleV2JobProvider._get_pipeline_regions","parameters":"(self, regions, zones)","argument_list":"","return_statement":"return regions or []","docstring":"Returns the list of regions to use for a pipeline request.\n\n For v2alpha1, just return the regions value that was set, even if it is\n empty.\n\n Args:\n regions (str): A space separated list of regions to use for the pipeline.\n zones (str): A space separated list of zones to use for the pipeline.","docstring_summary":"Returns the list of regions to use for a pipeline request.","docstring_tokens":["Returns","the","list","of","regions","to","use","for","a","pipeline","request","."],"function":"def _get_pipeline_regions(self, regions, zones):\n \"\"\"Returns the list of regions to use for a pipeline request.\n\n For v2alpha1, just return the regions value that was set, even if it is\n empty.\n\n Args:\n regions (str): A space separated list of regions to use for the pipeline.\n zones (str): A space separated list of zones to use for the pipeline.\n \"\"\"\n return regions or []","function_tokens":["def","_get_pipeline_regions","(","self",",","regions",",","zones",")",":","return","regions","or","[","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2.py#L35-L45"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"get_provider","parameters":"(args, resources, credentials_fn=None)","argument_list":"","return_statement":"","docstring":"Returns a provider for job submission requests.","docstring_summary":"Returns a provider for job submission requests.","docstring_tokens":["Returns","a","provider","for","job","submission","requests","."],"function":"def get_provider(args, resources, credentials_fn=None):\n \"\"\"Returns a provider for job submission requests.\"\"\"\n\n # provider_base has a standard way for dsub, dstat, and ddel to\n # get credentials for each provider, but we allow for overriding\n # that by supplying a custom credentials function.\n if not credentials_fn:\n credentials_fn = credentials_from_args\n\n provider = getattr(args, 'provider', 'google-v2')\n\n if provider == 'google-cls-v2':\n return google_cls_v2.GoogleCLSV2JobProvider(\n getattr(args, 'dry_run', False),\n args.project,\n args.location,\n credentials=credentials_fn(args))\n elif provider == 'google-v2':\n return google_v2.GoogleV2JobProvider(\n getattr(args, 'dry_run', False),\n args.project,\n credentials=credentials_fn(args))\n elif provider == 'local':\n return local.LocalJobProvider(resources)\n elif provider == 'test-fails':\n return test_fails.FailsJobProvider()\n else:\n raise ValueError('Unknown provider: ' + provider)","function_tokens":["def","get_provider","(","args",",","resources",",","credentials_fn","=","None",")",":","# provider_base has a standard way for dsub, dstat, and ddel to","# get credentials for each provider, but we allow for overriding","# that by supplying a custom 
credentials function.","if","not","credentials_fn",":","credentials_fn","=","credentials_from_args","provider","=","getattr","(","args",",","'provider'",",","'google-v2'",")","if","provider","==","'google-cls-v2'",":","return","google_cls_v2",".","GoogleCLSV2JobProvider","(","getattr","(","args",",","'dry_run'",",","False",")",",","args",".","project",",","args",".","location",",","credentials","=","credentials_fn","(","args",")",")","elif","provider","==","'google-v2'",":","return","google_v2",".","GoogleV2JobProvider","(","getattr","(","args",",","'dry_run'",",","False",")",",","args",".","project",",","credentials","=","credentials_fn","(","args",")",")","elif","provider","==","'local'",":","return","local",".","LocalJobProvider","(","resources",")","elif","provider","==","'test-fails'",":","return","test_fails",".","FailsJobProvider","(",")","else",":","raise","ValueError","(","'Unknown provider: '","+","provider",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L45-L72"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"get_provider_name","parameters":"(provider)","argument_list":"","return_statement":"return PROVIDER_NAME_MAP[provider.__class__]","docstring":"Returns the name of a given provider.","docstring_summary":"Returns the name of a given provider.","docstring_tokens":["Returns","the","name","of","a","given","provider","."],"function":"def get_provider_name(provider):\n \"\"\"Returns the name of a given provider.\"\"\"\n return PROVIDER_NAME_MAP[provider.__class__]","function_tokens":["def","get_provider_name","(","provider",")",":","return","PROVIDER_NAME_MAP","[","provider",".","__class__","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L75-L77"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"create_parser","parameters":"(prog)","argument_list":"","return_statement":"return parser","docstring":"Create an argument parser, adding in the list of providers.","docstring_summary":"Create an argument parser, adding in the list of providers.","docstring_tokens":["Create","an","argument","parser","adding","in","the","list","of","providers","."],"function":"def create_parser(prog):\n \"\"\"Create an argument parser, adding in the list of providers.\"\"\"\n parser = argparse.ArgumentParser(\n prog=prog, formatter_class=argparse.RawDescriptionHelpFormatter)\n\n parser.add_argument(\n '--provider',\n default='google-v2',\n choices=['local', 'google-v2', 'google-cls-v2', 'test-fails'],\n help=\"\"\"Job service provider. 
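The get_provider record above is a plain dispatch-on-name factory: map the provider flag to a constructor so that new providers register in exactly one place and unknown names fail loudly. A self-contained sketch of the same pattern with stub provider classes (all names here are hypothetical, not dsub's):

```python
class LocalProvider(object):
    pass


class GoogleV2Provider(object):
    def __init__(self, project):
        self.project = project


_FACTORIES = {
    'local': lambda args: LocalProvider(),
    'google-v2': lambda args: GoogleV2Provider(args['project']),
}


def make_provider(name, args):
    # Dispatch on the provider name; unknown names fail loudly.
    if name not in _FACTORIES:
        raise ValueError('Unknown provider: ' + name)
    return _FACTORIES[name](args)
```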
Valid values are \"google-v2\" (Google's\n Pipeline API v2alpha1), \"google-cls-v2\" (Google's Pipelines API v2beta)\n and \"local\" (local Docker execution).\n \"test-*\" providers are for testing purposes only.\n (default: google-v2)\"\"\",\n metavar='PROVIDER')\n\n return parser","function_tokens":["def","create_parser","(","prog",")",":","parser","=","argparse",".","ArgumentParser","(","prog","=","prog",",","formatter_class","=","argparse",".","RawDescriptionHelpFormatter",")","parser",".","add_argument","(","'--provider'",",","default","=","'google-v2'",",","choices","=","[","'local'",",","'google-v2'",",","'google-cls-v2'",",","'test-fails'","]",",","help","=","\"\"\"Job service provider. Valid values are \"google-v2\" (Google's\n Pipeline API v2alpha1), \"google-cls-v2\" (Google's Pipelines API v2beta)\n and \"local\" (local Docker execution).\n \"test-*\" providers are for testing purposes only.\n (default: google-v2)\"\"\"",",","metavar","=","'PROVIDER'",")","return","parser"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L80-L96"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"parse_args","parameters":"(parser, provider_required_args, argv)","argument_list":"","return_statement":"return args","docstring":"Add provider required arguments epilog message, parse, and validate.","docstring_summary":"Add provider required arguments epilog message, parse, and validate.","docstring_tokens":["Add","provider","required","arguments","epilog","message","parse","and","validate","."],"function":"def parse_args(parser, provider_required_args, argv):\n \"\"\"Add provider required arguments epilog message, parse, and validate.\"\"\"\n\n # Add the provider required arguments epilog message\n epilog = 'Provider-required arguments:\\n'\n for provider in provider_required_args:\n epilog += ' %s: %s\\n' % (provider, provider_required_args[provider])\n parser.epilog = epilog\n\n # Parse arguments\n args = parser.parse_args(argv)\n\n # For the selected provider, check the required arguments\n for arg in provider_required_args[args.provider]:\n if not vars(args)[arg]:\n parser.error('argument --%s is required' % arg)\n\n return args","function_tokens":["def","parse_args","(","parser",",","provider_required_args",",","argv",")",":","# Add the provider required arguments epilog message","epilog","=","'Provider-required arguments:\\n'","for","provider","in","provider_required_args",":","epilog","+=","' %s: %s\\n'","%","(","provider",",","provider_required_args","[","provider","]",")","parser",".","epilog","=","epilog","# Parse arguments","args","=","parser",".","parse_args","(","argv",")","# For the selected provider, check the required arguments","for","arg","in","provider_required_args","[","args",".","provider","]",":","if","not","vars","(","args",")","[","arg","]",":","parser",".","error","(","'argument --%s is required'","%","arg",")","return","args"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L99-L116"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"get_dstat_provider_args","parameters":"(provider, project, location)","argument_list":"","return_statement":"return ' '.join(args)","docstring":"A string with the 
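The create_parser and parse_args records above pair a --provider choice with an epilog documenting per-provider required flags, then validate those flags only after parsing, since which arguments are required varies by provider. A runnable sketch of that two-step validation with a made-up REQUIRED table:

```python
import argparse

# Hypothetical per-provider requirements table.
REQUIRED = {'local': [], 'google-v2': ['project']}


def parse(argv):
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--provider', default='google-v2',
                        choices=sorted(REQUIRED), metavar='PROVIDER')
    parser.add_argument('--project')
    # Surface per-provider requirements in --help rather than hard-coding
    # required=True, since the requirements vary by provider.
    parser.epilog = 'Provider-required arguments:\n' + '\n'.join(
        '  %s: %s' % (p, REQUIRED[p]) for p in sorted(REQUIRED))
    args = parser.parse_args(argv)
    for arg in REQUIRED[args.provider]:
        if not vars(args)[arg]:
            parser.error('argument --%s is required' % arg)
    return args
```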
arguments to point dstat to the same provider+project.","docstring_summary":"A string with the arguments to point dstat to the same provider+project.","docstring_tokens":["A","string","with","the","arguments","to","point","dstat","to","the","same","provider","+","project","."],"function":"def get_dstat_provider_args(provider, project, location):\n \"\"\"A string with the arguments to point dstat to the same provider+project.\"\"\"\n provider_name = get_provider_name(provider)\n\n args = []\n if provider_name == 'google-cls-v2':\n args.append('--project %s --location %s' % (project, location))\n elif provider_name == 'google-v2':\n args.append('--project %s' % project)\n elif provider_name == 'local':\n pass\n elif provider_name == 'test-fails':\n pass\n else:\n # New providers should add their dstat required arguments here.\n assert False, 'Provider %s needs get_dstat_provider_args support' % provider\n\n args.insert(0, '--provider %s' % provider_name)\n return ' '.join(args)","function_tokens":["def","get_dstat_provider_args","(","provider",",","project",",","location",")",":","provider_name","=","get_provider_name","(","provider",")","args","=","[","]","if","provider_name","==","'google-cls-v2'",":","args",".","append","(","'--project %s --location %s'","%","(","project",",","location",")",")","elif","provider_name","==","'google-v2'",":","args",".","append","(","'--project %s'","%","project",")","elif","provider_name","==","'local'",":","pass","elif","provider_name","==","'test-fails'",":","pass","else",":","# New providers should add their dstat required arguments here.","assert","False",",","'Provider %s needs get_dstat_provider_args support'","%","provider","args",".","insert","(","0",",","'--provider %s'","%","provider_name",")","return","' '",".","join","(","args",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L119-L137"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"get_ddel_provider_args","parameters":"(provider_type, project, location)","argument_list":"","return_statement":"return get_dstat_provider_args(provider_type, project, location)","docstring":"A string with the arguments to point ddel to the same provider+project.","docstring_summary":"A string with the arguments to point ddel to the same provider+project.","docstring_tokens":["A","string","with","the","arguments","to","point","ddel","to","the","same","provider","+","project","."],"function":"def get_ddel_provider_args(provider_type, project, location):\n \"\"\"A string with the arguments to point ddel to the same provider+project.\"\"\"\n # Change this if the two ever diverge.\n return get_dstat_provider_args(provider_type, project, location)","function_tokens":["def","get_ddel_provider_args","(","provider_type",",","project",",","location",")",":","# Change this if the two ever diverge.","return","get_dstat_provider_args","(","provider_type",",","project",",","location",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L140-L143"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"check_for_unsupported_flag","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Raise an error if the provider doesn't support a 
provided flag.","docstring_summary":"Raise an error if the provider doesn't support a provided flag.","docstring_tokens":["Raise","an","error","if","the","provider","doesn","t","support","a","provided","flag","."],"function":"def check_for_unsupported_flag(args):\n \"\"\"Raise an error if the provider doesn't support a provided flag.\"\"\"\n if args.label and args.provider not in [\n 'test-fails', 'local', 'google-v2', 'google-cls-v2'\n ]:\n raise ValueError(\n '--label is not supported by the \"%s\" provider.' % args.provider)","function_tokens":["def","check_for_unsupported_flag","(","args",")",":","if","args",".","label","and","args",".","provider","not","in","[","'test-fails'",",","'local'",",","'google-v2'",",","'google-cls-v2'","]",":","raise","ValueError","(","'--label is not supported by the \"%s\" provider.'","%","args",".","provider",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L151-L157"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"_format_task_uri","parameters":"(fmt, job_metadata, task_metadata)","argument_list":"","return_statement":"return fmt.format(**values)","docstring":"Returns a URI with placeholders replaced by metadata values.","docstring_summary":"Returns a URI with placeholders replaced by metadata values.","docstring_tokens":["Returns","a","URI","with","placeholders","replaced","by","metadata","values","."],"function":"def _format_task_uri(fmt, job_metadata, task_metadata):\n \"\"\"Returns a URI with placeholders replaced by metadata values.\"\"\"\n\n values = {\n 'job-id': None,\n 'task-id': 'task',\n 'job-name': None,\n 'user-id': None,\n 'task-attempt': None\n }\n for key in values:\n values[key] = task_metadata.get(key) or job_metadata.get(key) or values[key]\n\n return fmt.format(**values)","function_tokens":["def","_format_task_uri","(","fmt",",","job_metadata",",","task_metadata",")",":","values","=","{","'job-id'",":","None",",","'task-id'",":","'task'",",","'job-name'",":","None",",","'user-id'",":","None",",","'task-attempt'",":","None","}","for","key","in","values",":","values","[","key","]","=","task_metadata",".","get","(","key",")","or","job_metadata",".","get","(","key",")","or","values","[","key","]","return","fmt",".","format","(","*","*","values",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L160-L173"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/provider_base.py","language":"python","identifier":"format_logging_uri","parameters":"(uri, job_metadata, task_metadata)","argument_list":"","return_statement":"return _format_task_uri(fmt, job_metadata, task_metadata)","docstring":"Inserts task metadata into the logging URI.\n\n The core behavior is inspired by the Google Pipelines API:\n (1) If the uri ends in \".log\", then that is the logging path.\n (2) Otherwise, the uri is treated as \"directory\" for logs and a filename\n needs to be automatically generated.\n\n For (1), if the job is a --tasks job, then the {task-id} is inserted\n before \".log\".\n\n For (2), the file name generated is {job-id}, or for --tasks jobs, it is\n {job-id}.{task-id}.\n\n In both cases .{task-attempt} is inserted before .log for --retries jobs.\n\n In addition, full task metadata substitution is supported. The URI\n may include substitution strings such as\n \"{job-id}\", \"{task-id}\", \"{job-name}\", \"{user-id}\", and \"{task-attempt}\".\n\n Args:\n uri: User-specified logging URI which may contain substitution fields.\n job_metadata: job-global metadata.\n task_metadata: tasks-specific metadata.\n\n Returns:\n The logging_uri formatted as described above.","docstring_summary":"Inserts task metadata into the logging URI.","docstring_tokens":["Inserts","task","metadata","into","the","logging","URI","."],"function":"def format_logging_uri(uri, job_metadata, task_metadata):\n \"\"\"Inserts task metadata into the logging URI.\n\n The core behavior is inspired by the Google Pipelines API:\n (1) If the uri ends in \".log\", then that is the logging path.\n (2) Otherwise, the uri is treated as \"directory\" for logs and a filename\n needs to be automatically generated.\n\n For (1), if the job is a --tasks job, then the {task-id} is inserted\n before \".log\".\n\n For (2), the file name generated is {job-id}, or for --tasks jobs, it is\n {job-id}.{task-id}.\n\n In both cases .{task-attempt} is inserted before .log for --retries jobs.\n\n In addition, full task metadata substitution is supported. The URI\n may include substitution strings such as\n \"{job-id}\", \"{task-id}\", \"{job-name}\", \"{user-id}\", and \"{task-attempt}\".\n\n Args:\n uri: User-specified logging URI which may contain substitution fields.\n job_metadata: job-global metadata.\n task_metadata: tasks-specific metadata.\n\n Returns:\n The logging_uri formatted as described above.\n \"\"\"\n\n # If the user specifies any formatting (with curly braces), then use that\n # as the format string unchanged.\n fmt = str(uri)\n if '{' not in fmt:\n if uri.endswith('.log'):\n # URI includes a filename. Trim the extension and just use the prefix.\n fmt = os.path.splitext(uri)[0]\n else:\n # URI is a path to a directory. The job-id becomes the filename prefix.\n fmt = os.path.join(uri, '{job-id}')\n\n # If this is a task job, add the task-id.\n if task_metadata.get('task-id') is not None:\n fmt += '.{task-id}'\n\n # If this is a retryable task, add the task-attempt.\n if task_metadata.get('task-attempt') is not None:\n fmt += '.{task-attempt}'\n\n fmt += '.log'\n\n return _format_task_uri(fmt, job_metadata, task_metadata)","function_tokens":["def","format_logging_uri","(","uri",",","job_metadata",",","task_metadata",")",":","# If the user specifies any formatting (with curly braces), then use that","# as the format string unchanged.","fmt","=","str","(","uri",")","if","'{'","not","in","fmt",":","if","uri",".","endswith","(","'.log'",")",":","# URI includes a filename. Trim the extension and just use the prefix.","fmt","=","os",".","path",".","splitext","(","uri",")","[","0","]","else",":","# URI is a path to a directory. 
The job-id becomes the filename prefix.","fmt","=","os",".","path",".","join","(","uri",",","'{job-id}'",")","# If this is a task job, add the task-id.","if","task_metadata",".","get","(","'task-id'",")","is","not","None",":","fmt","+=","'.{task-id}'","# If this is a retryable task, add the task-attempt.","if","task_metadata",".","get","(","'task-attempt'",")","is","not","None",":","fmt","+=","'.{task-attempt}'","fmt","+=","'.log'","return","_format_task_uri","(","fmt",",","job_metadata",",","task_metadata",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/provider_base.py#L176-L226"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2EventMap.get_filtered_normalized_events","parameters":"(self)","argument_list":"","return_statement":"return sorted(list(events.values()), key=operator.itemgetter('start-time'))","docstring":"Filter the granular v2 events down to events of interest.\n\n Filter through the large number of granular events returned by the\n pipelines API, and extract only those that are interesting to a user. This\n is implemented by filtering out events which are known to be uninteresting\n (i.e. the default actions run for every job) and by explicitly matching\n specific events which are interesting and mapping those to v1 style naming.\n\n Events which are not whitelisted or blacklisted will still be output,\n meaning any events which are added in the future won't be masked.\n We don't want to suppress display of events that we don't recognize.\n They may be important.\n\n Returns:\n A list of maps containing the normalized, filtered events.","docstring_summary":"Filter the granular v2 events down to events of interest.","docstring_tokens":["Filter","the","granular","v2","events","down","to","events","of","interest","."],"function":"def get_filtered_normalized_events(self):\n \"\"\"Filter the granular v2 events down to events of interest.\n\n Filter through the large number of granular events returned by the\n pipelines API, and extract only those that are interesting to a user. This\n is implemented by filtering out events which are known to be uninteresting\n (i.e. 
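The format_logging_uri record above derives a log filename only when the URI carries no explicit "{...}" placeholders, appending ".{task-id}" and ".{task-attempt}" before ".log" as needed. A condensed sketch of that derivation (logging_uri is hypothetical and skips the metadata-defaulting that _format_task_uri performs):

```python
import os


def logging_uri(uri, job_id, task_id=None, task_attempt=None):
    # Explicit "{...}" placeholders win; otherwise derive a filename,
    # inserting ".{task-id}" / ".{task-attempt}" before ".log".
    fmt = str(uri)
    if '{' not in fmt:
        if uri.endswith('.log'):
            fmt = os.path.splitext(uri)[0]
        else:
            fmt = os.path.join(uri, '{job-id}')
        if task_id is not None:
            fmt += '.{task-id}'
        if task_attempt is not None:
            fmt += '.{task-attempt}'
        fmt += '.log'
    values = {'job-id': job_id, 'task-id': task_id,
              'task-attempt': task_attempt}
    return fmt.format(**values)


# e.g. logging_uri('gs://b/logs', 'job-1', task_id=3, task_attempt=2)
# -> 'gs://b/logs/job-1.3.2.log'
```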
the default actions run for every job) and by explicitly matching\n specific events which are interesting and mapping those to v1 style naming.\n\n Events which are not whitelisted or blacklisted will still be output,\n meaning any events which are added in the future won't be masked.\n We don't want to suppress display of events that we don't recognize.\n They may be important.\n\n Returns:\n A list of maps containing the normalized, filtered events.\n \"\"\"\n # Need the user-image to look for the right \"pulling image\" event\n user_image = google_v2_operations.get_action_image(self._op,\n _ACTION_USER_COMMAND)\n\n # Only create an \"ok\" event for operations with SUCCESS status.\n need_ok = google_v2_operations.is_success(self._op)\n\n # Events are keyed by name for easier deletion.\n events = {}\n\n # Events are assumed to be ordered by timestamp (newest to oldest).\n for event in google_v2_operations.get_events(self._op):\n if self._filter(event):\n continue\n\n mapped, match = self._map(event)\n name = mapped['name']\n\n if name == 'ok':\n # If we want the \"ok\" event, we grab the first (most recent).\n if not need_ok or 'ok' in events:\n continue\n\n if name == 'pulling-image':\n if match and match.group(1) != user_image:\n continue\n\n events[name] = mapped\n\n return sorted(list(events.values()), key=operator.itemgetter('start-time'))","function_tokens":["def","get_filtered_normalized_events","(","self",")",":","# Need the user-image to look for the right \"pulling image\" event","user_image","=","google_v2_operations",".","get_action_image","(","self",".","_op",",","_ACTION_USER_COMMAND",")","# Only create an \"ok\" event for operations with SUCCESS status.","need_ok","=","google_v2_operations",".","is_success","(","self",".","_op",")","# Events are keyed by name for easier deletion.","events","=","{","}","# Events are assumed to be ordered by timestamp (newest to oldest).","for","event","in","google_v2_operations",".","get_events","(","self",".","_op",")",":","if","self",".","_filter","(","event",")",":","continue","mapped",",","match","=","self",".","_map","(","event",")","name","=","mapped","[","'name'","]","if","name","==","'ok'",":","# If we want the \"ok\" event, we grab the first (most recent).","if","not","need_ok","or","'ok'","in","events",":","continue","if","name","==","'pulling-image'",":","if","match","and","match",".","group","(","1",")","!=","user_image",":","continue","events","[","name","]","=","mapped","return","sorted","(","list","(","events",".","values","(",")",")",",","key","=","operator",".","itemgetter","(","'start-time'",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L371-L417"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2EventMap._map","parameters":"(self, event)","argument_list":"","return_statement":"return {'name': description, 'start-time': start_time}, None","docstring":"Extract elements from an operation event and map to a named event.","docstring_summary":"Extract elements from an operation event and map to a named event.","docstring_tokens":["Extract","elements","from","an","operation","event","and","map","to","a","named","event","."],"function":"def _map(self, event):\n \"\"\"Extract elements from an operation event and map to a named event.\"\"\"\n description = event.get('description', '')\n start_time = 
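The get_filtered_normalized_events and _map records above normalize raw event descriptions through a first-match regex table and deliberately pass unknown descriptions through unchanged, so newly added event types are never silently hidden. A sketch with an invented two-entry map (the real patterns and names live in _EVENT_REGEX_MAP in google_v2_base; these regexes are illustrative only):

```python
import re

# Hypothetical subset of a description-to-name table.
EVENT_REGEX_MAP = {
    'pulling-image': re.compile(r'^Started pulling "(.*)"$'),
    'ok': re.compile(r'^Worker released$'),
}


def map_event(description):
    # First matching pattern wins; unrecognized descriptions pass through
    # unchanged rather than being suppressed.
    for name, regex in EVENT_REGEX_MAP.items():
        match = regex.match(description)
        if match:
            return name, match
    return description, None
```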
google_base.parse_rfc3339_utc_string(\n event.get('timestamp', ''))\n\n for name, regex in _EVENT_REGEX_MAP.items():\n match = regex.match(description)\n if match:\n return {'name': name, 'start-time': start_time}, match\n\n return {'name': description, 'start-time': start_time}, None","function_tokens":["def","_map","(","self",",","event",")",":","description","=","event",".","get","(","'description'",",","''",")","start_time","=","google_base",".","parse_rfc3339_utc_string","(","event",".","get","(","'timestamp'",",","''",")",")","for","name",",","regex","in","_EVENT_REGEX_MAP",".","items","(",")",":","match","=","regex",".","match","(","description",")","if","match",":","return","{","'name'",":","name",",","'start-time'",":","start_time","}",",","match","return","{","'name'",":","description",",","'start-time'",":","start_time","}",",","None"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L419-L430"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._get_pipeline_regions","parameters":"(self, regions, zones)","argument_list":"","return_statement":"","docstring":"Returns the list of regions to use for a pipeline request.","docstring_summary":"Returns the list of regions to use for a pipeline request.","docstring_tokens":["Returns","the","list","of","regions","to","use","for","a","pipeline","request","."],"function":"def _get_pipeline_regions(self, regions, zones):\n \"\"\"Returns the list of regions to use for a pipeline request.\"\"\"\n raise NotImplementedError('Derived class must implement this function')","function_tokens":["def","_get_pipeline_regions","(","self",",","regions",",","zones",")",":","raise","NotImplementedError","(","'Derived class must implement this function'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L483-L485"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._pipelines_run_api","parameters":"(self, request)","argument_list":"","return_statement":"","docstring":"Executes the provider-specific pipelines.run() API.","docstring_summary":"Executes the provider-specific pipelines.run() API.","docstring_tokens":["Executes","the","provider","-","specific","pipelines",".","run","()","API","."],"function":"def _pipelines_run_api(self, request):\n \"\"\"Executes the provider-specific pipelines.run() API.\"\"\"\n raise NotImplementedError('Derived class must implement this function')","function_tokens":["def","_pipelines_run_api","(","self",",","request",")",":","raise","NotImplementedError","(","'Derived class must implement this function'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L487-L489"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._operations_list_api","parameters":"(self, ops_filter, page_token, page_size)","argument_list":"","return_statement":"","docstring":"Executes the provider-specific operations.list() API.","docstring_summary":"Executes the provider-specific operations.list() API.","docstring_tokens":["Executes","the","provider","-","specific","operations",".","list","()","API","."],"function":"def _operations_list_api(self, ops_filter, page_token, page_size):\n \"\"\"Executes the provider-specific operations.list() API.\"\"\"\n raise NotImplementedError('Derived class must implement this function')","function_tokens":["def","_operations_list_api","(","self",",","ops_filter",",","page_token",",","page_size",")",":","raise","NotImplementedError","(","'Derived class must implement this function'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L491-L493"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._operations_cancel_api_def","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Returns a function object for the provider-specific cancel API.","docstring_summary":"Returns a function object for the provider-specific cancel API.","docstring_tokens":["Returns","a","function","object","for","the","provider","-","specific","cancel","API","."],"function":"def _operations_cancel_api_def(self):\n \"\"\"Returns a function object for the provider-specific cancel API.\"\"\"\n raise NotImplementedError('Derived class must implement this function')","function_tokens":["def","_operations_cancel_api_def","(","self",")",":","raise","NotImplementedError","(","'Derived class must implement this function'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L495-L497"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._batch_handler_def","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Returns a function object for the provider-specific batch handler.","docstring_summary":"Returns a function object for the provider-specific batch handler.","docstring_tokens":["Returns","a","function","object","for","the","provider","-","specific","batch","handler","."],"function":"def _batch_handler_def(self):\n \"\"\"Returns a function object for the provider-specific batch handler.\"\"\"\n raise NotImplementedError('Derived class must implement this function')","function_tokens":["def","_batch_handler_def","(","self",")",":","raise","NotImplementedError","(","'Derived class must implement this function'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L499-L501"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase.prepare_job_metadata","parameters":"(self, script, job_name, user_id)","argument_list":"","return_statement":"return providers_util.prepare_job_metadata(script, job_name, user_id)","docstring":"Returns a dictionary of metadata fields for the job.","docstring_summary":"Returns a dictionary of metadata fields for the job.","docstring_tokens":["Returns","a","dictionary","of","metadata","fields","for","the","job","."],"function":"def prepare_job_metadata(self, script, job_name, user_id):\n \"\"\"Returns a dictionary of metadata fields for the job.\"\"\"\n return 
providers_util.prepare_job_metadata(script, job_name, user_id)","function_tokens":["def","prepare_job_metadata","(","self",",","script",",","job_name",",","user_id",")",":","return","providers_util",".","prepare_job_metadata","(","script",",","job_name",",","user_id",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L503-L505"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._get_logging_env","parameters":"(self, logging_uri, user_project)","argument_list":"","return_statement":"return {\n 'LOGGING_PATH': '{}.log'.format(logging_prefix),\n 'STDOUT_PATH': '{}-stdout.log'.format(logging_prefix),\n 'STDERR_PATH': '{}-stderr.log'.format(logging_prefix),\n 'USER_PROJECT': user_project,\n }","docstring":"Returns the environment for actions that copy logging files.","docstring_summary":"Returns the environment for actions that copy logging files.","docstring_tokens":["Returns","the","environment","for","actions","that","copy","logging","files","."],"function":"def _get_logging_env(self, logging_uri, user_project):\n \"\"\"Returns the environment for actions that copy logging files.\"\"\"\n if not logging_uri.endswith('.log'):\n raise ValueError('Logging URI must end in \".log\": {}'.format(logging_uri))\n\n logging_prefix = logging_uri[:-len('.log')]\n return {\n 'LOGGING_PATH': '{}.log'.format(logging_prefix),\n 'STDOUT_PATH': '{}-stdout.log'.format(logging_prefix),\n 'STDERR_PATH': '{}-stderr.log'.format(logging_prefix),\n 'USER_PROJECT': user_project,\n }","function_tokens":["def","_get_logging_env","(","self",",","logging_uri",",","user_project",")",":","if","not","logging_uri",".","endswith","(","'.log'",")",":","raise","ValueError","(","'Logging URI must end in \".log\": {}'",".","format","(","logging_uri",")",")","logging_prefix","=","logging_uri","[",":","-","len","(","'.log'",")","]","return","{","'LOGGING_PATH'",":","'{}.log'",".","format","(","logging_prefix",")",",","'STDOUT_PATH'",":","'{}-stdout.log'",".","format","(","logging_prefix",")",",","'STDERR_PATH'",":","'{}-stderr.log'",".","format","(","logging_prefix",")",",","'USER_PROJECT'",":","user_project",",","}"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L507-L518"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._get_prepare_env","parameters":"(self, script, job_descriptor, inputs, outputs, mounts)","argument_list":"","return_statement":"return env","docstring":"Return a dict with variables for the 'prepare' action.","docstring_summary":"Return a dict with variables for the 'prepare' action.","docstring_tokens":["Return","a","dict","with","variables","for","the","prepare","action","."],"function":"def _get_prepare_env(self, script, job_descriptor, inputs, outputs, mounts):\n \"\"\"Return a dict with variables for the 'prepare' action.\"\"\"\n\n # Add the _SCRIPT_REPR with the repr(script) contents\n # Add the _META_YAML_REPR with the repr(meta) contents\n\n # Add variables for directories that need to be created, for example:\n # DIR_COUNT: 2\n # DIR_0: \/mnt\/data\/input\/gs\/bucket\/path1\/\n # DIR_1: \/mnt\/data\/output\/gs\/bucket\/path2\n\n # List the directories in sorted order so that 
they are created in that\n # order. This is primarily to ensure that permissions are set as we create\n # each directory.\n # For example:\n # mkdir -m 777 -p \/root\/first\/second\n # mkdir -m 777 -p \/root\/first\n # *may* not actually set 777 on \/root\/first\n\n docker_paths = sorted([\n var.docker_path if var.recursive else os.path.dirname(var.docker_path)\n for var in inputs | outputs | mounts\n if var.value\n ])\n\n env = {\n _SCRIPT_VARNAME: repr(script.value),\n _META_YAML_VARNAME: repr(job_descriptor.to_yaml()),\n 'DIR_COUNT': str(len(docker_paths))\n }\n\n for idx, path in enumerate(docker_paths):\n env['DIR_{}'.format(idx)] = os.path.join(providers_util.DATA_MOUNT_POINT,\n path)\n\n return env","function_tokens":["def","_get_prepare_env","(","self",",","script",",","job_descriptor",",","inputs",",","outputs",",","mounts",")",":","# Add the _SCRIPT_REPR with the repr(script) contents","# Add the _META_YAML_REPR with the repr(meta) contents","# Add variables for directories that need to be created, for example:","# DIR_COUNT: 2","# DIR_0: \/mnt\/data\/input\/gs\/bucket\/path1\/","# DIR_1: \/mnt\/data\/output\/gs\/bucket\/path2","# List the directories in sorted order so that they are created in that","# order. This is primarily to ensure that permissions are set as we create","# each directory.","# For example:","# mkdir -m 777 -p \/root\/first\/second","# mkdir -m 777 -p \/root\/first","# *may* not actually set 777 on \/root\/first","docker_paths","=","sorted","(","[","var",".","docker_path","if","var",".","recursive","else","os",".","path",".","dirname","(","var",".","docker_path",")","for","var","in","inputs","|","outputs","|","mounts","if","var",".","value","]",")","env","=","{","_SCRIPT_VARNAME",":","repr","(","script",".","value",")",",","_META_YAML_VARNAME",":","repr","(","job_descriptor",".","to_yaml","(",")",")",",","'DIR_COUNT'",":","str","(","len","(","docker_paths",")",")","}","for","idx",",","path","in","enumerate","(","docker_paths",")",":","env","[","'DIR_{}'",".","format","(","idx",")","]","=","os",".","path",".","join","(","providers_util",".","DATA_MOUNT_POINT",",","path",")","return","env"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L520-L555"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._get_localization_env","parameters":"(self, inputs, user_project)","argument_list":"","return_statement":"return env","docstring":"Return a dict with variables for the 'localization' action.","docstring_summary":"Return a dict with variables for the 'localization' action.","docstring_tokens":["Return","a","dict","with","variables","for","the","localization","action","."],"function":"def _get_localization_env(self, inputs, user_project):\n \"\"\"Return a dict with variables for the 'localization' action.\"\"\"\n\n # Add variables for paths that need to be localized, for example:\n # INPUT_COUNT: 1\n # INPUT_0: MY_INPUT_FILE\n # INPUT_RECURSIVE_0: 0\n # INPUT_SRC_0: gs:\/\/mybucket\/mypath\/myfile\n # INPUT_DST_0: \/mnt\/data\/inputs\/mybucket\/mypath\/myfile\n\n non_empty_inputs = [var for var in inputs if var.value]\n env = {'INPUT_COUNT': str(len(non_empty_inputs))}\n\n for idx, var in enumerate(non_empty_inputs):\n env['INPUT_{}'.format(idx)] = var.name\n env['INPUT_RECURSIVE_{}'.format(idx)] = str(int(var.recursive))\n env['INPUT_SRC_{}'.format(idx)] = 
var.value\n\n # For wildcard paths, the destination must be a directory\n dst = os.path.join(providers_util.DATA_MOUNT_POINT, var.docker_path)\n path, filename = os.path.split(dst)\n if '*' in filename:\n dst = '{}\/'.format(path)\n env['INPUT_DST_{}'.format(idx)] = dst\n\n env['USER_PROJECT'] = user_project\n\n return env","function_tokens":["def","_get_localization_env","(","self",",","inputs",",","user_project",")",":","# Add variables for paths that need to be localized, for example:","# INPUT_COUNT: 1","# INPUT_0: MY_INPUT_FILE","# INPUT_RECURSIVE_0: 0","# INPUT_SRC_0: gs:\/\/mybucket\/mypath\/myfile","# INPUT_DST_0: \/mnt\/data\/inputs\/mybucket\/mypath\/myfile","non_empty_inputs","=","[","var","for","var","in","inputs","if","var",".","value","]","env","=","{","'INPUT_COUNT'",":","str","(","len","(","non_empty_inputs",")",")","}","for","idx",",","var","in","enumerate","(","non_empty_inputs",")",":","env","[","'INPUT_{}'",".","format","(","idx",")","]","=","var",".","name","env","[","'INPUT_RECURSIVE_{}'",".","format","(","idx",")","]","=","str","(","int","(","var",".","recursive",")",")","env","[","'INPUT_SRC_{}'",".","format","(","idx",")","]","=","var",".","value","# For wildcard paths, the destination must be a directory","dst","=","os",".","path",".","join","(","providers_util",".","DATA_MOUNT_POINT",",","var",".","docker_path",")","path",",","filename","=","os",".","path",".","split","(","dst",")","if","'*'","in","filename",":","dst","=","'{}\/'",".","format","(","path",")","env","[","'INPUT_DST_{}'",".","format","(","idx",")","]","=","dst","env","[","'USER_PROJECT'","]","=","user_project","return","env"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L557-L584"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._get_delocalization_env","parameters":"(self, outputs, user_project)","argument_list":"","return_statement":"return env","docstring":"Return a dict with variables for the 'delocalization' action.","docstring_summary":"Return a dict with variables for the 'delocalization' action.","docstring_tokens":["Return","a","dict","with","variables","for","the","delocalization","action","."],"function":"def _get_delocalization_env(self, outputs, user_project):\n \"\"\"Return a dict with variables for the 'delocalization' action.\"\"\"\n\n # Add variables for paths that need to be delocalized, for example:\n # OUTPUT_COUNT: 1\n # OUTPUT_0: MY_OUTPUT_FILE\n # OUTPUT_RECURSIVE_0: 0\n # OUTPUT_SRC_0: gs:\/\/mybucket\/mypath\/myfile\n # OUTPUT_DST_0: \/mnt\/data\/outputs\/mybucket\/mypath\/myfile\n\n non_empty_outputs = [var for var in outputs if var.value]\n env = {'OUTPUT_COUNT': str(len(non_empty_outputs))}\n\n for idx, var in enumerate(non_empty_outputs):\n env['OUTPUT_{}'.format(idx)] = var.name\n env['OUTPUT_RECURSIVE_{}'.format(idx)] = str(int(var.recursive))\n env['OUTPUT_SRC_{}'.format(idx)] = os.path.join(\n providers_util.DATA_MOUNT_POINT, var.docker_path)\n\n # For wildcard paths, the destination must be a directory\n if '*' in var.uri.basename:\n dst = var.uri.path\n else:\n dst = var.uri\n env['OUTPUT_DST_{}'.format(idx)] = dst\n\n env['USER_PROJECT'] = user_project\n\n return env","function_tokens":["def","_get_delocalization_env","(","self",",","outputs",",","user_project",")",":","# Add variables for paths that need to be delocalized, for example:","# 
OUTPUT_COUNT: 1","# OUTPUT_0: MY_OUTPUT_FILE","# OUTPUT_RECURSIVE_0: 0","# OUTPUT_SRC_0: gs:\/\/mybucket\/mypath\/myfile","# OUTPUT_DST_0: \/mnt\/data\/outputs\/mybucket\/mypath\/myfile","non_empty_outputs","=","[","var","for","var","in","outputs","if","var",".","value","]","env","=","{","'OUTPUT_COUNT'",":","str","(","len","(","non_empty_outputs",")",")","}","for","idx",",","var","in","enumerate","(","non_empty_outputs",")",":","env","[","'OUTPUT_{}'",".","format","(","idx",")","]","=","var",".","name","env","[","'OUTPUT_RECURSIVE_{}'",".","format","(","idx",")","]","=","str","(","int","(","var",".","recursive",")",")","env","[","'OUTPUT_SRC_{}'",".","format","(","idx",")","]","=","os",".","path",".","join","(","providers_util",".","DATA_MOUNT_POINT",",","var",".","docker_path",")","# For wildcard paths, the destination must be a directory","if","'*'","in","var",".","uri",".","basename",":","dst","=","var",".","uri",".","path","else",":","dst","=","var",".","uri","env","[","'OUTPUT_DST_{}'",".","format","(","idx",")","]","=","dst","env","[","'USER_PROJECT'","]","=","user_project","return","env"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L586-L614"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._build_user_environment","parameters":"(self, envs, inputs, outputs, mounts)","argument_list":"","return_statement":"return envs","docstring":"Returns a dictionary of for the user container environment.","docstring_summary":"Returns a dictionary of for the user container environment.","docstring_tokens":["Returns","a","dictionary","of","for","the","user","container","environment","."],"function":"def _build_user_environment(self, envs, inputs, outputs, mounts):\n \"\"\"Returns a dictionary of for the user container environment.\"\"\"\n envs = {env.name: env.value for env in envs}\n envs.update(providers_util.get_file_environment_variables(inputs))\n envs.update(providers_util.get_file_environment_variables(outputs))\n envs.update(providers_util.get_file_environment_variables(mounts))\n return envs","function_tokens":["def","_build_user_environment","(","self",",","envs",",","inputs",",","outputs",",","mounts",")",":","envs","=","{","env",".","name",":","env",".","value","for","env","in","envs","}","envs",".","update","(","providers_util",".","get_file_environment_variables","(","inputs",")",")","envs",".","update","(","providers_util",".","get_file_environment_variables","(","outputs",")",")","envs",".","update","(","providers_util",".","get_file_environment_variables","(","mounts",")",")","return","envs"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L616-L622"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._get_mount_actions","parameters":"(self, mounts, mnt_datadisk)","argument_list":"","return_statement":"return actions_to_add","docstring":"Returns a list of two actions per gcs bucket to mount.","docstring_summary":"Returns a list of two actions per gcs bucket to mount.","docstring_tokens":["Returns","a","list","of","two","actions","per","gcs","bucket","to","mount","."],"function":"def _get_mount_actions(self, mounts, mnt_datadisk):\n \"\"\"Returns a list of 
two actions per gcs bucket to mount.\"\"\"\n actions_to_add = []\n for mount in mounts:\n bucket = mount.value[len('gs:\/\/'):]\n mount_path = mount.docker_path\n actions_to_add.extend([\n google_v2_pipelines.build_action(\n name='mount-{}'.format(bucket),\n enable_fuse=True,\n run_in_background=True,\n image_uri=_GCSFUSE_IMAGE,\n mounts=[mnt_datadisk],\n commands=[\n '--implicit-dirs', '--foreground', '-o ro', bucket,\n os.path.join(providers_util.DATA_MOUNT_POINT, mount_path)\n ]),\n google_v2_pipelines.build_action(\n name='mount-wait-{}'.format(bucket),\n enable_fuse=True,\n image_uri=_GCSFUSE_IMAGE,\n mounts=[mnt_datadisk],\n commands=[\n 'wait',\n os.path.join(providers_util.DATA_MOUNT_POINT, mount_path)\n ])\n ])\n return actions_to_add","function_tokens":["def","_get_mount_actions","(","self",",","mounts",",","mnt_datadisk",")",":","actions_to_add","=","[","]","for","mount","in","mounts",":","bucket","=","mount",".","value","[","len","(","'gs:\/\/'",")",":","]","mount_path","=","mount",".","docker_path","actions_to_add",".","extend","(","[","google_v2_pipelines",".","build_action","(","name","=","'mount-{}'",".","format","(","bucket",")",",","enable_fuse","=","True",",","run_in_background","=","True",",","image_uri","=","_GCSFUSE_IMAGE",",","mounts","=","[","mnt_datadisk","]",",","commands","=","[","'--implicit-dirs'",",","'--foreground'",",","'-o ro'",",","bucket",",","os",".","path",".","join","(","providers_util",".","DATA_MOUNT_POINT",",","mount_path",")","]",")",",","google_v2_pipelines",".","build_action","(","name","=","'mount-wait-{}'",".","format","(","bucket",")",",","enable_fuse","=","True",",","image_uri","=","_GCSFUSE_IMAGE",",","mounts","=","[","mnt_datadisk","]",",","commands","=","[","'wait'",",","os",".","path",".","join","(","providers_util",".","DATA_MOUNT_POINT",",","mount_path",")","]",")","]",")","return","actions_to_add"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L624-L651"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._build_pipeline_request","parameters":"(self, task_view)","argument_list":"","return_statement":"return {'pipeline': pipeline, 'labels': labels}","docstring":"Returns a Pipeline object for the task.","docstring_summary":"Returns a Pipeline object for the task.","docstring_tokens":["Returns","a","Pipeline","object","for","the","task","."],"function":"def _build_pipeline_request(self, task_view):\n \"\"\"Returns a Pipeline object for the task.\"\"\"\n job_metadata = task_view.job_metadata\n job_params = task_view.job_params\n job_resources = task_view.job_resources\n task_metadata = task_view.task_descriptors[0].task_metadata\n task_params = task_view.task_descriptors[0].task_params\n task_resources = task_view.task_descriptors[0].task_resources\n\n # Set up VM-specific variables\n mnt_datadisk = google_v2_pipelines.build_mount(\n disk=_DATA_DISK_NAME,\n path=providers_util.DATA_MOUNT_POINT,\n read_only=False)\n scopes = job_resources.scopes or google_base.DEFAULT_SCOPES\n\n # Set up the task labels\n labels = {\n label.name: label.value if label.value else '' for label in\n google_base.build_pipeline_labels(job_metadata, task_metadata)\n | job_params['labels'] | task_params['labels']\n }\n\n # Set local variables for the core pipeline values\n script = task_view.job_metadata['script']\n user_project = 
task_view.job_metadata['user-project'] or ''\n\n envs = job_params['envs'] | task_params['envs']\n inputs = job_params['inputs'] | task_params['inputs']\n outputs = job_params['outputs'] | task_params['outputs']\n mounts = job_params['mounts']\n gcs_mounts = param_util.get_gcs_mounts(mounts)\n\n persistent_disk_mount_params = param_util.get_persistent_disk_mounts(mounts)\n\n # pylint: disable=g-complex-comprehension\n persistent_disks = [\n google_v2_pipelines.build_disk(\n name=disk.name.replace('_', '-'), # Underscores not allowed\n size_gb=disk.disk_size or job_model.DEFAULT_MOUNTED_DISK_SIZE,\n source_image=disk.value,\n disk_type=disk.disk_type or job_model.DEFAULT_DISK_TYPE)\n for disk in persistent_disk_mount_params\n ]\n persistent_disk_mounts = [\n google_v2_pipelines.build_mount(\n disk=persistent_disk.get('name'),\n path=os.path.join(providers_util.DATA_MOUNT_POINT,\n persistent_disk_mount_param.docker_path),\n read_only=True)\n for persistent_disk, persistent_disk_mount_param in zip(\n persistent_disks, persistent_disk_mount_params)\n ]\n # pylint: enable=g-complex-comprehension\n\n # The list of \"actions\" (1-based) will be:\n # 1- continuous copy of log files off to Cloud Storage\n # 2- prepare the shared mount point (write the user script)\n # 3- localize objects from Cloud Storage to block storage\n # 4- execute user command\n # 5- delocalize objects from block storage to Cloud Storage\n # 6- final copy of log files off to Cloud Storage\n #\n # If the user has requested an SSH server be started, it will be inserted\n # after logging is started, and all subsequent action numbers above will be\n # incremented by 1.\n # If the user has requested to mount one or more buckets, two actions per\n # bucket will be inserted after the prepare step, and all subsequent action\n # numbers will be incremented by the number of actions added.\n #\n # We need to track the action numbers specifically for the user action and\n # the final logging action.\n optional_actions = 0\n if job_resources.ssh:\n optional_actions += 1\n\n mount_actions = self._get_mount_actions(gcs_mounts, mnt_datadisk)\n optional_actions += len(mount_actions)\n\n user_action = 4 + optional_actions\n final_logging_action = 6 + optional_actions\n\n # Set up the commands and environment for the logging actions\n logging_cmd = _LOGGING_CMD.format(\n log_msg_fn=_LOG_MSG_FN,\n log_cp_fn=_GSUTIL_CP_FN,\n log_cp_cmd=_LOG_CP_CMD.format(\n user_action=user_action, logging_action='logging_action'))\n continuous_logging_cmd = _CONTINUOUS_LOGGING_CMD.format(\n log_msg_fn=_LOG_MSG_FN,\n log_cp_fn=_GSUTIL_CP_FN,\n log_cp_cmd=_LOG_CP_CMD.format(\n user_action=user_action,\n logging_action='continuous_logging_action'),\n final_logging_action=final_logging_action,\n log_interval=job_resources.log_interval or '60s')\n logging_env = self._get_logging_env(task_resources.logging_path.uri,\n user_project)\n\n # Set up command and environments for the prepare, localization, user,\n # and de-localization actions\n script_path = os.path.join(providers_util.SCRIPT_DIR, script.name)\n prepare_command = _PREPARE_CMD.format(\n log_msg_fn=_LOG_MSG_FN,\n mk_runtime_dirs=_MK_RUNTIME_DIRS_CMD,\n script_var=_SCRIPT_VARNAME,\n python_decode_script=_PYTHON_DECODE_SCRIPT,\n script_path=script_path,\n mk_io_dirs=_MK_IO_DIRS)\n\n prepare_env = self._get_prepare_env(script, task_view, inputs, outputs,\n mounts)\n localization_env = self._get_localization_env(inputs, user_project)\n user_environment = self._build_user_environment(envs, inputs, outputs,\n 
mounts)\n delocalization_env = self._get_delocalization_env(outputs, user_project)\n\n # When --ssh is enabled, run all actions in the same process ID namespace\n pid_namespace = 'shared' if job_resources.ssh else None\n\n # Build the list of actions\n actions = []\n actions.append(\n google_v2_pipelines.build_action(\n name='logging',\n pid_namespace=pid_namespace,\n run_in_background=True,\n image_uri=_CLOUD_SDK_IMAGE,\n environment=logging_env,\n entrypoint='\/bin\/bash',\n commands=['-c', continuous_logging_cmd]))\n\n if job_resources.ssh:\n actions.append(\n google_v2_pipelines.build_action(\n name='ssh',\n pid_namespace=pid_namespace,\n image_uri=_SSH_IMAGE,\n mounts=[mnt_datadisk],\n entrypoint='ssh-server',\n port_mappings={_DEFAULT_SSH_PORT: _DEFAULT_SSH_PORT},\n run_in_background=True))\n\n actions.append(\n google_v2_pipelines.build_action(\n name='prepare',\n pid_namespace=pid_namespace,\n image_uri=_CLOUD_SDK_IMAGE,\n mounts=[mnt_datadisk],\n environment=prepare_env,\n entrypoint='\/bin\/bash',\n commands=['-c', prepare_command]),)\n\n actions.extend(mount_actions)\n\n actions.extend([\n google_v2_pipelines.build_action(\n name='localization',\n pid_namespace=pid_namespace,\n image_uri=_CLOUD_SDK_IMAGE,\n mounts=[mnt_datadisk],\n environment=localization_env,\n entrypoint='\/bin\/bash',\n commands=[\n '-c',\n _LOCALIZATION_CMD.format(\n log_msg_fn=_LOG_MSG_FN,\n recursive_cp_fn=_GSUTIL_RSYNC_FN,\n cp_fn=_GSUTIL_CP_FN,\n cp_loop=_LOCALIZATION_LOOP)\n ]),\n google_v2_pipelines.build_action(\n name='user-command',\n pid_namespace=pid_namespace,\n block_external_network=job_resources.block_external_network,\n image_uri=job_resources.image,\n mounts=[mnt_datadisk] + persistent_disk_mounts,\n environment=user_environment,\n entrypoint='\/usr\/bin\/env',\n commands=[\n 'bash', '-c',\n _USER_CMD.format(\n tmp_dir=providers_util.TMP_DIR,\n working_dir=providers_util.WORKING_DIR,\n user_script=script_path)\n ]),\n google_v2_pipelines.build_action(\n name='delocalization',\n pid_namespace=pid_namespace,\n image_uri=_CLOUD_SDK_IMAGE,\n mounts=[mnt_datadisk],\n environment=delocalization_env,\n entrypoint='\/bin\/bash',\n commands=[\n '-c',\n _LOCALIZATION_CMD.format(\n log_msg_fn=_LOG_MSG_FN,\n recursive_cp_fn=_GSUTIL_RSYNC_FN,\n cp_fn=_GSUTIL_CP_FN,\n cp_loop=_DELOCALIZATION_LOOP)\n ]),\n google_v2_pipelines.build_action(\n name='final_logging',\n pid_namespace=pid_namespace,\n always_run=True,\n image_uri=_CLOUD_SDK_IMAGE,\n environment=logging_env,\n entrypoint='\/bin\/bash',\n commands=['-c', logging_cmd]),\n ])\n\n assert len(actions) - 2 == user_action\n assert len(actions) == final_logging_action\n\n # Prepare the VM (resources) configuration\n disks = [\n google_v2_pipelines.build_disk(\n _DATA_DISK_NAME,\n job_resources.disk_size,\n source_image=None,\n disk_type=job_resources.disk_type or job_model.DEFAULT_DISK_TYPE)\n ]\n disks.extend(persistent_disks)\n network = google_v2_pipelines.build_network(\n job_resources.network, job_resources.subnetwork,\n job_resources.use_private_address)\n if job_resources.machine_type:\n machine_type = job_resources.machine_type\n elif job_resources.min_cores or job_resources.min_ram:\n machine_type = GoogleV2CustomMachine.build_machine_type(\n job_resources.min_cores, job_resources.min_ram)\n else:\n machine_type = job_model.DEFAULT_MACHINE_TYPE\n accelerators = None\n if job_resources.accelerator_type:\n accelerators = [\n google_v2_pipelines.build_accelerator(job_resources.accelerator_type,\n job_resources.accelerator_count)\n ]\n 
service_account = google_v2_pipelines.build_service_account(\n job_resources.service_account or 'default', scopes)\n\n resources = google_v2_pipelines.build_resources(\n self._project,\n self._get_pipeline_regions(job_resources.regions, job_resources.zones),\n google_base.get_zones(job_resources.zones),\n google_v2_pipelines.build_machine(\n network=network,\n machine_type=machine_type,\n # Preemptible comes from task_resources because it may change\n # on retry attempts\n preemptible=task_resources.preemptible,\n service_account=service_account,\n boot_disk_size_gb=job_resources.boot_disk_size,\n disks=disks,\n accelerators=accelerators,\n nvidia_driver_version=job_resources.nvidia_driver_version,\n labels=labels,\n cpu_platform=job_resources.cpu_platform,\n enable_stackdriver_monitoring=job_resources\n .enable_stackdriver_monitoring),\n )\n\n # Build the pipeline request\n pipeline = google_v2_pipelines.build_pipeline(actions, resources, None,\n job_resources.timeout)\n\n return {'pipeline': pipeline, 'labels': labels}","function_tokens":["def","_build_pipeline_request","(","self",",","task_view",")",":","job_metadata","=","task_view",".","job_metadata","job_params","=","task_view",".","job_params","job_resources","=","task_view",".","job_resources","task_metadata","=","task_view",".","task_descriptors","[","0","]",".","task_metadata","task_params","=","task_view",".","task_descriptors","[","0","]",".","task_params","task_resources","=","task_view",".","task_descriptors","[","0","]",".","task_resources","# Set up VM-specific variables","mnt_datadisk","=","google_v2_pipelines",".","build_mount","(","disk","=","_DATA_DISK_NAME",",","path","=","providers_util",".","DATA_MOUNT_POINT",",","read_only","=","False",")","scopes","=","job_resources",".","scopes","or","google_base",".","DEFAULT_SCOPES","# Set up the task labels","labels","=","{","label",".","name",":","label",".","value","if","label",".","value","else","''","for","label","in","google_base",".","build_pipeline_labels","(","job_metadata",",","task_metadata",")","|","job_params","[","'labels'","]","|","task_params","[","'labels'","]","}","# Set local variables for the core pipeline values","script","=","task_view",".","job_metadata","[","'script'","]","user_project","=","task_view",".","job_metadata","[","'user-project'","]","or","''","envs","=","job_params","[","'envs'","]","|","task_params","[","'envs'","]","inputs","=","job_params","[","'inputs'","]","|","task_params","[","'inputs'","]","outputs","=","job_params","[","'outputs'","]","|","task_params","[","'outputs'","]","mounts","=","job_params","[","'mounts'","]","gcs_mounts","=","param_util",".","get_gcs_mounts","(","mounts",")","persistent_disk_mount_params","=","param_util",".","get_persistent_disk_mounts","(","mounts",")","# pylint: disable=g-complex-comprehension","persistent_disks","=","[","google_v2_pipelines",".","build_disk","(","name","=","disk",".","name",".","replace","(","'_'",",","'-'",")",",","# Underscores not 
allowed","size_gb","=","disk",".","disk_size","or","job_model",".","DEFAULT_MOUNTED_DISK_SIZE",",","source_image","=","disk",".","value",",","disk_type","=","disk",".","disk_type","or","job_model",".","DEFAULT_DISK_TYPE",")","for","disk","in","persistent_disk_mount_params","]","persistent_disk_mounts","=","[","google_v2_pipelines",".","build_mount","(","disk","=","persistent_disk",".","get","(","'name'",")",",","path","=","os",".","path",".","join","(","providers_util",".","DATA_MOUNT_POINT",",","persistent_disk_mount_param",".","docker_path",")",",","read_only","=","True",")","for","persistent_disk",",","persistent_disk_mount_param","in","zip","(","persistent_disks",",","persistent_disk_mount_params",")","]","# pylint: enable=g-complex-comprehension","# The list of \"actions\" (1-based) will be:","# 1- continuous copy of log files off to Cloud Storage","# 2- prepare the shared mount point (write the user script)","# 3- localize objects from Cloud Storage to block storage","# 4- execute user command","# 5- delocalize objects from block storage to Cloud Storage","# 6- final copy of log files off to Cloud Storage","#","# If the user has requested an SSH server be started, it will be inserted","# after logging is started, and all subsequent action numbers above will be","# incremented by 1.","# If the user has requested to mount one or more buckets, two actions per","# bucket will be inserted after the prepare step, and all subsequent action","# numbers will be incremented by the number of actions added.","#","# We need to track the action numbers specifically for the user action and","# the final logging action.","optional_actions","=","0","if","job_resources",".","ssh",":","optional_actions","+=","1","mount_actions","=","self",".","_get_mount_actions","(","gcs_mounts",",","mnt_datadisk",")","optional_actions","+=","len","(","mount_actions",")","user_action","=","4","+","optional_actions","final_logging_action","=","6","+","optional_actions","# Set up the commands and environment for the logging actions","logging_cmd","=","_LOGGING_CMD",".","format","(","log_msg_fn","=","_LOG_MSG_FN",",","log_cp_fn","=","_GSUTIL_CP_FN",",","log_cp_cmd","=","_LOG_CP_CMD",".","format","(","user_action","=","user_action",",","logging_action","=","'logging_action'",")",")","continuous_logging_cmd","=","_CONTINUOUS_LOGGING_CMD",".","format","(","log_msg_fn","=","_LOG_MSG_FN",",","log_cp_fn","=","_GSUTIL_CP_FN",",","log_cp_cmd","=","_LOG_CP_CMD",".","format","(","user_action","=","user_action",",","logging_action","=","'continuous_logging_action'",")",",","final_logging_action","=","final_logging_action",",","log_interval","=","job_resources",".","log_interval","or","'60s'",")","logging_env","=","self",".","_get_logging_env","(","task_resources",".","logging_path",".","uri",",","user_project",")","# Set up command and environments for the prepare, localization, user,","# and de-localization 
actions","script_path","=","os",".","path",".","join","(","providers_util",".","SCRIPT_DIR",",","script",".","name",")","prepare_command","=","_PREPARE_CMD",".","format","(","log_msg_fn","=","_LOG_MSG_FN",",","mk_runtime_dirs","=","_MK_RUNTIME_DIRS_CMD",",","script_var","=","_SCRIPT_VARNAME",",","python_decode_script","=","_PYTHON_DECODE_SCRIPT",",","script_path","=","script_path",",","mk_io_dirs","=","_MK_IO_DIRS",")","prepare_env","=","self",".","_get_prepare_env","(","script",",","task_view",",","inputs",",","outputs",",","mounts",")","localization_env","=","self",".","_get_localization_env","(","inputs",",","user_project",")","user_environment","=","self",".","_build_user_environment","(","envs",",","inputs",",","outputs",",","mounts",")","delocalization_env","=","self",".","_get_delocalization_env","(","outputs",",","user_project",")","# When --ssh is enabled, run all actions in the same process ID namespace","pid_namespace","=","'shared'","if","job_resources",".","ssh","else","None","# Build the list of actions","actions","=","[","]","actions",".","append","(","google_v2_pipelines",".","build_action","(","name","=","'logging'",",","pid_namespace","=","pid_namespace",",","run_in_background","=","True",",","image_uri","=","_CLOUD_SDK_IMAGE",",","environment","=","logging_env",",","entrypoint","=","'\/bin\/bash'",",","commands","=","[","'-c'",",","continuous_logging_cmd","]",")",")","if","job_resources",".","ssh",":","actions",".","append","(","google_v2_pipelines",".","build_action","(","name","=","'ssh'",",","pid_namespace","=","pid_namespace",",","image_uri","=","_SSH_IMAGE",",","mounts","=","[","mnt_datadisk","]",",","entrypoint","=","'ssh-server'",",","port_mappings","=","{","_DEFAULT_SSH_PORT",":","_DEFAULT_SSH_PORT","}",",","run_in_background","=","True",")",")","actions",".","append","(","google_v2_pipelines",".","build_action","(","name","=","'prepare'",",","pid_namespace","=","pid_namespace",",","image_uri","=","_CLOUD_SDK_IMAGE",",","mounts","=","[","mnt_datadisk","]",",","environment","=","prepare_env",",","entrypoint","=","'\/bin\/bash'",",","commands","=","[","'-c'",",","prepare_command","]",")",",",")","actions",".","extend","(","mount_actions",")","actions",".","extend","(","[","google_v2_pipelines",".","build_action","(","name","=","'localization'",",","pid_namespace","=","pid_namespace",",","image_uri","=","_CLOUD_SDK_IMAGE",",","mounts","=","[","mnt_datadisk","]",",","environment","=","localization_env",",","entrypoint","=","'\/bin\/bash'",",","commands","=","[","'-c'",",","_LOCALIZATION_CMD",".","format","(","log_msg_fn","=","_LOG_MSG_FN",",","recursive_cp_fn","=","_GSUTIL_RSYNC_FN",",","cp_fn","=","_GSUTIL_CP_FN",",","cp_loop","=","_LOCALIZATION_LOOP",")","]",")",",","google_v2_pipelines",".","build_action","(","name","=","'user-command'",",","pid_namespace","=","pid_namespace",",","block_external_network","=","job_resources",".","block_external_network",",","image_uri","=","job_resources",".","image",",","mounts","=","[","mnt_datadisk","]","+","persistent_disk_mounts",",","environment","=","user_environment",",","entrypoint","=","'\/usr\/bin\/env'",",","commands","=","[","'bash'",",","'-c'",",","_USER_CMD",".","format","(","tmp_dir","=","providers_util",".","TMP_DIR",",","working_dir","=","providers_util",".","WORKING_DIR",",","user_script","=","script_path",")","]",")",",","google_v2_pipelines",".","build_action","(","name","=","'delocalization'",",","pid_namespace","=","pid_namespace",",","image_uri","=","_CLOUD_SDK_IMAGE",",","mounts","=","[","mnt_datadisk","]","
,","environment","=","delocalization_env",",","entrypoint","=","'\/bin\/bash'",",","commands","=","[","'-c'",",","_LOCALIZATION_CMD",".","format","(","log_msg_fn","=","_LOG_MSG_FN",",","recursive_cp_fn","=","_GSUTIL_RSYNC_FN",",","cp_fn","=","_GSUTIL_CP_FN",",","cp_loop","=","_DELOCALIZATION_LOOP",")","]",")",",","google_v2_pipelines",".","build_action","(","name","=","'final_logging'",",","pid_namespace","=","pid_namespace",",","always_run","=","True",",","image_uri","=","_CLOUD_SDK_IMAGE",",","environment","=","logging_env",",","entrypoint","=","'\/bin\/bash'",",","commands","=","[","'-c'",",","logging_cmd","]",")",",","]",")","assert","len","(","actions",")","-","2","==","user_action","assert","len","(","actions",")","==","final_logging_action","# Prepare the VM (resources) configuration","disks","=","[","google_v2_pipelines",".","build_disk","(","_DATA_DISK_NAME",",","job_resources",".","disk_size",",","source_image","=","None",",","disk_type","=","job_resources",".","disk_type","or","job_model",".","DEFAULT_DISK_TYPE",")","]","disks",".","extend","(","persistent_disks",")","network","=","google_v2_pipelines",".","build_network","(","job_resources",".","network",",","job_resources",".","subnetwork",",","job_resources",".","use_private_address",")","if","job_resources",".","machine_type",":","machine_type","=","job_resources",".","machine_type","elif","job_resources",".","min_cores","or","job_resources",".","min_ram",":","machine_type","=","GoogleV2CustomMachine",".","build_machine_type","(","job_resources",".","min_cores",",","job_resources",".","min_ram",")","else",":","machine_type","=","job_model",".","DEFAULT_MACHINE_TYPE","accelerators","=","None","if","job_resources",".","accelerator_type",":","accelerators","=","[","google_v2_pipelines",".","build_accelerator","(","job_resources",".","accelerator_type",",","job_resources",".","accelerator_count",")","]","service_account","=","google_v2_pipelines",".","build_service_account","(","job_resources",".","service_account","or","'default'",",","scopes",")","resources","=","google_v2_pipelines",".","build_resources","(","self",".","_project",",","self",".","_get_pipeline_regions","(","job_resources",".","regions",",","job_resources",".","zones",")",",","google_base",".","get_zones","(","job_resources",".","zones",")",",","google_v2_pipelines",".","build_machine","(","network","=","network",",","machine_type","=","machine_type",",","# Preemptible comes from task_resources because it may change","# on retry attempts","preemptible","=","task_resources",".","preemptible",",","service_account","=","service_account",",","boot_disk_size_gb","=","job_resources",".","boot_disk_size",",","disks","=","disks",",","accelerators","=","accelerators",",","nvidia_driver_version","=","job_resources",".","nvidia_driver_version",",","labels","=","labels",",","cpu_platform","=","job_resources",".","cpu_platform",",","enable_stackdriver_monitoring","=","job_resources",".","enable_stackdriver_monitoring",")",",",")","# Build the pipeline request","pipeline","=","google_v2_pipelines",".","build_pipeline","(","actions",",","resources",",","None",",","job_resources",".","timeout",")","return","{","'pipeline'",":","pipeline",",","'labels'",":","labels","}"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L653-L920"} 
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase.submit_job","parameters":"(self, job_descriptor, skip_if_output_present)","argument_list":"","return_statement":"return {\n 'job-id': job_descriptor.job_metadata['job-id'],\n 'user-id': job_descriptor.job_metadata['user-id'],\n 'task-id': [task_id for task_id in launched_tasks if task_id],\n }","docstring":"Submit the job (or tasks) to be executed.\n\n Args:\n job_descriptor: all parameters needed to launch all job tasks\n skip_if_output_present: (boolean) if true, skip tasks whose output\n is present (see --skip flag for more explanation).\n\n Returns:\n A dictionary containing the 'user-id', 'job-id', and 'task-id' list.\n For jobs that are not task array jobs, the task-id list should be empty.\n\n Raises:\n ValueError: if job resources or task data contain illegal values.","docstring_summary":"Submit the job (or tasks) to be executed.","docstring_tokens":["Submit","the","job","(","or","tasks",")","to","be","executed","."],"function":"def submit_job(self, job_descriptor, skip_if_output_present):\n \"\"\"Submit the job (or tasks) to be executed.\n\n Args:\n job_descriptor: all parameters needed to launch all job tasks\n skip_if_output_present: (boolean) if true, skip tasks whose output\n is present (see --skip flag for more explanation).\n\n Returns:\n A dictionary containing the 'user-id', 'job-id', and 'task-id' list.\n For jobs that are not task array jobs, the task-id list should be empty.\n\n Raises:\n ValueError: if job resources or task data contain illegal values.\n \"\"\"\n # Validate task data and resources.\n param_util.validate_submit_args_or_fail(\n job_descriptor,\n provider_name=self._provider_name,\n input_providers=_SUPPORTED_INPUT_PROVIDERS,\n output_providers=_SUPPORTED_OUTPUT_PROVIDERS,\n logging_providers=_SUPPORTED_LOGGING_PROVIDERS)\n\n # Prepare and submit jobs.\n launched_tasks = []\n requests = []\n\n for task_view in job_model.task_view_generator(job_descriptor):\n\n job_params = task_view.job_params\n task_params = task_view.task_descriptors[0].task_params\n\n outputs = job_params['outputs'] | task_params['outputs']\n if skip_if_output_present:\n # check whether the output's already there\n if dsub_util.outputs_are_present(outputs, self._storage_service):\n print('Skipping task because its outputs are present')\n continue\n\n request = self._build_pipeline_request(task_view)\n\n if self._dry_run:\n requests.append(request)\n else:\n task_id = self._submit_pipeline(request)\n launched_tasks.append(task_id)\n\n # If this is a dry-run, emit all the pipeline request objects\n if self._dry_run:\n print(\n json.dumps(\n requests, indent=2, sort_keys=True, separators=(',', ': ')))\n\n if not requests and not launched_tasks:\n return {'job-id': dsub_util.NO_JOB}\n\n return {\n 'job-id': job_descriptor.job_metadata['job-id'],\n 'user-id': job_descriptor.job_metadata['user-id'],\n 'task-id': [task_id for task_id in launched_tasks if task_id],\n }","function_tokens":["def","submit_job","(","self",",","job_descriptor",",","skip_if_output_present",")",":","# Validate task data and resources.","param_util",".","validate_submit_args_or_fail","(","job_descriptor",",","provider_name","=","self",".","_provider_name",",","input_providers","=","_SUPPORTED_INPUT_PROVIDERS",",","output_providers","=","_SUPPORTED_OUTPUT_PROVIDERS",",","logging_providers","=","_SUPPORTED_LOGGING_PROVIDERS",")","# Prepare and 
submit jobs.","launched_tasks","=","[","]","requests","=","[","]","for","task_view","in","job_model",".","task_view_generator","(","job_descriptor",")",":","job_params","=","task_view",".","job_params","task_params","=","task_view",".","task_descriptors","[","0","]",".","task_params","outputs","=","job_params","[","'outputs'","]","|","task_params","[","'outputs'","]","if","skip_if_output_present",":","# check whether the output's already there","if","dsub_util",".","outputs_are_present","(","outputs",",","self",".","_storage_service",")",":","print","(","'Skipping task because its outputs are present'",")","continue","request","=","self",".","_build_pipeline_request","(","task_view",")","if","self",".","_dry_run",":","requests",".","append","(","request",")","else",":","task_id","=","self",".","_submit_pipeline","(","request",")","launched_tasks",".","append","(","task_id",")","# If this is a dry-run, emit all the pipeline request objects","if","self",".","_dry_run",":","print","(","json",".","dumps","(","requests",",","indent","=","2",",","sort_keys","=","True",",","separators","=","(","','",",","': '",")",")",")","if","not","requests","and","not","launched_tasks",":","return","{","'job-id'",":","dsub_util",".","NO_JOB","}","return","{","'job-id'",":","job_descriptor",".","job_metadata","[","'job-id'","]",",","'user-id'",":","job_descriptor",".","job_metadata","[","'user-id'","]",",","'task-id'",":","[","task_id","for","task_id","in","launched_tasks","if","task_id","]",",","}"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L929-L989"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase._operations_list","parameters":"(self, ops_filter, max_tasks, page_size, page_token)","argument_list":"","return_statement":"return [\n GoogleOperation(self._provider_name, op)\n for op in response.get('operations', [])\n if google_v2_operations.is_dsub_operation(op)\n ], response.get('nextPageToken')","docstring":"Gets the list of operations for the specified filter.\n\n Args:\n ops_filter: string filter of operations to return\n max_tasks: the maximum number of job tasks to return or 0 for no limit.\n page_size: the number of operations to requested on each list operation to\n the pipelines API (if 0 or None, the API default is used)\n page_token: page token returned by a previous _operations_list call.\n\n Returns:\n Operations matching the filter criteria.","docstring_summary":"Gets the list of operations for the specified filter.","docstring_tokens":["Gets","the","list","of","operations","for","the","specified","filter","."],"function":"def _operations_list(self, ops_filter, max_tasks, page_size, page_token):\n \"\"\"Gets the list of operations for the specified filter.\n\n Args:\n ops_filter: string filter of operations to return\n max_tasks: the maximum number of job tasks to return or 0 for no limit.\n page_size: the number of operations to requested on each list operation to\n the pipelines API (if 0 or None, the API default is used)\n page_token: page token returned by a previous _operations_list call.\n\n Returns:\n Operations matching the filter criteria.\n \"\"\"\n\n # We are not using the documented default page size of 256,\n # nor allowing for the maximum page size of 2048 as larger page sizes\n # currently cause the operations.list() API to return an error:\n # HttpError 429 ... 
Resource has been exhausted (e.g. check quota).\n max_page_size = 128\n\n # Set the page size to the smallest (non-zero) size we can\n page_size = min(sz for sz in [page_size, max_page_size, max_tasks] if sz)\n\n # Execute operations.list() and return all of the dsub operations\n api = self._operations_list_api(ops_filter, page_token, page_size)\n google_base_api = google_base.Api()\n response = google_base_api.execute(api)\n\n return [\n GoogleOperation(self._provider_name, op)\n for op in response.get('operations', [])\n if google_v2_operations.is_dsub_operation(op)\n ], response.get('nextPageToken')","function_tokens":["def","_operations_list","(","self",",","ops_filter",",","max_tasks",",","page_size",",","page_token",")",":","# We are not using the documented default page size of 256,","# nor allowing for the maximum page size of 2048 as larger page sizes","# currently cause the operations.list() API to return an error:","# HttpError 429 ... Resource has been exhausted (e.g. check quota).","max_page_size","=","128","# Set the page size to the smallest (non-zero) size we can","page_size","=","min","(","sz","for","sz","in","[","page_size",",","max_page_size",",","max_tasks","]","if","sz",")","# Execute operations.list() and return all of the dsub operations","api","=","self",".","_operations_list_api","(","ops_filter",",","page_token",",","page_size",")","google_base_api","=","google_base",".","Api","(",")","response","=","google_base_api",".","execute","(","api",")","return","[","GoogleOperation","(","self",".","_provider_name",",","op",")","for","op","in","response",".","get","(","'operations'",",","[","]",")","if","google_v2_operations",".","is_dsub_operation","(","op",")","]",",","response",".","get","(","'nextPageToken'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1093-L1125"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase.lookup_job_tasks","parameters":"(self,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0,\n page_size=0)","argument_list":"","return_statement":"","docstring":"Yields operations based on the input criteria.\n\n If any of the filters are empty or {'*'}, then no filtering is performed on\n that field. Filtering by both a job id list and job name list is\n unsupported.\n\n Args:\n statuses: {'*'}, or a list of job status strings to return. Valid\n status strings are 'RUNNING', 'SUCCESS', 'FAILURE', or 'CANCELED'.\n user_ids: a set of ids for the user(s) who launched the job.\n job_ids: a set of job ids to return.\n job_names: a set of job names to return.\n task_ids: a set of specific tasks within the specified job(s) to return.\n task_attempts: a list of specific attempts within the specified task(s)\n to return.\n labels: a list of LabelParam with user-added labels. All labels must
match the task being fetched.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent\n create time of a task, inclusive.\n max_tasks: the maximum number of job tasks to return or 0 for no limit.\n page_size: the page size to use for each query to the pipelines API.\n\n Raises:\n ValueError: if both a job id list and a job name list are provided\n\n Yields:\n Genomics API Operations objects.","docstring_summary":"Yields operations based on the input criteria.","docstring_tokens":["Yields","operations","based","on","the","input","criteria","."],"function":"def lookup_job_tasks(self,\n statuses,\n user_ids=None,\n job_ids=None,\n job_names=None,\n task_ids=None,\n task_attempts=None,\n labels=None,\n create_time_min=None,\n create_time_max=None,\n max_tasks=0,\n page_size=0):\n \"\"\"Yields operations based on the input criteria.\n\n If any of the filters are empty or {'*'}, then no filtering is performed on\n that field. Filtering by both a job id list and job name list is\n unsupported.\n\n Args:\n statuses: {'*'}, or a list of job status strings to return. Valid\n status strings are 'RUNNING', 'SUCCESS', 'FAILURE', or 'CANCELED'.\n user_ids: a set of ids for the user(s) who launched the job.\n job_ids: a set of job ids to return.\n job_names: a set of job names to return.\n task_ids: a set of specific tasks within the specified job(s) to return.\n task_attempts: a list of specific attempts within the specified task(s)\n to return.\n labels: a list of LabelParam with user-added labels. All labels must\n match the task being fetched.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent\n create time of a task, inclusive.\n max_tasks: the maximum number of job tasks to return or 0 for no limit.\n page_size: the page size to use for each query to the pipelines API.\n\n Raises:\n ValueError: if both a job id list and a job name list are provided\n\n Yields:\n Genomics API Operations objects.\n \"\"\"\n\n # Build a filter for operations to return\n ops_filter = self._build_query_filter(\n statuses, user_ids, job_ids, job_names, task_ids, task_attempts, labels,\n create_time_min, create_time_max)\n\n # Execute the operations.list() API to get batches of operations to yield\n page_token = None\n tasks_yielded = 0\n while True:\n # If max_tasks is set, let operations.list() know not to send more than\n # we need.\n max_to_fetch = None\n if max_tasks:\n max_to_fetch = max_tasks - tasks_yielded\n ops, page_token = self._operations_list(ops_filter, max_to_fetch,\n page_size, page_token)\n\n for op in ops:\n yield op\n tasks_yielded += 1\n\n assert (max_tasks >= tasks_yielded or not max_tasks)\n if not page_token or 0 < max_tasks <= tasks_yielded:\n break","function_tokens":["def","lookup_job_tasks","(","self",",","statuses",",","user_ids","=","None",",","job_ids","=","None",",","job_names","=","None",",","task_ids","=","None",",","task_attempts","=","None",",","labels","=","None",",","create_time_min","=","None",",","create_time_max","=","None",",","max_tasks","=","0",",","page_size","=","0",")",":","# Build a filter for operations to return","ops_filter","=","self",".","_build_query_filter","(","statuses",",","user_ids",",","job_ids",",","job_names",",","task_ids",",","task_attempts",",","labels",",","create_time_min",",","create_time_max",")","# 
Execute the operations.list() API to get batches of operations to yield","page_token","=","None","tasks_yielded","=","0","while","True",":","# If max_tasks is set, let operations.list() know not to send more than","# we need.","max_to_fetch","=","None","if","max_tasks",":","max_to_fetch","=","max_tasks","-","tasks_yielded","ops",",","page_token","=","self",".","_operations_list","(","ops_filter",",","max_to_fetch",",","page_size",",","page_token",")","for","op","in","ops",":","yield","op","tasks_yielded","+=","1","assert","(","max_tasks",">=","tasks_yielded","or","not","max_tasks",")","if","not","page_token","or","0","<","max_tasks","<=","tasks_yielded",":","break"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1127-L1193"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2JobProviderBase.delete_jobs","parameters":"(self,\n user_ids,\n job_ids,\n task_ids,\n labels,\n create_time_min=None,\n create_time_max=None)","argument_list":"","return_statement":"return google_base.cancel(self._batch_handler_def(),\n self._operations_cancel_api_def(), tasks)","docstring":"Kills the operations associated with the specified job or job.task.\n\n Args:\n user_ids: List of user ids who \"own\" the job(s) to cancel.\n job_ids: List of job_ids to cancel.\n task_ids: List of task-ids to cancel.\n labels: List of LabelParam, each must match the job(s) to be canceled.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent\n create time of a task, inclusive.\n\n Returns:\n A list of tasks canceled and a list of error messages.","docstring_summary":"Kills the operations associated with the specified job or job.task.","docstring_tokens":["Kills","the","operations","associated","with","the","specified","job","or","job",".","task","."],"function":"def delete_jobs(self,\n user_ids,\n job_ids,\n task_ids,\n labels,\n create_time_min=None,\n create_time_max=None):\n \"\"\"Kills the operations associated with the specified job or job.task.\n\n Args:\n user_ids: List of user ids who \"own\" the job(s) to cancel.\n job_ids: List of job_ids to cancel.\n task_ids: List of task-ids to cancel.\n labels: List of LabelParam, each must match the job(s) to be canceled.\n create_time_min: a timezone-aware datetime value for the earliest create\n time of a task, inclusive.\n create_time_max: a timezone-aware datetime value for the most recent\n create time of a task, inclusive.\n\n Returns:\n A list of tasks canceled and a list of error messages.\n \"\"\"\n # Look up the job(s)\n tasks = list(\n self.lookup_job_tasks(\n {'RUNNING'},\n user_ids=user_ids,\n job_ids=job_ids,\n task_ids=task_ids,\n labels=labels,\n create_time_min=create_time_min,\n create_time_max=create_time_max))\n\n print('Found %d tasks to delete.' 
% len(tasks))\n\n return google_base.cancel(self._batch_handler_def(),\n self._operations_cancel_api_def(), tasks)","function_tokens":["def","delete_jobs","(","self",",","user_ids",",","job_ids",",","task_ids",",","labels",",","create_time_min","=","None",",","create_time_max","=","None",")",":","# Look up the job(s)","tasks","=","list","(","self",".","lookup_job_tasks","(","{","'RUNNING'","}",",","user_ids","=","user_ids",",","job_ids","=","job_ids",",","task_ids","=","task_ids",",","labels","=","labels",",","create_time_min","=","create_time_min",",","create_time_max","=","create_time_max",")",")","print","(","'Found %d tasks to delete.'","%","len","(","tasks",")",")","return","google_base",".","cancel","(","self",".","_batch_handler_def","(",")",",","self",".","_operations_cancel_api_def","(",")",",","tasks",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1195-L1231"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleOperation._operation_status","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Returns the status of this operation.\n\n Raises:\n ValueError: if the operation status cannot be determined.\n\n Returns:\n A printable status string (RUNNING, SUCCESS, CANCELED or FAILURE).","docstring_summary":"Returns the status of this operation.","docstring_tokens":["Returns","the","status","of","this","operation","."],"function":"def _operation_status(self):\n \"\"\"Returns the status of this operation.\n\n Raises:\n ValueError: if the operation status cannot be determined.\n\n Returns:\n A printable status string (RUNNING, SUCCESS, CANCELED or FAILURE).\n \"\"\"\n if not google_v2_operations.is_done(self._op):\n return 'RUNNING'\n if google_v2_operations.is_success(self._op):\n return 'SUCCESS'\n if google_v2_operations.is_canceled(self._op):\n return 'CANCELED'\n if google_v2_operations.is_failed(self._op):\n return 'FAILURE'\n\n raise ValueError('Status for operation {} could not be determined'.format(\n self._op['name']))","function_tokens":["def","_operation_status","(","self",")",":","if","not","google_v2_operations",".","is_done","(","self",".","_op",")",":","return","'RUNNING'","if","google_v2_operations",".","is_success","(","self",".","_op",")",":","return","'SUCCESS'","if","google_v2_operations",".","is_canceled","(","self",".","_op",")",":","return","'CANCELED'","if","google_v2_operations",".","is_failed","(","self",".","_op",")",":","return","'FAILURE'","raise","ValueError","(","'Status for operation {} could not be determined'",".","format","(","self",".","_op","[","'name'","]",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1265-L1284"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleOperation._operation_status_message","parameters":"(self)","argument_list":"","return_statement":"return msg, action, detail","docstring":"Returns the most relevant status string and failed action.\n\n This string is meant for display only.\n\n Returns:\n A triple of:\n - printable status message\n - the action that failed (if any)\n - a detail message (if available)","docstring_summary":"Returns the most relevant status string and failed 
action.","docstring_tokens":["Returns","the","most","relevant","status","string","and","failed","action","."],"function":"def _operation_status_message(self):\n \"\"\"Returns the most relevant status string and failed action.\n\n This string is meant for display only.\n\n Returns:\n A triple of:\n - printable status message\n - the action that failed (if any)\n - a detail message (if available)\n \"\"\"\n msg = None\n action = None\n detail = None\n\n if not google_v2_operations.is_done(self._op):\n last_event = google_v2_operations.get_last_event(self._op)\n if last_event:\n if google_v2_operations.is_worker_assigned_event(last_event):\n msg = 'VM starting (awaiting worker checkin)'\n detail = last_event['description']\n elif google_v2_operations.is_pull_started_event(last_event):\n detail = last_event['description']\n msg = detail.replace('Started pulling', 'Pulling')\n else:\n msg = last_event['description']\n action_id = last_event.get('details', {}).get('actionId')\n if action_id:\n action = google_v2_operations.get_action_by_id(self._op, action_id)\n else:\n msg = 'Pending'\n\n elif google_v2_operations.is_success(self._op):\n msg = 'Success'\n\n else:\n # We have a failure condition and want to get the best details of why.\n\n # For a single failure, we may get multiple failure events.\n # For the Life Sciences v2 provider, events may look like:\n\n # - description: 'Execution failed: generic::failed_precondition: ...\n # failed:\n # cause: 'Execution failed: generic::failed_precondition: while ...\n # code: FAILED_PRECONDITION\n # timestamp: '2020-09-28T23:10:09.364365339Z'\n # - description: Unexpected exit status 127 while running \"user-command\"\n # timestamp: '2020-09-28T23:10:04.671139036Z'\n # unexpectedExitStatus:\n # actionId: 4\n # exitStatus: 127\n # - containerStopped:\n # actionId: 4\n # exitStatus: 127\n # stderr: |\n # bash: line 3: \/mnt\/data\/script\/foo: No such file or directory\n # description: 'Stopped running \"user-command\": exit status 127: ...\n # timestamp: '2020-09-28T23:10:04.671133099Z'\n\n # If we can get a containerStopped event, it has the best information\n # Otherwise fallback to unexpectedExitStatus.\n # Otherwise fallback to failed.\n\n container_failed_events = google_v2_operations.get_container_stopped_error_events(\n self._op)\n unexpected_exit_events = google_v2_operations.get_unexpected_exit_events(\n self._op)\n failed_events = google_v2_operations.get_failed_events(self._op)\n\n if container_failed_events:\n container_failed_event = container_failed_events[-1]\n action_id = google_v2_operations.get_event_action_id(\n container_failed_event)\n msg = google_v2_operations.get_event_description(container_failed_event)\n detail = google_v2_operations.get_event_stderr(container_failed_event)\n\n elif unexpected_exit_events:\n unexpected_exit_event = unexpected_exit_events[-1]\n action_id = google_v2_operations.get_event_action_id(\n unexpected_exit_event)\n msg = google_v2_operations.get_event_description(unexpected_exit_event)\n\n elif failed_events:\n failed_event = failed_events[-1]\n msg = google_v2_operations.get_event_description(failed_event)\n action_id = None\n\n if not msg:\n error = google_v2_operations.get_error(self._op)\n if error:\n msg = error['message']\n\n action = google_v2_operations.get_action_by_id(self._op, action_id)\n\n return msg, action, 
detail","function_tokens":["def","_operation_status_message","(","self",")",":","msg","=","None","action","=","None","detail","=","None","if","not","google_v2_operations",".","is_done","(","self",".","_op",")",":","last_event","=","google_v2_operations",".","get_last_event","(","self",".","_op",")","if","last_event",":","if","google_v2_operations",".","is_worker_assigned_event","(","last_event",")",":","msg","=","'VM starting (awaiting worker checkin)'","detail","=","last_event","[","'description'","]","elif","google_v2_operations",".","is_pull_started_event","(","last_event",")",":","detail","=","last_event","[","'description'","]","msg","=","detail",".","replace","(","'Started pulling'",",","'Pulling'",")","else",":","msg","=","last_event","[","'description'","]","action_id","=","last_event",".","get","(","'details'",",","{","}",")",".","get","(","'actionId'",")","if","action_id",":","action","=","google_v2_operations",".","get_action_by_id","(","self",".","_op",",","action_id",")","else",":","msg","=","'Pending'","elif","google_v2_operations",".","is_success","(","self",".","_op",")",":","msg","=","'Success'","else",":","# We have a failure condition and want to get the best details of why.","# For a single failure, we may get multiple failure events.","# For the Life Sciences v2 provider, events may look like:","# - description: 'Execution failed: generic::failed_precondition: ...","# failed:","# cause: 'Execution failed: generic::failed_precondition: while ...","# code: FAILED_PRECONDITION","# timestamp: '2020-09-28T23:10:09.364365339Z'","# - description: Unexpected exit status 127 while running \"user-command\"","# timestamp: '2020-09-28T23:10:04.671139036Z'","# unexpectedExitStatus:","# actionId: 4","# exitStatus: 127","# - containerStopped:","# actionId: 4","# exitStatus: 127","# stderr: |","# bash: line 3: \/mnt\/data\/script\/foo: No such file or directory","# description: 'Stopped running \"user-command\": exit status 127: ...","# timestamp: '2020-09-28T23:10:04.671133099Z'","# If we can get a containerStopped event, it has the best information","# Otherwise fallback to unexpectedExitStatus.","# Otherwise fallback to 
failed.","container_failed_events","=","google_v2_operations",".","get_container_stopped_error_events","(","self",".","_op",")","unexpected_exit_events","=","google_v2_operations",".","get_unexpected_exit_events","(","self",".","_op",")","failed_events","=","google_v2_operations",".","get_failed_events","(","self",".","_op",")","if","container_failed_events",":","container_failed_event","=","container_failed_events","[","-","1","]","action_id","=","google_v2_operations",".","get_event_action_id","(","container_failed_event",")","msg","=","google_v2_operations",".","get_event_description","(","container_failed_event",")","detail","=","google_v2_operations",".","get_event_stderr","(","container_failed_event",")","elif","unexpected_exit_events",":","unexpected_exit_event","=","unexpected_exit_events","[","-","1","]","action_id","=","google_v2_operations",".","get_event_action_id","(","unexpected_exit_event",")","msg","=","google_v2_operations",".","get_event_description","(","unexpected_exit_event",")","elif","failed_events",":","failed_event","=","failed_events","[","-","1","]","msg","=","google_v2_operations",".","get_event_description","(","failed_event",")","action_id","=","None","if","not","msg",":","error","=","google_v2_operations",".","get_error","(","self",".","_op",")","if","error",":","msg","=","error","[","'message'","]","action","=","google_v2_operations",".","get_action_by_id","(","self",".","_op",",","action_id",")","return","msg",",","action",",","detail"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1286-L1380"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleOperation._is_ssh_enabled","parameters":"(self, op)","argument_list":"","return_statement":"return action is not None","docstring":"Return whether the operation had --ssh enabled or not.","docstring_summary":"Return whether the operation had --ssh enabled or not.","docstring_tokens":["Return","whether","the","operation","had","--","ssh","enabled","or","not","."],"function":"def _is_ssh_enabled(self, op):\n \"\"\"Return whether the operation had --ssh enabled or not.\"\"\"\n action = google_v2_operations.get_action_by_name(op, 'ssh')\n return action is not None","function_tokens":["def","_is_ssh_enabled","(","self",",","op",")",":","action","=","google_v2_operations",".","get_action_by_name","(","op",",","'ssh'",")","return","action","is","not","None"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1382-L1385"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleOperation.error_message","parameters":"(self)","argument_list":"","return_statement":"return ''","docstring":"Returns an error message if the operation failed for any reason.\n\n Failure as defined here means ended for any reason other than 'success'.\n This means that a successful cancelation will also return an error message.\n\n Returns:\n string, string will be empty if job did not error.","docstring_summary":"Returns an error message if the operation failed for any reason.","docstring_tokens":["Returns","an","error","message","if","the","operation","failed","for","any","reason","."],"function":"def error_message(self):\n \"\"\"Returns an error message if the 
operation failed for any reason.\n\n Failure as defined here means ended for any reason other than 'success'.\n This means that a successful cancelation will also return an error message.\n\n Returns:\n string, string will be empty if job did not error.\n \"\"\"\n error = google_v2_operations.get_error(self._op)\n if error:\n job_id = self.get_field('job-id')\n task_id = self.get_field('task-id')\n task_str = job_id if task_id is None else '{} (task: {})'.format(\n job_id, task_id)\n\n return 'Error in {} - code {}: {}'.format(task_str, error['code'],\n error['message'])\n\n return ''","function_tokens":["def","error_message","(","self",")",":","error","=","google_v2_operations",".","get_error","(","self",".","_op",")","if","error",":","job_id","=","self",".","get_field","(","'job-id'",")","task_id","=","self",".","get_field","(","'task-id'",")","task_str","=","job_id","if","task_id","is","None","else","'{} (task: {})'",".","format","(","job_id",",","task_id",")","return","'Error in {} - code {}: {}'",".","format","(","task_str",",","error","[","'code'","]",",","error","[","'message'","]",")","return","''"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1387-L1406"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleOperation.get_field","parameters":"(self, field, default=None)","argument_list":"","return_statement":"return value if value else default","docstring":"Returns a value from the operation for a specific set of field names.\n\n Args:\n field: a dsub-specific job metadata key\n default: default value to return if field does not exist or is empty.\n\n Returns:\n A text string for the field or a list for 'inputs'.\n\n Raises:\n ValueError: if the field label is not supported by the operation","docstring_summary":"Returns a value from the operation for a specific set of field names.","docstring_tokens":["Returns","a","value","from","the","operation","for","a","specific","set","of","field","names","."],"function":"def get_field(self, field, default=None):\n \"\"\"Returns a value from the operation for a specific set of field names.\n\n Args:\n field: a dsub-specific job metadata key\n default: default value to return if field does not exist or is empty.\n\n Returns:\n A text string for the field or a list for 'inputs'.\n\n Raises:\n ValueError: if the field label is not supported by the operation\n \"\"\"\n\n value = None\n if field == 'internal-id':\n value = self._op['name']\n elif field == 'user-project':\n if self._job_descriptor:\n value = self._job_descriptor.job_metadata.get(field)\n elif field in [\n 'job-id', 'job-name', 'task-id', 'task-attempt', 'user-id',\n 'dsub-version'\n ]:\n value = google_v2_operations.get_label(self._op, field)\n elif field == 'task-status':\n value = self._operation_status()\n elif field == 'logging':\n if self._job_descriptor:\n # The job_resources will contain the \"--logging\" value.\n # The task_resources will contain the resolved logging path.\n # Return the resolved logging path.\n task_resources = self._job_descriptor.task_descriptors[0].task_resources\n value = task_resources.logging_path\n\n elif field in ['envs', 'labels']:\n if self._job_descriptor:\n items = providers_util.get_job_and_task_param(\n self._job_descriptor.job_params,\n self._job_descriptor.task_descriptors[0].task_params, field)\n value = {item.name: item.value for item in 
items}\n elif field in [\n 'inputs', 'outputs', 'input-recursives', 'output-recursives'\n ]:\n if self._job_descriptor:\n value = {}\n items = providers_util.get_job_and_task_param(\n self._job_descriptor.job_params,\n self._job_descriptor.task_descriptors[0].task_params, field)\n value.update({item.name: item.value for item in items})\n elif field == 'mounts':\n if self._job_descriptor:\n items = providers_util.get_job_and_task_param(\n self._job_descriptor.job_params,\n self._job_descriptor.task_descriptors[0].task_params, field)\n value = {item.name: item.value for item in items}\n elif field == 'create-time':\n ds = google_v2_operations.get_create_time(self._op)\n value = google_base.parse_rfc3339_utc_string(ds)\n elif field == 'start-time':\n ds = google_v2_operations.get_start_time(self._op)\n if ds:\n value = google_base.parse_rfc3339_utc_string(ds)\n elif field == 'end-time':\n ds = google_v2_operations.get_end_time(self._op)\n if ds:\n value = google_base.parse_rfc3339_utc_string(ds)\n\n elif field == 'status':\n # Short message like:\n # \"Pending\", \"VM starting\", \"\", \"Success\", \"Cancelled\"\n value = self._operation_status()\n\n elif field == 'status-message':\n # Longer message\n msg, action, detail = self._operation_status_message()\n if msg.startswith('Execution failed:'):\n # msg may look something like\n # \"Execution failed: action 2: pulling image...\"\n # Emit the actual message (\"pulling image...\")\n msg = msg.split(': ', 2)[-1]\n value = msg\n\n elif field == 'status-detail':\n # As much detail as we can reasonably get from the operation\n msg, action, detail = self._operation_status_message()\n if detail:\n msg = detail\n\n if action:\n value = google_v2_operations.get_action_name(action) + ':\\n' + msg\n else:\n value = msg\n\n elif field == 'last-update':\n last_update = google_v2_operations.get_last_update(self._op)\n if last_update:\n value = google_base.parse_rfc3339_utc_string(last_update)\n elif field == 'provider':\n return self._provider_name\n elif field == 'provider-attributes':\n value = {}\n\n # The ssh flag is determined by if an action named 'ssh' exists.\n value['ssh'] = self._is_ssh_enabled(self._op)\n\n value[\n 'block-external-network'] = google_v2_operations.external_network_blocked(\n self._op)\n\n # The VM instance name and zone can be found in the WorkerAssignedEvent.\n # For a given operation, this may have occurred multiple times, so be\n # sure to grab the most recent.\n assigned_event_details = google_v2_operations.get_worker_assigned_event_details(\n self._op)\n if assigned_event_details:\n value['instance-name'] = assigned_event_details.get('instance')\n value['zone'] = assigned_event_details.get('zone')\n\n # The rest of the information comes from the request itself.\n # Note that for the v2alpha1 API, the returned operation contains\n # default values in the response, while the v2beta API omits fields\n # that match empty defaults (hence the \"False\", \"[]\", and empty string\n # default values in the get() calls below).\n resources = google_v2_operations.get_resources(self._op)\n value['regions'] = resources.get('regions', [])\n value['zones'] = resources.get('zones', [])\n if 'virtualMachine' in resources:\n vm = resources['virtualMachine']\n value['machine-type'] = vm.get('machineType')\n value['preemptible'] = vm.get('preemptible', False)\n\n value['boot-disk-size'] = vm.get('bootDiskSizeGb')\n value['network'] = google_v2_operations.get_vm_network_name(vm) or ''\n value['subnetwork'] = vm.get('network', 
{}).get('subnetwork', '')\n value['use_private_address'] = vm.get('network',\n {}).get('usePrivateAddress',\n False)\n value['cpu_platform'] = vm.get('cpuPlatform', '')\n value['accelerators'] = vm.get('accelerators', [])\n value['enable-stackdriver-monitoring'] = vm.get(\n 'enableStackdriverMonitoring', False)\n value['service-account'] = vm.get('serviceAccount', {}).get('email')\n if 'disks' in vm:\n datadisk = next(\n (d for d in vm['disks'] if d['name'] == _DATA_DISK_NAME))\n if datadisk:\n value['disk-size'] = datadisk.get('sizeGb')\n value['disk-type'] = datadisk.get('type')\n elif field == 'events':\n value = GoogleV2EventMap(self._op).get_filtered_normalized_events()\n elif field == 'script-name':\n if self._job_descriptor:\n value = self._job_descriptor.job_metadata.get(field)\n elif field == 'script':\n value = self._try_op_to_script_body()\n else:\n raise ValueError('Unsupported field: \"%s\"' % field)\n\n return value if value else default","function_tokens":["def","get_field","(","self",",","field",",","default","=","None",")",":","value","=","None","if","field","==","'internal-id'",":","value","=","self",".","_op","[","'name'","]","elif","field","==","'user-project'",":","if","self",".","_job_descriptor",":","value","=","self",".","_job_descriptor",".","job_metadata",".","get","(","field",")","elif","field","in","[","'job-id'",",","'job-name'",",","'task-id'",",","'task-attempt'",",","'user-id'",",","'dsub-version'","]",":","value","=","google_v2_operations",".","get_label","(","self",".","_op",",","field",")","elif","field","==","'task-status'",":","value","=","self",".","_operation_status","(",")","elif","field","==","'logging'",":","if","self",".","_job_descriptor",":","# The job_resources will contain the \"--logging\" value.","# The task_resources will contain the resolved logging path.","# Return the resolved logging 
path.","task_resources","=","self",".","_job_descriptor",".","task_descriptors","[","0","]",".","task_resources","value","=","task_resources",".","logging_path","elif","field","in","[","'envs'",",","'labels'","]",":","if","self",".","_job_descriptor",":","items","=","providers_util",".","get_job_and_task_param","(","self",".","_job_descriptor",".","job_params",",","self",".","_job_descriptor",".","task_descriptors","[","0","]",".","task_params",",","field",")","value","=","{","item",".","name",":","item",".","value","for","item","in","items","}","elif","field","in","[","'inputs'",",","'outputs'",",","'input-recursives'",",","'output-recursives'","]",":","if","self",".","_job_descriptor",":","value","=","{","}","items","=","providers_util",".","get_job_and_task_param","(","self",".","_job_descriptor",".","job_params",",","self",".","_job_descriptor",".","task_descriptors","[","0","]",".","task_params",",","field",")","value",".","update","(","{","item",".","name",":","item",".","value","for","item","in","items","}",")","elif","field","==","'mounts'",":","if","self",".","_job_descriptor",":","items","=","providers_util",".","get_job_and_task_param","(","self",".","_job_descriptor",".","job_params",",","self",".","_job_descriptor",".","task_descriptors","[","0","]",".","task_params",",","field",")","value","=","{","item",".","name",":","item",".","value","for","item","in","items","}","elif","field","==","'create-time'",":","ds","=","google_v2_operations",".","get_create_time","(","self",".","_op",")","value","=","google_base",".","parse_rfc3339_utc_string","(","ds",")","elif","field","==","'start-time'",":","ds","=","google_v2_operations",".","get_start_time","(","self",".","_op",")","if","ds",":","value","=","google_base",".","parse_rfc3339_utc_string","(","ds",")","elif","field","==","'end-time'",":","ds","=","google_v2_operations",".","get_end_time","(","self",".","_op",")","if","ds",":","value","=","google_base",".","parse_rfc3339_utc_string","(","ds",")","elif","field","==","'status'",":","# Short message like:","# \"Pending\", \"VM starting\", \"\", \"Success\", \"Cancelled\"","value","=","self",".","_operation_status","(",")","elif","field","==","'status-message'",":","# Longer message","msg",",","action",",","detail","=","self",".","_operation_status_message","(",")","if","msg",".","startswith","(","'Execution failed:'",")",":","# msg may look something like","# \"Execution failed: action 2: pulling image...\"","# Emit the actual message (\"pulling image...\")","msg","=","msg",".","split","(","': '",",","2",")","[","-","1","]","value","=","msg","elif","field","==","'status-detail'",":","# As much detail as we can reasonably get from the operation","msg",",","action",",","detail","=","self",".","_operation_status_message","(",")","if","detail",":","msg","=","detail","if","action",":","value","=","google_v2_operations",".","get_action_name","(","action",")","+","':\\n'","+","msg","else",":","value","=","msg","elif","field","==","'last-update'",":","last_update","=","google_v2_operations",".","get_last_update","(","self",".","_op",")","if","last_update",":","value","=","google_base",".","parse_rfc3339_utc_string","(","last_update",")","elif","field","==","'provider'",":","return","self",".","_provider_name","elif","field","==","'provider-attributes'",":","value","=","{","}","# The ssh flag is determined by if an action named 'ssh' 
exists.","value","[","'ssh'","]","=","self",".","_is_ssh_enabled","(","self",".","_op",")","value","[","'block-external-network'","]","=","google_v2_operations",".","external_network_blocked","(","self",".","_op",")","# The VM instance name and zone can be found in the WorkerAssignedEvent.","# For a given operation, this may have occurred multiple times, so be","# sure to grab the most recent.","assigned_event_details","=","google_v2_operations",".","get_worker_assigned_event_details","(","self",".","_op",")","if","assigned_event_details",":","value","[","'instance-name'","]","=","assigned_event_details",".","get","(","'instance'",")","value","[","'zone'","]","=","assigned_event_details",".","get","(","'zone'",")","# The rest of the information comes from the request itself.","# Note that for the v2alpha1 API, the returned operation contains","# default values in the response, while the v2beta API omits fields","# that match empty defaults (hence the \"False\", \"[]\", and empty string","# default values in the get() calls below).","resources","=","google_v2_operations",".","get_resources","(","self",".","_op",")","value","[","'regions'","]","=","resources",".","get","(","'regions'",",","[","]",")","value","[","'zones'","]","=","resources",".","get","(","'zones'",",","[","]",")","if","'virtualMachine'","in","resources",":","vm","=","resources","[","'virtualMachine'","]","value","[","'machine-type'","]","=","vm",".","get","(","'machineType'",")","value","[","'preemptible'","]","=","vm",".","get","(","'preemptible'",",","False",")","value","[","'boot-disk-size'","]","=","vm",".","get","(","'bootDiskSizeGb'",")","value","[","'network'","]","=","google_v2_operations",".","get_vm_network_name","(","vm",")","or","''","value","[","'subnetwork'","]","=","vm",".","get","(","'network'",",","{","}",")",".","get","(","'subnetwork'",",","''",")","value","[","'use_private_address'","]","=","vm",".","get","(","'network'",",","{","}",")",".","get","(","'usePrivateAddress'",",","False",")","value","[","'cpu_platform'","]","=","vm",".","get","(","'cpuPlatform'",",","''",")","value","[","'accelerators'","]","=","vm",".","get","(","'accelerators'",",","[","]",")","value","[","'enable-stackdriver-monitoring'","]","=","vm",".","get","(","'enableStackdriverMonitoring'",",","False",")","value","[","'service-account'","]","=","vm",".","get","(","'serviceAccount'",",","{","}",")",".","get","(","'email'",")","if","'disks'","in","vm",":","datadisk","=","next","(","(","d","for","d","in","vm","[","'disks'","]","if","d","[","'name'","]","==","_DATA_DISK_NAME",")",")","if","datadisk",":","value","[","'disk-size'","]","=","datadisk",".","get","(","'sizeGb'",")","value","[","'disk-type'","]","=","datadisk",".","get","(","'type'",")","elif","field","==","'events'",":","value","=","GoogleV2EventMap","(","self",".","_op",")",".","get_filtered_normalized_events","(",")","elif","field","==","'script-name'",":","if","self",".","_job_descriptor",":","value","=","self",".","_job_descriptor",".","job_metadata",".","get","(","field",")","elif","field","==","'script'",":","value","=","self",".","_try_op_to_script_body","(",")","else",":","raise","ValueError","(","'Unsupported field: \"%s\"'","%","field",")","return","value","if","value","else","default"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1408-L1567"} 
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2CustomMachine._validate_cores","parameters":"(cores)","argument_list":"","return_statement":"","docstring":"Make sure cores is either one or even.","docstring_summary":"Make sure cores is either one or even.","docstring_tokens":["Make","sure","cores","is","either","one","or","even","."],"function":"def _validate_cores(cores):\n \"\"\"Make sure cores is either one or even.\"\"\"\n if cores == 1 or cores % 2 == 0:\n return cores\n else:\n return cores + 1","function_tokens":["def","_validate_cores","(","cores",")",":","if","cores","==","1","or","cores","%","2","==","0",":","return","cores","else",":","return","cores","+","1"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1585-L1590"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2CustomMachine._validate_ram","parameters":"(ram_in_mb)","argument_list":"","return_statement":"return int(GoogleV2CustomMachine._MEMORY_MULTIPLE * math.ceil(\n ram_in_mb \/ GoogleV2CustomMachine._MEMORY_MULTIPLE))","docstring":"Rounds ram up to the nearest multiple of _MEMORY_MULTIPLE.","docstring_summary":"Rounds ram up to the nearest multiple of _MEMORY_MULTIPLE.","docstring_tokens":["Rounds","ram","up","to","the","nearest","multiple","of","_MEMORY_MULTIPLE","."],"function":"def _validate_ram(ram_in_mb):\n \"\"\"Rounds ram up to the nearest multiple of _MEMORY_MULTIPLE.\"\"\"\n return int(GoogleV2CustomMachine._MEMORY_MULTIPLE * math.ceil(\n ram_in_mb \/ GoogleV2CustomMachine._MEMORY_MULTIPLE))","function_tokens":["def","_validate_ram","(","ram_in_mb",")",":","return","int","(","GoogleV2CustomMachine",".","_MEMORY_MULTIPLE","*","math",".","ceil","(","ram_in_mb","\/","GoogleV2CustomMachine",".","_MEMORY_MULTIPLE",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1593-L1596"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_base.py","language":"python","identifier":"GoogleV2CustomMachine.build_machine_type","parameters":"(cls, min_cores, min_ram)","argument_list":"","return_statement":"return 'custom-{}-{}'.format(int(cores), int(ram))","docstring":"Returns a custom machine type string.","docstring_summary":"Returns a custom machine type string.","docstring_tokens":["Returns","a","custom","machine","type","string","."],"function":"def build_machine_type(cls, min_cores, min_ram):\n \"\"\"Returns a custom machine type string.\"\"\"\n min_cores = min_cores or job_model.DEFAULT_MIN_CORES\n min_ram = min_ram or job_model.DEFAULT_MIN_RAM\n\n # First, min_ram is given in GB. 
Convert to MB.\n min_ram *= GoogleV2CustomMachine._MB_PER_GB\n\n # Only machine types with 1 vCPU or an even number of vCPUs can be created.\n cores = cls._validate_cores(min_cores)\n # The total memory of the instance must be a multiple of 256 MB.\n ram = cls._validate_ram(min_ram)\n\n # Memory must be between 0.9 GB per vCPU, up to 6.5 GB per vCPU.\n memory_to_cpu_ratio = ram \/ cores\n\n if memory_to_cpu_ratio < GoogleV2CustomMachine._MIN_MEMORY_PER_CPU:\n # If we're under the ratio, top up the memory.\n adjusted_ram = GoogleV2CustomMachine._MIN_MEMORY_PER_CPU * cores\n ram = cls._validate_ram(adjusted_ram)\n\n elif memory_to_cpu_ratio > GoogleV2CustomMachine._MAX_MEMORY_PER_CPU:\n # If we're over the ratio, top up the CPU.\n adjusted_cores = math.ceil(\n ram \/ GoogleV2CustomMachine._MAX_MEMORY_PER_CPU)\n cores = cls._validate_cores(adjusted_cores)\n\n else:\n # Ratio is within the restrictions - no adjustments needed.\n pass\n\n return 'custom-{}-{}'.format(int(cores), int(ram))","function_tokens":["def","build_machine_type","(","cls",",","min_cores",",","min_ram",")",":","min_cores","=","min_cores","or","job_model",".","DEFAULT_MIN_CORES","min_ram","=","min_ram","or","job_model",".","DEFAULT_MIN_RAM","# First, min_ram is given in GB. Convert to MB.","min_ram","*=","GoogleV2CustomMachine",".","_MB_PER_GB","# Only machine types with 1 vCPU or an even number of vCPUs can be created.","cores","=","cls",".","_validate_cores","(","min_cores",")","# The total memory of the instance must be a multiple of 256 MB.","ram","=","cls",".","_validate_ram","(","min_ram",")","# Memory must be between 0.9 GB per vCPU, up to 6.5 GB per vCPU.","memory_to_cpu_ratio","=","ram","\/","cores","if","memory_to_cpu_ratio","<","GoogleV2CustomMachine",".","_MIN_MEMORY_PER_CPU",":","# If we're under the ratio, top up the memory.","adjusted_ram","=","GoogleV2CustomMachine",".","_MIN_MEMORY_PER_CPU","*","cores","ram","=","cls",".","_validate_ram","(","adjusted_ram",")","elif","memory_to_cpu_ratio",">","GoogleV2CustomMachine",".","_MAX_MEMORY_PER_CPU",":","# If we're over the ratio, top up the CPU.","adjusted_cores","=","math",".","ceil","(","ram","\/","GoogleV2CustomMachine",".","_MAX_MEMORY_PER_CPU",")","cores","=","cls",".","_validate_cores","(","adjusted_cores",")","else",":","# Ratio is within the restrictions - no adjustments needed.","pass","return","'custom-{}-{}'",".","format","(","int","(","cores",")",",","int","(","ram",")",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_base.py#L1599-L1630"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_cls_v2.py","language":"python","identifier":"GoogleCLSV2JobProvider._get_pipeline_regions","parameters":"(self, regions, zones)","argument_list":"","return_statement":"return regions or []","docstring":"Returns the list of regions to use for a pipeline request.\n\n If neither regions nor zones were specified for the pipeline, then use the\n v2beta location as the default region.\n\n Args:\n regions (str): A space separated list of regions to use for the pipeline.\n zones (str): A space separated list of zones to use for the pipeline.","docstring_summary":"Returns the list of regions to use for a pipeline request.","docstring_tokens":["Returns","the","list","of","regions","to","use","for","a","pipeline","request","."],"function":"def _get_pipeline_regions(self, regions, zones):\n \"\"\"Returns the list of regions to use for a 
pipeline request.\n\n If neither regions nor zones were specified for the pipeline, then use the\n v2beta location as the default region.\n\n Args:\n regions (str): A space separated list of regions to use for the pipeline.\n zones (str): A space separated list of zones to use for the pipeline.\n \"\"\"\n\n if not regions and not zones:\n return [self._location]\n return regions or []","function_tokens":["def","_get_pipeline_regions","(","self",",","regions",",","zones",")",":","if","not","regions","and","not","zones",":","return","[","self",".","_location","]","return","regions","or","[","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_cls_v2.py#L37-L50"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_cls_v2.py","language":"python","identifier":"GoogleCLSV2JobProvider._batch_handler_def","parameters":"(self)","argument_list":"","return_statement":"return self._service.new_batch_http_request","docstring":"Returns a function object for the provider-specific batch handler.","docstring_summary":"Returns a function object for the provider-specific batch handler.","docstring_tokens":["Returns","a","function","object","for","the","provider","-","specific","batch","handler","."],"function":"def _batch_handler_def(self):\n \"\"\"Returns a function object for the provider-specific batch handler.\"\"\"\n\n # The batch endpoint currently only works for us-central1 requests.\n if self._location != 'us-central1':\n return google_v2_base.GoogleV2BatchHandler\n\n # The Lifesciences API provides a batch endpoint\n # (the Genomics v2alpha1 does not).\n #\n # This function returns the new_batch_http_request function, which the\n # caller can then use to create a BatchHttpRequest object.\n # The new_batch_http_request function is provided by the Google APIs\n # Python Client for batching requests destined for the batch endpoint.\n #\n # For documentation, see\n # https:\/\/googleapis.github.io\/google-api-python-client\/docs\/dyn\/lifesciences_v2beta.html#new_batch_http_request\n #\n # For example usage, see google_base.py (_cancel() and __cancel_batch()).\n\n return self._service.new_batch_http_request","function_tokens":["def","_batch_handler_def","(","self",")",":","# The batch endpoint currently only works for us-central1 requests.","if","self",".","_location","!=","'us-central1'",":","return","google_v2_base",".","GoogleV2BatchHandler","# The Lifesciences API provides a batch endpoint","# (the Genomics v2alpha1 does not).","#","# This function returns the new_batch_http_request function, which the","# caller can then use to create a BatchHttpRequest object.","# The new_batch_http_request function is provided by the Google APIs","# Python Client for batching requests destined for the batch endpoint.","#","# For documentation, see","# https:\/\/googleapis.github.io\/google-api-python-client\/docs\/dyn\/lifesciences_v2beta.html#new_batch_http_request","#","# For example usage, see google_base.py (_cancel() and __cancel_batch()).","return","self",".","_service",".","new_batch_http_request"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_cls_v2.py#L65-L85"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"label_filter","parameters":"(label_key, 
label_value)","argument_list":"","return_statement":"return 'labels.\"{}\" = \"{}\"'.format(label_key, label_value)","docstring":"Return a valid label filter for operations.list().","docstring_summary":"Return a valid label filter for operations.list().","docstring_tokens":["Return","a","valid","label","filter","for","operations",".","list","()","."],"function":"def label_filter(label_key, label_value):\n \"\"\"Return a valid label filter for operations.list().\"\"\"\n return 'labels.\"{}\" = \"{}\"'.format(label_key, label_value)","function_tokens":["def","label_filter","(","label_key",",","label_value",")",":","return","'labels.\"{}\" = \"{}\"'",".","format","(","label_key",",","label_value",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L36-L38"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"create_time_filter","parameters":"(create_time, comparator)","argument_list":"","return_statement":"return 'createTime {} \"{}\"'.format(comparator, create_time)","docstring":"Return a valid createTime filter for operations.list().","docstring_summary":"Return a valid createTime filter for operations.list().","docstring_tokens":["Return","a","valid","createTime","filter","for","operations",".","list","()","."],"function":"def create_time_filter(create_time, comparator):\n \"\"\"Return a valid createTime filter for operations.list().\"\"\"\n return 'createTime {} \"{}\"'.format(comparator, create_time)","function_tokens":["def","create_time_filter","(","create_time",",","comparator",")",":","return","'createTime {} \"{}\"'",".","format","(","comparator",",","create_time",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L41-L43"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_create_time","parameters":"(op)","argument_list":"","return_statement":"return op.get('metadata', {}).get('createTime')","docstring":"Return the create time string of the operation.","docstring_summary":"Return the create time string of the operation.","docstring_tokens":["Return","the","create","time","string","of","the","operation","."],"function":"def get_create_time(op):\n \"\"\"Return the create time string of the operation.\"\"\"\n return op.get('metadata', {}).get('createTime')","function_tokens":["def","get_create_time","(","op",")",":","return","op",".","get","(","'metadata'",",","{","}",")",".","get","(","'createTime'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L46-L48"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_start_time","parameters":"(op)","argument_list":"","return_statement":"return op.get('metadata', {}).get('startTime')","docstring":"Return the start time string of the operation.","docstring_summary":"Return the start time string of the operation.","docstring_tokens":["Return","the","start","time","string","of","the","operation","."],"function":"def get_start_time(op):\n \"\"\"Return the start time string of the operation.\"\"\"\n return op.get('metadata', 
{}).get('startTime')","function_tokens":["def","get_start_time","(","op",")",":","return","op",".","get","(","'metadata'",",","{","}",")",".","get","(","'startTime'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L51-L53"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_end_time","parameters":"(op)","argument_list":"","return_statement":"return op.get('metadata', {}).get('endTime')","docstring":"Return the end time string of the operation.","docstring_summary":"Return the end time string of the operation.","docstring_tokens":["Return","the","end","time","string","of","the","operation","."],"function":"def get_end_time(op):\n \"\"\"Return the end time string of the operation.\"\"\"\n return op.get('metadata', {}).get('endTime')","function_tokens":["def","get_end_time","(","op",")",":","return","op",".","get","(","'metadata'",",","{","}",")",".","get","(","'endTime'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L56-L58"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_metadata_type","parameters":"(op)","argument_list":"","return_statement":"return op.get('metadata', {}).get('@type')","docstring":"Return the internal metadata type of the operation.","docstring_summary":"Return the internal metadata type of the operation.","docstring_tokens":["Return","the","internal","metadata","type","of","the","operation","."],"function":"def get_metadata_type(op):\n \"\"\"Return the internal metadata type of the operation.\"\"\"\n return op.get('metadata', {}).get('@type')","function_tokens":["def","get_metadata_type","(","op",")",":","return","op",".","get","(","'metadata'",",","{","}",")",".","get","(","'@type'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L61-L63"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_error","parameters":"(op)","argument_list":"","return_statement":"return op.get('error')","docstring":"Return the error structure for the operation.","docstring_summary":"Return the error structure for the operation.","docstring_tokens":["Return","the","error","structure","for","the","operation","."],"function":"def get_error(op):\n \"\"\"Return the error structure for the operation.\"\"\"\n return op.get('error')","function_tokens":["def","get_error","(","op",")",":","return","op",".","get","(","'error'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L66-L68"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_done","parameters":"(op)","argument_list":"","return_statement":"return op.get('done', False)","docstring":"Return whether the operation has been marked done.","docstring_summary":"Return whether the operation has been marked done.","docstring_tokens":["Return","whether","the","operation","has","been","marked","done","."],"function":"def 
is_done(op):\n \"\"\"Return whether the operation has been marked done.\"\"\"\n return op.get('done', False)","function_tokens":["def","is_done","(","op",")",":","return","op",".","get","(","'done'",",","False",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L71-L73"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_success","parameters":"(op)","argument_list":"","return_statement":"return is_done(op) and ('error' not in op)","docstring":"Return whether the operation has completed successfully.","docstring_summary":"Return whether the operation has completed successfully.","docstring_tokens":["Return","whether","the","operation","has","completed","successfully","."],"function":"def is_success(op):\n \"\"\"Return whether the operation has completed successfully.\"\"\"\n return is_done(op) and ('error' not in op)","function_tokens":["def","is_success","(","op",")",":","return","is_done","(","op",")","and","(","'error'","not","in","op",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L76-L78"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_canceled","parameters":"(op)","argument_list":"","return_statement":"return error and error.get('code', 0) == 1","docstring":"Return whether the operation was canceled by the user.","docstring_summary":"Return whether the operation was canceled by the user.","docstring_tokens":["Return","whether","the","operation","was","canceled","by","the","user","."],"function":"def is_canceled(op):\n \"\"\"Return whether the operation was canceled by the user.\"\"\"\n error = get_error(op)\n return error and error.get('code', 0) == 1","function_tokens":["def","is_canceled","(","op",")",":","error","=","get_error","(","op",")","return","error","and","error",".","get","(","'code'",",","0",")","==","1"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L81-L84"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_failed","parameters":"(op)","argument_list":"","return_statement":"return error and error.get('code', 0) > 1","docstring":"Return whether the operation has failed.","docstring_summary":"Return whether the operation has failed.","docstring_tokens":["Return","whether","the","operation","has","failed","."],"function":"def is_failed(op):\n \"\"\"Return whether the operation has failed.\"\"\"\n error = get_error(op)\n return error and error.get('code', 0) > 1","function_tokens":["def","is_failed","(","op",")",":","error","=","get_error","(","op",")","return","error","and","error",".","get","(","'code'",",","0",")",">","1"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L87-L90"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_labels","parameters":"(op)","argument_list":"","return_statement":"return op.get('metadata', {}).get('labels', 
{})","docstring":"Return the operation's array of labels.","docstring_summary":"Return the operation's array of labels.","docstring_tokens":["Return","the","operation","s","array","of","labels","."],"function":"def get_labels(op):\n \"\"\"Return the operation's array of labels.\"\"\"\n return op.get('metadata', {}).get('labels', {})","function_tokens":["def","get_labels","(","op",")",":","return","op",".","get","(","'metadata'",",","{","}",")",".","get","(","'labels'",",","{","}",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L93-L95"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_label","parameters":"(op, name)","argument_list":"","return_statement":"return get_labels(op).get(name)","docstring":"Return the value for the specified label.","docstring_summary":"Return the value for the specified label.","docstring_tokens":["Return","the","value","for","the","specified","label","."],"function":"def get_label(op, name):\n \"\"\"Return the value for the specified label.\"\"\"\n return get_labels(op).get(name)","function_tokens":["def","get_label","(","op",",","name",")",":","return","get_labels","(","op",")",".","get","(","name",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L98-L100"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_actions","parameters":"(op)","argument_list":"","return_statement":"return op.get('metadata', {}).get('pipeline').get('actions', [])","docstring":"Return the operation's array of actions.","docstring_summary":"Return the operation's array of actions.","docstring_tokens":["Return","the","operation","s","array","of","actions","."],"function":"def get_actions(op):\n \"\"\"Return the operation's array of actions.\"\"\"\n return op.get('metadata', {}).get('pipeline').get('actions', [])","function_tokens":["def","get_actions","(","op",")",":","return","op",".","get","(","'metadata'",",","{","}",")",".","get","(","'pipeline'",")",".","get","(","'actions'",",","[","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L103-L105"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_action_by_id","parameters":"(op, action_id)","argument_list":"","return_statement":"","docstring":"Return the operation's array of actions.","docstring_summary":"Return the operation's array of actions.","docstring_tokens":["Return","the","operation","s","array","of","actions","."],"function":"def get_action_by_id(op, action_id):\n \"\"\"Return the operation's array of actions.\"\"\"\n actions = get_actions(op)\n if actions and 1 <= action_id < len(actions):\n return actions[action_id - 
1]","function_tokens":["def","get_action_by_id","(","op",",","action_id",")",":","actions","=","get_actions","(","op",")","if","actions","and","1","<=","action_id","<","len","(","actions",")",":","return","actions","[","action_id","-","1","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L108-L112"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_action_by_name","parameters":"(op, name)","argument_list":"","return_statement":"","docstring":"Return the value for the specified action.","docstring_summary":"Return the value for the specified action.","docstring_tokens":["Return","the","value","for","the","specified","action","."],"function":"def get_action_by_name(op, name):\n \"\"\"Return the value for the specified action.\"\"\"\n actions = get_actions(op)\n for action in actions:\n if get_action_name(action) == name:\n return action","function_tokens":["def","get_action_by_name","(","op",",","name",")",":","actions","=","get_actions","(","op",")","for","action","in","actions",":","if","get_action_name","(","action",")","==","name",":","return","action"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L115-L120"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_action_environment","parameters":"(op, name)","argument_list":"","return_statement":"","docstring":"Return the environment for the operation.","docstring_summary":"Return the environment for the operation.","docstring_tokens":["Return","the","environment","for","the","operation","."],"function":"def get_action_environment(op, name):\n \"\"\"Return the environment for the operation.\"\"\"\n action = get_action_by_name(op, name)\n if action:\n return action.get('environment')","function_tokens":["def","get_action_environment","(","op",",","name",")",":","action","=","get_action_by_name","(","op",",","name",")","if","action",":","return","action",".","get","(","'environment'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L134-L138"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_action_image","parameters":"(op, name)","argument_list":"","return_statement":"","docstring":"Return the image for the operation.","docstring_summary":"Return the image for the operation.","docstring_tokens":["Return","the","image","for","the","operation","."],"function":"def get_action_image(op, name):\n \"\"\"Return the image for the operation.\"\"\"\n action = get_action_by_name(op, name)\n if action:\n return action.get('imageUri')","function_tokens":["def","get_action_image","(","op",",","name",")",":","action","=","get_action_by_name","(","op",",","name",")","if","action",":","return","action",".","get","(","'imageUri'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L141-L145"} 
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_events","parameters":"(op)","argument_list":"","return_statement":"return op.get('metadata', {}).get('events', [])","docstring":"Return the array of events for the operation.","docstring_summary":"Return the array of events for the operation.","docstring_tokens":["Return","the","array","of","events","for","the","operation","."],"function":"def get_events(op):\n \"\"\"Return the array of events for the operation.\"\"\"\n return op.get('metadata', {}).get('events', [])","function_tokens":["def","get_events","(","op",")",":","return","op",".","get","(","'metadata'",",","{","}",")",".","get","(","'events'",",","[","]",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L148-L150"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_last_event","parameters":"(op)","argument_list":"","return_statement":"return None","docstring":"Return the last event (if any) for the operation.","docstring_summary":"Return the last event (if any) for the operation.","docstring_tokens":["Return","the","last","event","(","if","any",")","for","the","operation","."],"function":"def get_last_event(op):\n \"\"\"Return the last event (if any) for the operation.\"\"\"\n events = get_events(op)\n if events:\n return events[0]\n return None","function_tokens":["def","get_last_event","(","op",")",":","events","=","get_events","(","op",")","if","events",":","return","events","[","0","]","return","None"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L153-L158"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"external_network_blocked","parameters":"(op)","argument_list":"","return_statement":"return False","docstring":"Retun True if the blockExternalNetwork flag is set for the user action.","docstring_summary":"Retun True if the blockExternalNetwork flag is set for the user action.","docstring_tokens":["Retun","True","if","the","blockExternalNetwork","flag","is","set","for","the","user","action","."],"function":"def external_network_blocked(op):\n \"\"\"Retun True if the blockExternalNetwork flag is set for the user action.\"\"\"\n user_action = get_action_by_name(op, 'user-command')\n if user_action:\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n flags = user_action.get('flags')\n if flags:\n return 'BLOCK_EXTERNAL_NETWORK' in flags\n elif _API_VERSION == google_v2_versions.V2BETA:\n return user_action.get('blockExternalNetwork')\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)\n return False","function_tokens":["def","external_network_blocked","(","op",")",":","user_action","=","get_action_by_name","(","op",",","'user-command'",")","if","user_action",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","flags","=","user_action",".","get","(","'flags'",")","if","flags",":","return","'BLOCK_EXTERNAL_NETWORK'","in","flags","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","user_action",".","get","(","'blockExternalNetwork'",")","else",":","assert","False",",","'Unexpected version: 
{}'",".","format","(","_API_VERSION",")","return","False"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L161-L173"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_unexpected_exit_status_event","parameters":"(e)","argument_list":"","return_statement":"","docstring":"Retun True if the event is for an unexpected exit status.","docstring_summary":"Retun True if the event is for an unexpected exit status.","docstring_tokens":["Retun","True","if","the","event","is","for","an","unexpected","exit","status","."],"function":"def is_unexpected_exit_status_event(e):\n \"\"\"Retun True if the event is for an unexpected exit status.\"\"\"\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n\n return e.get('details', {}).get(\n '@type'\n ) == 'type.googleapis.com\/google.genomics.v2alpha1.UnexpectedExitStatusEvent'\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n\n return 'unexpectedExitStatus' in e\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","is_unexpected_exit_status_event","(","e",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","e",".","get","(","'details'",",","{","}",")",".","get","(","'@type'",")","==","'type.googleapis.com\/google.genomics.v2alpha1.UnexpectedExitStatusEvent'","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","'unexpectedExitStatus'","in","e","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L176-L189"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_failed_event","parameters":"(e)","argument_list":"","return_statement":"","docstring":"Retun True if the event is an operation failed event.","docstring_summary":"Retun True if the event is an operation failed event.","docstring_tokens":["Retun","True","if","the","event","is","an","operation","failed","event","."],"function":"def is_failed_event(e):\n \"\"\"Retun True if the event is an operation failed event.\"\"\"\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n\n return e.get('details', {}).get(\n '@type') == 'type.googleapis.com\/google.genomics.v2alpha1.FailedEvent'\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n\n return 'failed' in e\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","is_failed_event","(","e",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","e",".","get","(","'details'",",","{","}",")",".","get","(","'@type'",")","==","'type.googleapis.com\/google.genomics.v2alpha1.FailedEvent'","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","'failed'","in","e","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L192-L204"} 
{"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_container_stopped_event","parameters":"(e)","argument_list":"","return_statement":"","docstring":"Retun True if the event is a container stopped event.","docstring_summary":"Retun True if the event is a container stopped event.","docstring_tokens":["Retun","True","if","the","event","is","a","container","stopped","event","."],"function":"def is_container_stopped_event(e):\n \"\"\"Retun True if the event is a container stopped event.\"\"\"\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n\n return e.get('details', {}).get(\n '@type'\n ) == 'type.googleapis.com\/google.genomics.v2alpha1.ContainerStoppedEvent'\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n\n return 'containerStopped' in e\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","is_container_stopped_event","(","e",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","e",".","get","(","'details'",",","{","}",")",".","get","(","'@type'",")","==","'type.googleapis.com\/google.genomics.v2alpha1.ContainerStoppedEvent'","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","'containerStopped'","in","e","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L207-L220"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_worker_assigned_event","parameters":"(event)","argument_list":"","return_statement":"","docstring":"Return True if the event is a \"Worker assigned...\" event.","docstring_summary":"Return True if the event is a \"Worker assigned...\" event.","docstring_tokens":["Return","True","if","the","event","is","a","Worker","assigned","...","event","."],"function":"def is_worker_assigned_event(event):\n \"\"\"Return True if the event is a \"Worker assigned...\" event.\"\"\"\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n return event.get('details', {}).get(\n '@type'\n ) == 'type.googleapis.com\/google.genomics.v2alpha1.WorkerAssignedEvent'\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n return 'workerAssigned' in event\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","is_worker_assigned_event","(","event",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","event",".","get","(","'details'",",","{","}",")",".","get","(","'@type'",")","==","'type.googleapis.com\/google.genomics.v2alpha1.WorkerAssignedEvent'","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","'workerAssigned'","in","event","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L223-L234"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_pull_started_event","parameters":"(event)","argument_list":"","return_statement":"","docstring":"Return True if the event is a \"Started Pulling...\" 
event.","docstring_summary":"Return True if the event is a \"Started Pulling...\" event.","docstring_tokens":["Return","True","if","the","event","is","a","Started","Pulling","...","event","."],"function":"def is_pull_started_event(event):\n \"\"\"Return True if the event is a \"Started Pulling...\" event.\"\"\"\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n return event.get('details', {}).get(\n '@type'\n ) == 'type.googleapis.com\/google.genomics.v2alpha1.PullStartedEvent'\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n return 'pullStarted' in event\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","is_pull_started_event","(","event",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","event",".","get","(","'details'",",","{","}",")",".","get","(","'@type'",")","==","'type.googleapis.com\/google.genomics.v2alpha1.PullStartedEvent'","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","'pullStarted'","in","event","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L237-L248"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_failed_events","parameters":"(op)","argument_list":"","return_statement":"return [e for e in events if is_failed_event(e)]","docstring":"Return all \"failed\" events.","docstring_summary":"Return all \"failed\" events.","docstring_tokens":["Return","all","failed","events","."],"function":"def get_failed_events(op):\n \"\"\"Return all \"failed\" events.\"\"\"\n events = get_events(op)\n if not events:\n return None\n\n return [e for e in events if is_failed_event(e)]","function_tokens":["def","get_failed_events","(","op",")",":","events","=","get_events","(","op",")","if","not","events",":","return","None","return","[","e","for","e","in","events","if","is_failed_event","(","e",")","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L251-L257"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_container_stopped_error_events","parameters":"(op)","argument_list":"","return_statement":"","docstring":"Return all container stopped events with a non-zero exit status.","docstring_summary":"Return all container stopped events with a non-zero exit status.","docstring_tokens":["Return","all","container","stopped","events","with","a","non","-","zero","exit","status","."],"function":"def get_container_stopped_error_events(op):\n \"\"\"Return all container stopped events with a non-zero exit status.\"\"\"\n events = get_events(op)\n if not events:\n return None\n\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n\n return [\n e for e in events if is_container_stopped_event(e) and\n e.get('details', {}).get('exitStatus', 0) != 0\n ]\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n\n return [\n e for e in events if is_container_stopped_event(e) and\n e['containerStopped'].get('exitStatus', 0) != 0\n ]\n\n else:\n assert False, 'Unexpected version: 
{}'.format(_API_VERSION)","function_tokens":["def","get_container_stopped_error_events","(","op",")",":","events","=","get_events","(","op",")","if","not","events",":","return","None","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","[","e","for","e","in","events","if","is_container_stopped_event","(","e",")","and","e",".","get","(","'details'",",","{","}",")",".","get","(","'exitStatus'",",","0",")","!=","0","]","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","[","e","for","e","in","events","if","is_container_stopped_event","(","e",")","and","e","[","'containerStopped'","]",".","get","(","'exitStatus'",",","0",")","!=","0","]","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L260-L281"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_unexpected_exit_events","parameters":"(op)","argument_list":"","return_statement":"return [e for e in events if is_unexpected_exit_status_event(e)]","docstring":"Return all unexpected exit status events.","docstring_summary":"Return all unexpected exit status events.","docstring_tokens":["Return","all","unexpected","exit","status","events","."],"function":"def get_unexpected_exit_events(op):\n \"\"\"Return all unexpected exit status events.\"\"\"\n events = get_events(op)\n if not events:\n return None\n\n return [e for e in events if is_unexpected_exit_status_event(e)]","function_tokens":["def","get_unexpected_exit_events","(","op",")",":","events","=","get_events","(","op",")","if","not","events",":","return","None","return","[","e","for","e","in","events","if","is_unexpected_exit_status_event","(","e",")","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L284-L290"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_worker_assigned_events","parameters":"(op)","argument_list":"","return_statement":"return [e for e in events if is_worker_assigned_event(e)]","docstring":"Return all \"Worker Assigned\" events.","docstring_summary":"Return all \"Worker Assigned\" events.","docstring_tokens":["Return","all","Worker","Assigned","events","."],"function":"def get_worker_assigned_events(op):\n \"\"\"Return all \"Worker Assigned\" events.\"\"\"\n\n events = get_events(op)\n if not events:\n return None\n\n return [e for e in events if is_worker_assigned_event(e)]","function_tokens":["def","get_worker_assigned_events","(","op",")",":","events","=","get_events","(","op",")","if","not","events",":","return","None","return","[","e","for","e","in","events","if","is_worker_assigned_event","(","e",")","]"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L293-L300"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_event_action_id","parameters":"(e)","argument_list":"","return_statement":"","docstring":"Return the actionId associated with the specified event.","docstring_summary":"Return the actionId associated with 
the specified event.","docstring_tokens":["Return","the","actionId","associated","with","the","specified","event","."],"function":"def get_event_action_id(e):\n \"\"\"Return the actionId associated with the specified event.\"\"\"\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n\n return e.get('details', {}).get('actionId')\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n\n for event_type in ['containerStopped', 'unexpectedExitStatus']:\n if event_type in e:\n return e[event_type].get('actionId')\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","get_event_action_id","(","e",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","e",".","get","(","'details'",",","{","}",")",".","get","(","'actionId'",")","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","for","event_type","in","[","'containerStopped'",",","'unexpectedExitStatus'","]",":","if","event_type","in","e",":","return","e","[","event_type","]",".","get","(","'actionId'",")","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L303-L316"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_event_description","parameters":"(e)","argument_list":"","return_statement":"return e.get('description')","docstring":"Return the description field for the event.","docstring_summary":"Return the description field for the event.","docstring_tokens":["Return","the","description","field","for","the","event","."],"function":"def get_event_description(e):\n \"\"\"Return the description field for the event.\"\"\"\n return e.get('description')","function_tokens":["def","get_event_description","(","e",")",":","return","e",".","get","(","'description'",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L319-L321"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_event_stderr","parameters":"(e)","argument_list":"","return_statement":"","docstring":"Return the stderr field (if any) associated with the event.","docstring_summary":"Return the stderr field (if any) associated with the event.","docstring_tokens":["Return","the","stderr","field","(","if","any",")","associated","with","the","event","."],"function":"def get_event_stderr(e):\n \"\"\"Return the stderr field (if any) associated with the event.\"\"\"\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n\n return e.get('details', {}).get('stderr')\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n\n for event_type in ['containerStopped']:\n if event_type in e:\n return e[event_type].get('stderr')\n\n else:\n assert False, 'Unexpected version: 
{}'.format(_API_VERSION)","function_tokens":["def","get_event_stderr","(","e",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","e",".","get","(","'details'",",","{","}",")",".","get","(","'stderr'",")","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","for","event_type","in","[","'containerStopped'","]",":","if","event_type","in","e",":","return","e","[","event_type","]",".","get","(","'stderr'",")","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L324-L337"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_worker_assigned_event_details","parameters":"(op)","argument_list":"","return_statement":"","docstring":"Return the detail portion of the most recent \"worker assigned\" event.","docstring_summary":"Return the detail portion of the most recent \"worker assigned\" event.","docstring_tokens":["Return","the","detail","portion","of","the","most","recent","worker","assigned","event","."],"function":"def get_worker_assigned_event_details(op):\n \"\"\"Return the detail portion of the most recent \"worker assigned\" event.\"\"\"\n\n events = get_worker_assigned_events(op)\n if not events:\n return None\n\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n return events[0].get('details', {})\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n return events[0].get('workerAssigned', {})\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","get_worker_assigned_event_details","(","op",")",":","events","=","get_worker_assigned_events","(","op",")","if","not","events",":","return","None","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","events","[","0","]",".","get","(","'details'",",","{","}",")","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","events","[","0","]",".","get","(","'workerAssigned'",",","{","}",")","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L340-L354"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_last_update","parameters":"(op)","argument_list":"","return_statement":"return last_update","docstring":"Return the most recent timestamp in the operation.","docstring_summary":"Return the most recent timestamp in the operation.","docstring_tokens":["Return","the","most","recent","timestamp","in","the","operation","."],"function":"def get_last_update(op):\n \"\"\"Return the most recent timestamp in the operation.\"\"\"\n last_update = get_end_time(op)\n\n if not last_update:\n last_event = get_last_event(op)\n if last_event:\n last_update = last_event['timestamp']\n\n if not last_update:\n last_update = get_create_time(op)\n\n return 
last_update","function_tokens":["def","get_last_update","(","op",")",":","last_update","=","get_end_time","(","op",")","if","not","last_update",":","last_event","=","get_last_event","(","op",")","if","last_event",":","last_update","=","last_event","[","'timestamp'","]","if","not","last_update",":","last_update","=","get_create_time","(","op",")","return","last_update"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L357-L369"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_resources","parameters":"(op)","argument_list":"","return_statement":"return op.get('metadata', {}).get('pipeline').get('resources', {})","docstring":"Return the operation's resource.","docstring_summary":"Return the operation's resource.","docstring_tokens":["Return","the","operation","s","resource","."],"function":"def get_resources(op):\n \"\"\"Return the operation's resource.\"\"\"\n return op.get('metadata', {}).get('pipeline').get('resources', {})","function_tokens":["def","get_resources","(","op",")",":","return","op",".","get","(","'metadata'",",","{","}",")",".","get","(","'pipeline'",")",".","get","(","'resources'",",","{","}",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L372-L374"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"get_vm_network_name","parameters":"(vm)","argument_list":"","return_statement":"","docstring":"Return the name of the network from the virtualMachine.","docstring_summary":"Return the name of the network from the virtualMachine.","docstring_tokens":["Return","the","name","of","the","network","from","the","virtualMachine","."],"function":"def get_vm_network_name(vm):\n \"\"\"Return the name of the network from the virtualMachine.\"\"\"\n\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n return vm.get('network', {}).get('name')\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n return vm.get('network', {}).get('network')\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","get_vm_network_name","(","vm",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","vm",".","get","(","'network'",",","{","}",")",".","get","(","'name'",")","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","vm",".","get","(","'network'",",","{","}",")",".","get","(","'network'",")","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L377-L387"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_pipeline","parameters":"(op)","argument_list":"","return_statement":"","docstring":"Check that an operation is a genomics pipeline run.\n\n An operation is a Genomics Pipeline run if the request metadata's @type\n is \"type.googleapis.com\/google.genomics.v2alpha1.Metadata\".\n\n Args:\n op: a pipelines operation.\n\n Returns:\n Boolean, true if the operation is a RunPipelineRequest.","docstring_summary":"Check that an 
operation is a genomics pipeline run.","docstring_tokens":["Check","that","an","operation","is","a","genomics","pipeline","run","."],"function":"def is_pipeline(op):\n \"\"\"Check that an operation is a genomics pipeline run.\n\n An operation is a Genomics Pipeline run if the request metadata's @type\n is \"type.googleapis.com\/google.genomics.v2alpha1.Metadata\".\n\n Args:\n op: a pipelines operation.\n\n Returns:\n Boolean, true if the operation is a RunPipelineRequest.\n \"\"\"\n\n if _API_VERSION == google_v2_versions.V2ALPHA1:\n return get_metadata_type(\n op) == 'type.googleapis.com\/google.genomics.v2alpha1.Metadata'\n\n elif _API_VERSION == google_v2_versions.V2BETA:\n return get_metadata_type(\n op) == 'type.googleapis.com\/google.cloud.lifesciences.v2beta.Metadata'\n\n else:\n assert False, 'Unexpected version: {}'.format(_API_VERSION)","function_tokens":["def","is_pipeline","(","op",")",":","if","_API_VERSION","==","google_v2_versions",".","V2ALPHA1",":","return","get_metadata_type","(","op",")","==","'type.googleapis.com\/google.genomics.v2alpha1.Metadata'","elif","_API_VERSION","==","google_v2_versions",".","V2BETA",":","return","get_metadata_type","(","op",")","==","'type.googleapis.com\/google.cloud.lifesciences.v2beta.Metadata'","else",":","assert","False",",","'Unexpected version: {}'",".","format","(","_API_VERSION",")"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L390-L412"} {"nwo":"DataBiosphere\/dsub","sha":"a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0","path":"dsub\/providers\/google_v2_operations.py","language":"python","identifier":"is_dsub_operation","parameters":"(op)","argument_list":"","return_statement":"return True","docstring":"Determine if a pipelines operation is a dsub request.\n\n We don't have a rigorous way to identify an operation as being submitted\n by dsub. Our best option is to check for certain fields that have always\n been part of dsub operations.\n\n - labels: job-id, job-name, and user-id have always existed. The dsub-version\n label has always existed for the google-v2 provider.\n\n Args:\n op: a pipelines operation.\n\n Returns:\n Boolean, true if the pipeline run was generated by dsub.","docstring_summary":"Determine if a pipelines operation is a dsub request.","docstring_tokens":["Determine","if","a","pipelines","operation","is","a","dsub","request","."],"function":"def is_dsub_operation(op):\n \"\"\"Determine if a pipelines operation is a dsub request.\n\n We don't have a rigorous way to identify an operation as being submitted\n by dsub. Our best option is to check for certain fields that have always\n been part of dsub operations.\n\n - labels: job-id, job-name, and user-id have always existed. 
The dsub-version\n label has always existed for the google-v2 provider.\n\n Args:\n op: a pipelines operation.\n\n Returns:\n Boolean, true if the pipeline run was generated by dsub.\n \"\"\"\n if not is_pipeline(op):\n return False\n\n for name in ['dsub-version', 'job-id', 'job-name', 'user-id']:\n if not get_label(op, name):\n return False\n\n return True","function_tokens":["def","is_dsub_operation","(","op",")",":","if","not","is_pipeline","(","op",")",":","return","False","for","name","in","[","'dsub-version'",",","'job-id'",",","'job-name'",",","'user-id'","]",":","if","not","get_label","(","op",",","name",")",":","return","False","return","True"],"url":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/a01408d3769d93c3ae5c5f8ea1cdd0484dc15bd0\/dsub\/providers\/google_v2_operations.py#L415-L438"}
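Putting is_pipeline and is_dsub_operation together: an operation is treated as dsub's only when its metadata @type marks it as a pipeline run and all four long-standing dsub labels are present. Below is a compact sketch of the label check alone, with get_label (referenced by the record above but not shown in this file) re-implemented against an assumed metadata.labels layout; the sample operation and its label values are fabricated for illustration.

# Sketch of the dsub-label check from is_dsub_operation. Placing labels
# under metadata.labels is an assumption here, as is the sample operation.
def get_label(op, name):
  return op.get('metadata', {}).get('labels', {}).get(name)

def looks_like_dsub(op):
  # All four labels must be present and non-empty.
  return all(
      get_label(op, name)
      for name in ['dsub-version', 'job-id', 'job-name', 'user-id'])

sample = {
    'metadata': {
        'labels': {
            'dsub-version': 'v0-4-1',
            'job-id': 'my-job--user--221015-120000-00',
            'job-name': 'my-job',
            'user-id': 'user',
        }
    }
}
print(looks_like_dsub(sample))                        # True
print(looks_like_dsub({'metadata': {'labels': {}}}))  # False

Requiring all four labels, rather than any one, keeps status queries from picking up unrelated pipeline runs that happen to live in the same project.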