Column            Type             Lengths / values
partition         stringclasses    3 values
func_name         stringlengths    1 to 134
docstring         stringlengths    1 to 46.9k
path              stringlengths    4 to 223
original_string   stringlengths    75 to 104k
code              stringlengths    75 to 104k
docstring_tokens  sequencelengths  1 to 1.97k
repo              stringlengths    7 to 55
language          stringclasses    1 value
url               stringlengths    87 to 315
code_tokens       sequencelengths  19 to 28.4k
sha               stringlengths    40 to 40
test
SSHHook.get_conn
Opens a ssh connection to the remote host. :rtype: paramiko.client.SSHClient
airflow/contrib/hooks/ssh_hook.py
def get_conn(self):
    """
    Opens a ssh connection to the remote host.

    :rtype: paramiko.client.SSHClient
    """
    self.log.debug('Creating SSH client for conn_id: %s', self.ssh_conn_id)
    client = paramiko.SSHClient()

    if not self.allow_host_key_change:
        self.log.warning('Remote Identification Change is not verified. '
                         'This wont protect against Man-In-The-Middle attacks')
        client.load_system_host_keys()
    if self.no_host_key_check:
        self.log.warning('No Host Key Verification. This wont protect '
                         'against Man-In-The-Middle attacks')
        # Default is RejectPolicy
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    if self.password and self.password.strip():
        client.connect(hostname=self.remote_host,
                       username=self.username,
                       password=self.password,
                       key_filename=self.key_file,
                       timeout=self.timeout,
                       compress=self.compress,
                       port=self.port,
                       sock=self.host_proxy)
    else:
        client.connect(hostname=self.remote_host,
                       username=self.username,
                       key_filename=self.key_file,
                       timeout=self.timeout,
                       compress=self.compress,
                       port=self.port,
                       sock=self.host_proxy)

    if self.keepalive_interval:
        client.get_transport().set_keepalive(self.keepalive_interval)

    self.client = client
    return client
[ "Opens", "a", "ssh", "connection", "to", "the", "remote", "host", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/ssh_hook.py#L144-L185
[ "def", "get_conn", "(", "self", ")", ":", "self", ".", "log", ".", "debug", "(", "'Creating SSH client for conn_id: %s'", ",", "self", ".", "ssh_conn_id", ")", "client", "=", "paramiko", ".", "SSHClient", "(", ")", "if", "not", "self", ".", "allow_host_key_change", ":", "self", ".", "log", ".", "warning", "(", "'Remote Identification Change is not verified. '", "'This wont protect against Man-In-The-Middle attacks'", ")", "client", ".", "load_system_host_keys", "(", ")", "if", "self", ".", "no_host_key_check", ":", "self", ".", "log", ".", "warning", "(", "'No Host Key Verification. This wont protect '", "'against Man-In-The-Middle attacks'", ")", "# Default is RejectPolicy", "client", ".", "set_missing_host_key_policy", "(", "paramiko", ".", "AutoAddPolicy", "(", ")", ")", "if", "self", ".", "password", "and", "self", ".", "password", ".", "strip", "(", ")", ":", "client", ".", "connect", "(", "hostname", "=", "self", ".", "remote_host", ",", "username", "=", "self", ".", "username", ",", "password", "=", "self", ".", "password", ",", "key_filename", "=", "self", ".", "key_file", ",", "timeout", "=", "self", ".", "timeout", ",", "compress", "=", "self", ".", "compress", ",", "port", "=", "self", ".", "port", ",", "sock", "=", "self", ".", "host_proxy", ")", "else", ":", "client", ".", "connect", "(", "hostname", "=", "self", ".", "remote_host", ",", "username", "=", "self", ".", "username", ",", "key_filename", "=", "self", ".", "key_file", ",", "timeout", "=", "self", ".", "timeout", ",", "compress", "=", "self", ".", "compress", ",", "port", "=", "self", ".", "port", ",", "sock", "=", "self", ".", "host_proxy", ")", "if", "self", ".", "keepalive_interval", ":", "client", ".", "get_transport", "(", ")", ".", "set_keepalive", "(", "self", ".", "keepalive_interval", ")", "self", ".", "client", "=", "client", "return", "client" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
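A minimal usage sketch for the get_conn row above, assuming a configured Airflow SSH connection named 'ssh_default' (hypothetical); since the return value is a paramiko.client.SSHClient, ordinary paramiko calls such as exec_command work on it.

from airflow.contrib.hooks.ssh_hook import SSHHook

hook = SSHHook(ssh_conn_id='ssh_default')        # hypothetical connection id
client = hook.get_conn()                         # paramiko.client.SSHClient
stdin, stdout, stderr = client.exec_command('uname -a')
print(stdout.read().decode())
client.close()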
test
SSHHook.get_tunnel
Creates a tunnel between two hosts. Like ssh -L <LOCAL_PORT>:host:<REMOTE_PORT>. :param remote_port: The remote port to create a tunnel to :type remote_port: int :param remote_host: The remote host to create a tunnel to (default localhost) :type remote_host: str :param local_port: The local port to attach the tunnel to :type local_port: int :return: sshtunnel.SSHTunnelForwarder object
airflow/contrib/hooks/ssh_hook.py
def get_tunnel(self, remote_port, remote_host="localhost", local_port=None):
    """
    Creates a tunnel between two hosts. Like ssh -L <LOCAL_PORT>:host:<REMOTE_PORT>.

    :param remote_port: The remote port to create a tunnel to
    :type remote_port: int
    :param remote_host: The remote host to create a tunnel to (default localhost)
    :type remote_host: str
    :param local_port: The local port to attach the tunnel to
    :type local_port: int

    :return: sshtunnel.SSHTunnelForwarder object
    """
    if local_port:
        local_bind_address = ('localhost', local_port)
    else:
        local_bind_address = ('localhost',)

    if self.password and self.password.strip():
        client = SSHTunnelForwarder(self.remote_host,
                                    ssh_port=self.port,
                                    ssh_username=self.username,
                                    ssh_password=self.password,
                                    ssh_pkey=self.key_file,
                                    ssh_proxy=self.host_proxy,
                                    local_bind_address=local_bind_address,
                                    remote_bind_address=(remote_host, remote_port),
                                    logger=self.log)
    else:
        client = SSHTunnelForwarder(self.remote_host,
                                    ssh_port=self.port,
                                    ssh_username=self.username,
                                    ssh_pkey=self.key_file,
                                    ssh_proxy=self.host_proxy,
                                    local_bind_address=local_bind_address,
                                    remote_bind_address=(remote_host, remote_port),
                                    host_pkey_directories=[],
                                    logger=self.log)

    return client
[ "Creates", "a", "tunnel", "between", "two", "hosts", ".", "Like", "ssh", "-", "L", "<LOCAL_PORT", ">", ":", "host", ":", "<REMOTE_PORT", ">", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/ssh_hook.py#L199-L239
[ "def", "get_tunnel", "(", "self", ",", "remote_port", ",", "remote_host", "=", "\"localhost\"", ",", "local_port", "=", "None", ")", ":", "if", "local_port", ":", "local_bind_address", "=", "(", "'localhost'", ",", "local_port", ")", "else", ":", "local_bind_address", "=", "(", "'localhost'", ",", ")", "if", "self", ".", "password", "and", "self", ".", "password", ".", "strip", "(", ")", ":", "client", "=", "SSHTunnelForwarder", "(", "self", ".", "remote_host", ",", "ssh_port", "=", "self", ".", "port", ",", "ssh_username", "=", "self", ".", "username", ",", "ssh_password", "=", "self", ".", "password", ",", "ssh_pkey", "=", "self", ".", "key_file", ",", "ssh_proxy", "=", "self", ".", "host_proxy", ",", "local_bind_address", "=", "local_bind_address", ",", "remote_bind_address", "=", "(", "remote_host", ",", "remote_port", ")", ",", "logger", "=", "self", ".", "log", ")", "else", ":", "client", "=", "SSHTunnelForwarder", "(", "self", ".", "remote_host", ",", "ssh_port", "=", "self", ".", "port", ",", "ssh_username", "=", "self", ".", "username", ",", "ssh_pkey", "=", "self", ".", "key_file", ",", "ssh_proxy", "=", "self", ".", "host_proxy", ",", "local_bind_address", "=", "local_bind_address", ",", "remote_bind_address", "=", "(", "remote_host", ",", "remote_port", ")", ",", "host_pkey_directories", "=", "[", "]", ",", "logger", "=", "self", ".", "log", ")", "return", "client" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
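A sketch of driving the object returned by get_tunnel, assuming a service on remote port 5432 and the same hypothetical 'ssh_default' connection; the returned sshtunnel.SSHTunnelForwarder is started and stopped explicitly.

from airflow.contrib.hooks.ssh_hook import SSHHook

hook = SSHHook(ssh_conn_id='ssh_default')        # hypothetical connection id
tunnel = hook.get_tunnel(remote_port=5432, remote_host='localhost', local_port=15432)
tunnel.start()
try:
    pass   # anything talking to localhost:15432 now reaches remote port 5432
finally:
    tunnel.stop()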
test
GCPTransferServiceHook.create_transfer_job
Creates a transfer job that runs periodically. :param body: (Required) A request body, as described in https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/patch#request-body :type body: dict :return: transfer job. See: https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob :rtype: dict
airflow/contrib/hooks/gcp_transfer_hook.py
def create_transfer_job(self, body):
    """
    Creates a transfer job that runs periodically.

    :param body: (Required) A request body, as described in
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/patch#request-body
    :type body: dict
    :return: transfer job.
        See:
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob
    :rtype: dict
    """
    body = self._inject_project_id(body, BODY, PROJECT_ID)
    return self.get_conn().transferJobs().create(body=body).execute(num_retries=self.num_retries)
[ "Creates", "a", "transfer", "job", "that", "runs", "periodically", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L119-L132
[ "def", "create_transfer_job", "(", "self", ",", "body", ")", ":", "body", "=", "self", ".", "_inject_project_id", "(", "body", ",", "BODY", ",", "PROJECT_ID", ")", "return", "self", ".", "get_conn", "(", ")", ".", "transferJobs", "(", ")", ".", "create", "(", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
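An illustrative request body for create_transfer_job, sketching a GCS-to-GCS copy; the field names follow the public TransferJob resource, while the project id, bucket names and dates are placeholders.

# hook: an already-constructed GCPTransferServiceHook
body = {
    'description': 'example nightly copy',        # placeholder
    'status': 'ENABLED',
    'projectId': 'my-project',                    # placeholder project id
    'schedule': {
        'scheduleStartDate': {'day': 1, 'month': 1, 'year': 2019},
        'scheduleEndDate': {'day': 1, 'month': 1, 'year': 2019},
    },
    'transferSpec': {
        'gcsDataSource': {'bucketName': 'source-bucket'},
        'gcsDataSink': {'bucketName': 'sink-bucket'},
    },
}
job = hook.create_transfer_job(body=body)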
test
GCPTransferServiceHook.get_transfer_job
Gets the latest state of a long-running operation in Google Storage Transfer Service. :param job_name: (Required) Name of the job to be fetched :type job_name: str :param project_id: (Optional) the ID of the project that owns the Transfer Job. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: Transfer Job :rtype: dict
airflow/contrib/hooks/gcp_transfer_hook.py
def get_transfer_job(self, job_name, project_id=None):
    """
    Gets the latest state of a long-running operation in Google Storage
    Transfer Service.

    :param job_name: (Required) Name of the job to be fetched
    :type job_name: str
    :param project_id: (Optional) the ID of the project that owns the Transfer
        Job. If set to None or missing, the default project_id from the GCP
        connection is used.
    :type project_id: str
    :return: Transfer Job
    :rtype: dict
    """
    return (
        self.get_conn()
        .transferJobs()
        .get(jobName=job_name, projectId=project_id)
        .execute(num_retries=self.num_retries)
    )
[ "Gets", "the", "latest", "state", "of", "a", "long", "-", "running", "operation", "in", "Google", "Storage", "Transfer", "Service", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L136-L155
[ "def", "get_transfer_job", "(", "self", ",", "job_name", ",", "project_id", "=", "None", ")", ":", "return", "(", "self", ".", "get_conn", "(", ")", ".", "transferJobs", "(", ")", ".", "get", "(", "jobName", "=", "job_name", ",", "projectId", "=", "project_id", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.list_transfer_job
Lists long-running operations in Google Storage Transfer Service that match the specified filter. :param filter: (Required) A request filter, as described in https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/list#body.QUERY_PARAMETERS.filter :type filter: dict :return: List of Transfer Jobs :rtype: list[dict]
airflow/contrib/hooks/gcp_transfer_hook.py
def list_transfer_job(self, filter):
    """
    Lists long-running operations in Google Storage Transfer
    Service that match the specified filter.

    :param filter: (Required) A request filter, as described in
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/list#body.QUERY_PARAMETERS.filter
    :type filter: dict
    :return: List of Transfer Jobs
    :rtype: list[dict]
    """
    conn = self.get_conn()
    filter = self._inject_project_id(filter, FILTER, FILTER_PROJECT_ID)
    request = conn.transferJobs().list(filter=json.dumps(filter))
    jobs = []

    while request is not None:
        response = request.execute(num_retries=self.num_retries)
        jobs.extend(response[TRANSFER_JOBS])

        request = conn.transferJobs().list_next(previous_request=request, previous_response=response)

    return jobs
[ "Lists", "long", "-", "running", "operations", "in", "Google", "Storage", "Transfer", "Service", "that", "match", "the", "specified", "filter", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L157-L179
[ "def", "list_transfer_job", "(", "self", ",", "filter", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "filter", "=", "self", ".", "_inject_project_id", "(", "filter", ",", "FILTER", ",", "FILTER_PROJECT_ID", ")", "request", "=", "conn", ".", "transferJobs", "(", ")", ".", "list", "(", "filter", "=", "json", ".", "dumps", "(", "filter", ")", ")", "jobs", "=", "[", "]", "while", "request", "is", "not", "None", ":", "response", "=", "request", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "jobs", ".", "extend", "(", "response", "[", "TRANSFER_JOBS", "]", ")", "request", "=", "conn", ".", "transferJobs", "(", ")", ".", "list_next", "(", "previous_request", "=", "request", ",", "previous_response", "=", "response", ")", "return", "jobs" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.update_transfer_job
Updates a transfer job that runs periodically. :param job_name: (Required) Name of the job to be updated :type job_name: str :param body: A request body, as described in https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/patch#request-body :type body: dict :return: If successful, TransferJob. :rtype: dict
airflow/contrib/hooks/gcp_transfer_hook.py
def update_transfer_job(self, job_name, body):
    """
    Updates a transfer job that runs periodically.

    :param job_name: (Required) Name of the job to be updated
    :type job_name: str
    :param body: A request body, as described in
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/patch#request-body
    :type body: dict
    :return: If successful, TransferJob.
    :rtype: dict
    """
    body = self._inject_project_id(body, BODY, PROJECT_ID)
    return (
        self.get_conn()
        .transferJobs()
        .patch(jobName=job_name, body=body)
        .execute(num_retries=self.num_retries)
    )
[ "Updates", "a", "transfer", "job", "that", "runs", "periodically", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L182-L200
[ "def", "update_transfer_job", "(", "self", ",", "job_name", ",", "body", ")", ":", "body", "=", "self", ".", "_inject_project_id", "(", "body", ",", "BODY", ",", "PROJECT_ID", ")", "return", "(", "self", ".", "get_conn", "(", ")", ".", "transferJobs", "(", ")", ".", "patch", "(", "jobName", "=", "job_name", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.delete_transfer_job
Deletes a transfer job. This is a soft delete. After a transfer job is deleted, the job and all the transfer executions are subject to garbage collection. Transfer jobs become eligible for garbage collection 30 days after soft delete. :param job_name: (Required) Name of the job to be deleted :type job_name: str :param project_id: (Optional) the ID of the project that owns the Transfer Job. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :rtype: None
airflow/contrib/hooks/gcp_transfer_hook.py
def delete_transfer_job(self, job_name, project_id):
    """
    Deletes a transfer job. This is a soft delete. After a transfer job is
    deleted, the job and all the transfer executions are subject to garbage
    collection. Transfer jobs become eligible for garbage collection
    30 days after soft delete.

    :param job_name: (Required) Name of the job to be deleted
    :type job_name: str
    :param project_id: (Optional) the ID of the project that owns the Transfer
        Job. If set to None or missing, the default project_id from the GCP
        connection is used.
    :type project_id: str
    :rtype: None
    """
    return (
        self.get_conn()
        .transferJobs()
        .patch(
            jobName=job_name,
            body={
                PROJECT_ID: project_id,
                TRANSFER_JOB: {STATUS1: GcpTransferJobsStatus.DELETED},
                TRANSFER_JOB_FIELD_MASK: STATUS1,
            },
        )
        .execute(num_retries=self.num_retries)
    )
[ "Deletes", "a", "transfer", "job", ".", "This", "is", "a", "soft", "delete", ".", "After", "a", "transfer", "job", "is", "deleted", "the", "job", "and", "all", "the", "transfer", "executions", "are", "subject", "to", "garbage", "collection", ".", "Transfer", "jobs", "become", "eligible", "for", "garbage", "collection", "30", "days", "after", "soft", "delete", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L204-L232
[ "def", "delete_transfer_job", "(", "self", ",", "job_name", ",", "project_id", ")", ":", "return", "(", "self", ".", "get_conn", "(", ")", ".", "transferJobs", "(", ")", ".", "patch", "(", "jobName", "=", "job_name", ",", "body", "=", "{", "PROJECT_ID", ":", "project_id", ",", "TRANSFER_JOB", ":", "{", "STATUS1", ":", "GcpTransferJobsStatus", ".", "DELETED", "}", ",", "TRANSFER_JOB_FIELD_MASK", ":", "STATUS1", ",", "}", ",", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.cancel_transfer_operation
Cancels an transfer operation in Google Storage Transfer Service. :param operation_name: Name of the transfer operation. :type operation_name: str :rtype: None
airflow/contrib/hooks/gcp_transfer_hook.py
def cancel_transfer_operation(self, operation_name):
    """
    Cancels an transfer operation in Google Storage Transfer Service.

    :param operation_name: Name of the transfer operation.
    :type operation_name: str
    :rtype: None
    """
    self.get_conn().transferOperations().cancel(name=operation_name).execute(num_retries=self.num_retries)
[ "Cancels", "an", "transfer", "operation", "in", "Google", "Storage", "Transfer", "Service", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L235-L243
[ "def", "cancel_transfer_operation", "(", "self", ",", "operation_name", ")", ":", "self", ".", "get_conn", "(", ")", ".", "transferOperations", "(", ")", ".", "cancel", "(", "name", "=", "operation_name", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.get_transfer_operation
Gets an transfer operation in Google Storage Transfer Service. :param operation_name: (Required) Name of the transfer operation. :type operation_name: str :return: transfer operation See: https://cloud.google.com/storage-transfer/docs/reference/rest/v1/Operation :rtype: dict
airflow/contrib/hooks/gcp_transfer_hook.py
def get_transfer_operation(self, operation_name):
    """
    Gets an transfer operation in Google Storage Transfer Service.

    :param operation_name: (Required) Name of the transfer operation.
    :type operation_name: str
    :return: transfer operation
        See:
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/Operation
    :rtype: dict
    """
    return (
        self.get_conn()
        .transferOperations()
        .get(name=operation_name)
        .execute(num_retries=self.num_retries)
    )
[ "Gets", "an", "transfer", "operation", "in", "Google", "Storage", "Transfer", "Service", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L246-L262
[ "def", "get_transfer_operation", "(", "self", ",", "operation_name", ")", ":", "return", "(", "self", ".", "get_conn", "(", ")", ".", "transferOperations", "(", ")", ".", "get", "(", "name", "=", "operation_name", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.list_transfer_operations
Gets an transfer operation in Google Storage Transfer Service. :param filter: (Required) A request filter, as described in https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/list#body.QUERY_PARAMETERS.filter With one additional improvement: * project_id is optional if you have a project id defined in the connection See: :ref:`howto/connection:gcp` :type filter: dict :return: transfer operation :rtype: list[dict]
airflow/contrib/hooks/gcp_transfer_hook.py
def list_transfer_operations(self, filter):
    """
    Gets an transfer operation in Google Storage Transfer Service.

    :param filter: (Required) A request filter, as described in
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/list#body.QUERY_PARAMETERS.filter

        With one additional improvement:

        * project_id is optional if you have a project id defined
          in the connection
          See: :ref:`howto/connection:gcp`

    :type filter: dict
    :return: transfer operation
    :rtype: list[dict]
    """
    conn = self.get_conn()

    filter = self._inject_project_id(filter, FILTER, FILTER_PROJECT_ID)

    operations = []

    request = conn.transferOperations().list(name=TRANSFER_OPERATIONS, filter=json.dumps(filter))

    while request is not None:
        response = request.execute(num_retries=self.num_retries)
        if OPERATIONS in response:
            operations.extend(response[OPERATIONS])

        request = conn.transferOperations().list_next(
            previous_request=request, previous_response=response
        )

    return operations
[ "Gets", "an", "transfer", "operation", "in", "Google", "Storage", "Transfer", "Service", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L265-L298
[ "def", "list_transfer_operations", "(", "self", ",", "filter", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "filter", "=", "self", ".", "_inject_project_id", "(", "filter", ",", "FILTER", ",", "FILTER_PROJECT_ID", ")", "operations", "=", "[", "]", "request", "=", "conn", ".", "transferOperations", "(", ")", ".", "list", "(", "name", "=", "TRANSFER_OPERATIONS", ",", "filter", "=", "json", ".", "dumps", "(", "filter", ")", ")", "while", "request", "is", "not", "None", ":", "response", "=", "request", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "if", "OPERATIONS", "in", "response", ":", "operations", ".", "extend", "(", "response", "[", "OPERATIONS", "]", ")", "request", "=", "conn", ".", "transferOperations", "(", ")", ".", "list_next", "(", "previous_request", "=", "request", ",", "previous_response", "=", "response", ")", "return", "operations" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.pause_transfer_operation
Pauses an transfer operation in Google Storage Transfer Service. :param operation_name: (Required) Name of the transfer operation. :type operation_name: str :rtype: None
airflow/contrib/hooks/gcp_transfer_hook.py
def pause_transfer_operation(self, operation_name):
    """
    Pauses an transfer operation in Google Storage Transfer Service.

    :param operation_name: (Required) Name of the transfer operation.
    :type operation_name: str
    :rtype: None
    """
    self.get_conn().transferOperations().pause(name=operation_name).execute(num_retries=self.num_retries)
[ "Pauses", "an", "transfer", "operation", "in", "Google", "Storage", "Transfer", "Service", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L301-L309
[ "def", "pause_transfer_operation", "(", "self", ",", "operation_name", ")", ":", "self", ".", "get_conn", "(", ")", ".", "transferOperations", "(", ")", ".", "pause", "(", "name", "=", "operation_name", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.resume_transfer_operation
Resumes an transfer operation in Google Storage Transfer Service. :param operation_name: (Required) Name of the transfer operation. :type operation_name: str :rtype: None
airflow/contrib/hooks/gcp_transfer_hook.py
def resume_transfer_operation(self, operation_name):
    """
    Resumes an transfer operation in Google Storage Transfer Service.

    :param operation_name: (Required) Name of the transfer operation.
    :type operation_name: str
    :rtype: None
    """
    self.get_conn().transferOperations().resume(name=operation_name).execute(num_retries=self.num_retries)
[ "Resumes", "an", "transfer", "operation", "in", "Google", "Storage", "Transfer", "Service", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L312-L320
[ "def", "resume_transfer_operation", "(", "self", ",", "operation_name", ")", ":", "self", ".", "get_conn", "(", ")", ".", "transferOperations", "(", ")", ".", "resume", "(", "name", "=", "operation_name", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.wait_for_transfer_job
Waits until the job reaches the expected state. :param job: Transfer job See: https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob :type job: dict :param expected_statuses: State that is expected See: https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferOperations#Status :type expected_statuses: set[str] :param timeout: :type timeout: time in which the operation must end in seconds :rtype: None
airflow/contrib/hooks/gcp_transfer_hook.py
def wait_for_transfer_job(
    self, job, expected_statuses=(GcpTransferOperationStatus.SUCCESS,), timeout=60
):
    """
    Waits until the job reaches the expected state.

    :param job: Transfer job
        See:
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob
    :type job: dict
    :param expected_statuses: State that is expected
        See:
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferOperations#Status
    :type expected_statuses: set[str]
    :param timeout:
    :type timeout: time in which the operation must end in seconds
    :rtype: None
    """
    while timeout > 0:
        operations = self.list_transfer_operations(
            filter={FILTER_PROJECT_ID: job[PROJECT_ID], FILTER_JOB_NAMES: [job[NAME]]}
        )

        if GCPTransferServiceHook.operations_contain_expected_statuses(operations, expected_statuses):
            return
        time.sleep(TIME_TO_SLEEP_IN_SECONDS)
        timeout -= TIME_TO_SLEEP_IN_SECONDS
    raise AirflowException("Timeout. The operation could not be completed within the allotted time.")
[ "Waits", "until", "the", "job", "reaches", "the", "expected", "state", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L323-L348
[ "def", "wait_for_transfer_job", "(", "self", ",", "job", ",", "expected_statuses", "=", "(", "GcpTransferOperationStatus", ".", "SUCCESS", ",", ")", ",", "timeout", "=", "60", ")", ":", "while", "timeout", ">", "0", ":", "operations", "=", "self", ".", "list_transfer_operations", "(", "filter", "=", "{", "FILTER_PROJECT_ID", ":", "job", "[", "PROJECT_ID", "]", ",", "FILTER_JOB_NAMES", ":", "[", "job", "[", "NAME", "]", "]", "}", ")", "if", "GCPTransferServiceHook", ".", "operations_contain_expected_statuses", "(", "operations", ",", "expected_statuses", ")", ":", "return", "time", ".", "sleep", "(", "TIME_TO_SLEEP_IN_SECONDS", ")", "timeout", "-=", "TIME_TO_SLEEP_IN_SECONDS", "raise", "AirflowException", "(", "\"Timeout. The operation could not be completed within the allotted time.\"", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GCPTransferServiceHook.operations_contain_expected_statuses
Checks whether the operation list has an operation with the expected status, then returns true If it encounters operations in FAILED or ABORTED state throw :class:`airflow.exceptions.AirflowException`. :param operations: (Required) List of transfer operations to check. :type operations: list[dict] :param expected_statuses: (Required) status that is expected See: https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferOperations#Status :type expected_statuses: set[str] :return: If there is an operation with the expected state in the operation list, returns true, :raises: airflow.exceptions.AirflowException If it encounters operations with a state in the list, :rtype: bool
airflow/contrib/hooks/gcp_transfer_hook.py
def operations_contain_expected_statuses(operations, expected_statuses):
    """
    Checks whether the operation list has an operation with the
    expected status, then returns true
    If it encounters operations in FAILED or ABORTED state
    throw :class:`airflow.exceptions.AirflowException`.

    :param operations: (Required) List of transfer operations to check.
    :type operations: list[dict]
    :param expected_statuses: (Required) status that is expected
        See:
        https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferOperations#Status
    :type expected_statuses: set[str]
    :return: If there is an operation with the expected state
        in the operation list, returns true,
    :raises: airflow.exceptions.AirflowException If it encounters operations
        with a state in the list,
    :rtype: bool
    """
    expected_statuses = (
        {expected_statuses} if isinstance(expected_statuses, six.string_types) else set(expected_statuses)
    )
    if len(operations) == 0:
        return False

    current_statuses = {operation[METADATA][STATUS] for operation in operations}

    if len(current_statuses - set(expected_statuses)) != len(current_statuses):
        return True

    if len(NEGATIVE_STATUSES - current_statuses) != len(NEGATIVE_STATUSES):
        raise AirflowException(
            'An unexpected operation status was encountered. Expected: {}'.format(
                ", ".join(expected_statuses)
            )
        )
    return False
[ "Checks", "whether", "the", "operation", "list", "has", "an", "operation", "with", "the", "expected", "status", "then", "returns", "true", "If", "it", "encounters", "operations", "in", "FAILED", "or", "ABORTED", "state", "throw", ":", "class", ":", "airflow", ".", "exceptions", ".", "AirflowException", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_transfer_hook.py#L361-L397
[ "def", "operations_contain_expected_statuses", "(", "operations", ",", "expected_statuses", ")", ":", "expected_statuses", "=", "(", "{", "expected_statuses", "}", "if", "isinstance", "(", "expected_statuses", ",", "six", ".", "string_types", ")", "else", "set", "(", "expected_statuses", ")", ")", "if", "len", "(", "operations", ")", "==", "0", ":", "return", "False", "current_statuses", "=", "{", "operation", "[", "METADATA", "]", "[", "STATUS", "]", "for", "operation", "in", "operations", "}", "if", "len", "(", "current_statuses", "-", "set", "(", "expected_statuses", ")", ")", "!=", "len", "(", "current_statuses", ")", ":", "return", "True", "if", "len", "(", "NEGATIVE_STATUSES", "-", "current_statuses", ")", "!=", "len", "(", "NEGATIVE_STATUSES", ")", ":", "raise", "AirflowException", "(", "'An unexpected operation status was encountered. Expected: {}'", ".", "format", "(", "\", \"", ".", "join", "(", "expected_statuses", ")", ")", ")", "return", "False" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
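The return value of operations_contain_expected_statuses is decided by set arithmetic over each operation's metadata status; the following standalone sketch reproduces that check, assuming METADATA == 'metadata', STATUS == 'status' and NEGATIVE_STATUSES == {'FAILED', 'ABORTED'} (constant values not shown in the row above).

# Standalone sketch of the status check, with assumed constant values.
operations = [{'metadata': {'status': 'SUCCESS'}},
              {'metadata': {'status': 'IN_PROGRESS'}}]
expected = {'SUCCESS'}

current = {op['metadata']['status'] for op in operations}
# "len(current - expected) != len(current)" is the same as a non-empty intersection:
print(bool(current & expected))   # True for the sample data above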
test
TaskReschedule.find_for_task_instance
Returns all task reschedules for the task instance and try number, in ascending order. :param task_instance: the task instance to find task reschedules for :type task_instance: airflow.models.TaskInstance
airflow/models/taskreschedule.py
def find_for_task_instance(task_instance, session):
    """
    Returns all task reschedules for the task instance and try number,
    in ascending order.

    :param task_instance: the task instance to find task reschedules for
    :type task_instance: airflow.models.TaskInstance
    """
    TR = TaskReschedule
    return (
        session
        .query(TR)
        .filter(TR.dag_id == task_instance.dag_id,
                TR.task_id == task_instance.task_id,
                TR.execution_date == task_instance.execution_date,
                TR.try_number == task_instance.try_number)
        .order_by(asc(TR.id))
        .all()
    )
[ "Returns", "all", "task", "reschedules", "for", "the", "task", "instance", "and", "try", "number", "in", "ascending", "order", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskreschedule.py#L67-L85
[ "def", "find_for_task_instance", "(", "task_instance", ",", "session", ")", ":", "TR", "=", "TaskReschedule", "return", "(", "session", ".", "query", "(", "TR", ")", ".", "filter", "(", "TR", ".", "dag_id", "==", "task_instance", ".", "dag_id", ",", "TR", ".", "task_id", "==", "task_instance", ".", "task_id", ",", "TR", ".", "execution_date", "==", "task_instance", ".", "execution_date", ",", "TR", ".", "try_number", "==", "task_instance", ".", "try_number", ")", ".", "order_by", "(", "asc", "(", "TR", ".", "id", ")", ")", ".", "all", "(", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AirflowKubernetesScheduler._strip_unsafe_kubernetes_special_chars
Kubernetes only supports lowercase alphanumeric characters and "-" and "." in the pod name However, there are special rules about how "-" and "." can be used so let's only keep alphanumeric chars see here for detail: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/ :param string: The requested Pod name :return: ``str`` Pod name stripped of any unsafe characters
airflow/contrib/executors/kubernetes_executor.py
def _strip_unsafe_kubernetes_special_chars(string):
    """
    Kubernetes only supports lowercase alphanumeric characters and "-" and "."
    in the pod name. However, there are special rules about how "-" and "." can
    be used so let's only keep alphanumeric chars see here for detail:
    https://kubernetes.io/docs/concepts/overview/working-with-objects/names/

    :param string: The requested Pod name
    :return: ``str`` Pod name stripped of any unsafe characters
    """
    return ''.join(ch.lower() for ind, ch in enumerate(string) if ch.isalnum())
[ "Kubernetes", "only", "supports", "lowercase", "alphanumeric", "characters", "and", "-", "and", ".", "in", "the", "pod", "name", "However", "there", "are", "special", "rules", "about", "how", "-", "and", ".", "can", "be", "used", "so", "let", "s", "only", "keep", "alphanumeric", "chars", "see", "here", "for", "detail", ":", "https", ":", "//", "kubernetes", ".", "io", "/", "docs", "/", "concepts", "/", "overview", "/", "working", "-", "with", "-", "objects", "/", "names", "/" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/executors/kubernetes_executor.py#L458-L470
[ "def", "_strip_unsafe_kubernetes_special_chars", "(", "string", ")", ":", "return", "''", ".", "join", "(", "ch", ".", "lower", "(", ")", "for", "ind", ",", "ch", "in", "enumerate", "(", "string", ")", "if", "ch", ".", "isalnum", "(", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
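A quick illustration of the stripping above, using an invented task id:

from airflow.contrib.executors.kubernetes_executor import AirflowKubernetesScheduler

# Everything non-alphanumeric is dropped and the rest is lowercased.
print(AirflowKubernetesScheduler._strip_unsafe_kubernetes_special_chars('My_DAG-Task.01'))
# -> 'mydagtask01'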
test
AirflowKubernetesScheduler._make_safe_pod_id
Kubernetes pod names must be <= 253 chars and must pass the following regex for validation "^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$" :param safe_dag_id: a dag_id with only alphanumeric characters :param safe_task_id: a task_id with only alphanumeric characters :param random_uuid: a uuid :return: ``str`` valid Pod name of appropriate length
airflow/contrib/executors/kubernetes_executor.py
def _make_safe_pod_id(safe_dag_id, safe_task_id, safe_uuid):
    """
    Kubernetes pod names must be <= 253 chars and must pass the following regex
    for validation
    "^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"

    :param safe_dag_id: a dag_id with only alphanumeric characters
    :param safe_task_id: a task_id with only alphanumeric characters
    :param random_uuid: a uuid
    :return: ``str`` valid Pod name of appropriate length
    """
    MAX_POD_ID_LEN = 253

    safe_key = safe_dag_id + safe_task_id

    safe_pod_id = safe_key[:MAX_POD_ID_LEN - len(safe_uuid) - 1] + "-" + safe_uuid

    return safe_pod_id
[ "Kubernetes", "pod", "names", "must", "be", "<", "=", "253", "chars", "and", "must", "pass", "the", "following", "regex", "for", "validation", "^", "[", "a", "-", "z0", "-", "9", "]", "(", "[", "-", "a", "-", "z0", "-", "9", "]", "*", "[", "a", "-", "z0", "-", "9", "]", ")", "?", "(", "\\\\", ".", "[", "a", "-", "z0", "-", "9", "]", "(", "[", "-", "a", "-", "z0", "-", "9", "]", "*", "[", "a", "-", "z0", "-", "9", "]", ")", "?", ")", "*", "$" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/executors/kubernetes_executor.py#L473-L490
[ "def", "_make_safe_pod_id", "(", "safe_dag_id", ",", "safe_task_id", ",", "safe_uuid", ")", ":", "MAX_POD_ID_LEN", "=", "253", "safe_key", "=", "safe_dag_id", "+", "safe_task_id", "safe_pod_id", "=", "safe_key", "[", ":", "MAX_POD_ID_LEN", "-", "len", "(", "safe_uuid", ")", "-", "1", "]", "+", "\"-\"", "+", "safe_uuid", "return", "safe_pod_id" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
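The slicing above keeps the assembled pod id within 253 characters; a worked example with already-sanitised placeholder inputs:

from airflow.contrib.executors.kubernetes_executor import AirflowKubernetesScheduler

safe_uuid = 'a' * 32   # stand-in for a hex uuid
pod_id = AirflowKubernetesScheduler._make_safe_pod_id('mydag' * 60, 'mytask', safe_uuid)
# The 306-char key is cut to 253 - 32 - 1 = 220 chars, then '-' and the uuid follow.
print(len(pod_id))     # 253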
test
AirflowKubernetesScheduler._make_safe_label_value
Valid label values must be 63 characters or less and must be empty or begin and end with an alphanumeric character ([a-z0-9A-Z]) with dashes (-), underscores (_), dots (.), and alphanumerics between. If the label value is then greater than 63 chars once made safe, or differs in any way from the original value sent to this function, then we need to truncate to 53chars, and append it with a unique hash.
airflow/contrib/executors/kubernetes_executor.py
def _make_safe_label_value(string):
    """
    Valid label values must be 63 characters or less and must be empty or begin and
    end with an alphanumeric character ([a-z0-9A-Z]) with dashes (-), underscores (_),
    dots (.), and alphanumerics between.

    If the label value is then greater than 63 chars once made safe, or differs in any
    way from the original value sent to this function, then we need to truncate to
    53chars, and append it with a unique hash.
    """
    MAX_LABEL_LEN = 63

    safe_label = re.sub(r'^[^a-z0-9A-Z]*|[^a-zA-Z0-9_\-\.]|[^a-z0-9A-Z]*$', '', string)

    if len(safe_label) > MAX_LABEL_LEN or string != safe_label:
        safe_hash = hashlib.md5(string.encode()).hexdigest()[:9]
        safe_label = safe_label[:MAX_LABEL_LEN - len(safe_hash) - 1] + "-" + safe_hash

    return safe_label
[ "Valid", "label", "values", "must", "be", "63", "characters", "or", "less", "and", "must", "be", "empty", "or", "begin", "and", "end", "with", "an", "alphanumeric", "character", "(", "[", "a", "-", "z0", "-", "9A", "-", "Z", "]", ")", "with", "dashes", "(", "-", ")", "underscores", "(", "_", ")", "dots", "(", ".", ")", "and", "alphanumerics", "between", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/executors/kubernetes_executor.py#L493-L511
[ "def", "_make_safe_label_value", "(", "string", ")", ":", "MAX_LABEL_LEN", "=", "63", "safe_label", "=", "re", ".", "sub", "(", "r'^[^a-z0-9A-Z]*|[^a-zA-Z0-9_\\-\\.]|[^a-z0-9A-Z]*$'", ",", "''", ",", "string", ")", "if", "len", "(", "safe_label", ")", ">", "MAX_LABEL_LEN", "or", "string", "!=", "safe_label", ":", "safe_hash", "=", "hashlib", ".", "md5", "(", "string", ".", "encode", "(", ")", ")", ".", "hexdigest", "(", ")", "[", ":", "9", "]", "safe_label", "=", "safe_label", "[", ":", "MAX_LABEL_LEN", "-", "len", "(", "safe_hash", ")", "-", "1", "]", "+", "\"-\"", "+", "safe_hash", "return", "safe_label" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
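Two invented inputs show both branches of the sanitiser: a value that already satisfies the label rules passes through unchanged, while anything altered (or too long) is truncated and suffixed with a 9-character md5 hash.

from airflow.contrib.executors.kubernetes_executor import AirflowKubernetesScheduler

print(AirflowKubernetesScheduler._make_safe_label_value('my_dag_v1'))   # 'my_dag_v1'
# A leading dash is stripped, so the value differs and gets a hash suffix:
print(AirflowKubernetesScheduler._make_safe_label_value('-my_dag_v1'))  # 'my_dag_v1-<9 hex chars>'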
test
KubernetesExecutor.clear_not_launched_queued_tasks
If the airflow scheduler restarts with pending "Queued" tasks, the tasks may or may not have been launched Thus, on starting up the scheduler let's check every "Queued" task to see if it has been launched (ie: if there is a corresponding pod on kubernetes) If it has been launched then do nothing, otherwise reset the state to "None" so the task will be rescheduled This will not be necessary in a future version of airflow in which there is proper support for State.LAUNCHED
airflow/contrib/executors/kubernetes_executor.py
def clear_not_launched_queued_tasks(self, session=None):
    """
    If the airflow scheduler restarts with pending "Queued" tasks, the tasks may
    or may not have been launched. Thus, on starting up the scheduler let's check
    every "Queued" task to see if it has been launched (ie: if there is a
    corresponding pod on kubernetes)

    If it has been launched then do nothing, otherwise reset the state to "None"
    so the task will be rescheduled

    This will not be necessary in a future version of airflow in which there is
    proper support for State.LAUNCHED
    """
    queued_tasks = session\
        .query(TaskInstance)\
        .filter(TaskInstance.state == State.QUEUED).all()
    self.log.info(
        'When executor started up, found %s queued task instances',
        len(queued_tasks)
    )

    for task in queued_tasks:
        dict_string = (
            "dag_id={},task_id={},execution_date={},airflow-worker={}".format(
                AirflowKubernetesScheduler._make_safe_label_value(task.dag_id),
                AirflowKubernetesScheduler._make_safe_label_value(task.task_id),
                AirflowKubernetesScheduler._datetime_to_label_safe_datestring(
                    task.execution_date
                ),
                self.worker_uuid
            )
        )
        kwargs = dict(label_selector=dict_string)
        pod_list = self.kube_client.list_namespaced_pod(
            self.kube_config.kube_namespace, **kwargs)
        if len(pod_list.items) == 0:
            self.log.info(
                'TaskInstance: %s found in queued state but was not launched, '
                'rescheduling', task
            )
            session.query(TaskInstance).filter(
                TaskInstance.dag_id == task.dag_id,
                TaskInstance.task_id == task.task_id,
                TaskInstance.execution_date == task.execution_date
            ).update({TaskInstance.state: State.NONE})
[ "If", "the", "airflow", "scheduler", "restarts", "with", "pending", "Queued", "tasks", "the", "tasks", "may", "or", "may", "not", "have", "been", "launched", "Thus", "on", "starting", "up", "the", "scheduler", "let", "s", "check", "every", "Queued", "task", "to", "see", "if", "it", "has", "been", "launched", "(", "ie", ":", "if", "there", "is", "a", "corresponding", "pod", "on", "kubernetes", ")" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/executors/kubernetes_executor.py#L606-L653
[ "def", "clear_not_launched_queued_tasks", "(", "self", ",", "session", "=", "None", ")", ":", "queued_tasks", "=", "session", ".", "query", "(", "TaskInstance", ")", ".", "filter", "(", "TaskInstance", ".", "state", "==", "State", ".", "QUEUED", ")", ".", "all", "(", ")", "self", ".", "log", ".", "info", "(", "'When executor started up, found %s queued task instances'", ",", "len", "(", "queued_tasks", ")", ")", "for", "task", "in", "queued_tasks", ":", "dict_string", "=", "(", "\"dag_id={},task_id={},execution_date={},airflow-worker={}\"", ".", "format", "(", "AirflowKubernetesScheduler", ".", "_make_safe_label_value", "(", "task", ".", "dag_id", ")", ",", "AirflowKubernetesScheduler", ".", "_make_safe_label_value", "(", "task", ".", "task_id", ")", ",", "AirflowKubernetesScheduler", ".", "_datetime_to_label_safe_datestring", "(", "task", ".", "execution_date", ")", ",", "self", ".", "worker_uuid", ")", ")", "kwargs", "=", "dict", "(", "label_selector", "=", "dict_string", ")", "pod_list", "=", "self", ".", "kube_client", ".", "list_namespaced_pod", "(", "self", ".", "kube_config", ".", "kube_namespace", ",", "*", "*", "kwargs", ")", "if", "len", "(", "pod_list", ".", "items", ")", "==", "0", ":", "self", ".", "log", ".", "info", "(", "'TaskInstance: %s found in queued state but was not launched, '", "'rescheduling'", ",", "task", ")", "session", ".", "query", "(", "TaskInstance", ")", ".", "filter", "(", "TaskInstance", ".", "dag_id", "==", "task", ".", "dag_id", ",", "TaskInstance", ".", "task_id", "==", "task", ".", "task_id", ",", "TaskInstance", ".", "execution_date", "==", "task", ".", "execution_date", ")", ".", "update", "(", "{", "TaskInstance", ".", "state", ":", "State", ".", "NONE", "}", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
Pool.open_slots
Returns the number of slots open at the moment
airflow/models/pool.py
def open_slots(self, session): """ Returns the number of slots open at the moment """ from airflow.models.taskinstance import \ TaskInstance as TI # Avoid circular import used_slots = session.query(func.count()).filter(TI.pool == self.pool).filter( TI.state.in_([State.RUNNING, State.QUEUED])).scalar() return self.slots - used_slots
def open_slots(self, session): """ Returns the number of slots open at the moment """ from airflow.models.taskinstance import \ TaskInstance as TI # Avoid circular import used_slots = session.query(func.count()).filter(TI.pool == self.pool).filter( TI.state.in_([State.RUNNING, State.QUEUED])).scalar() return self.slots - used_slots
[ "Returns", "the", "number", "of", "slots", "open", "at", "the", "moment" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/pool.py#L60-L69
[ "def", "open_slots", "(", "self", ",", "session", ")", ":", "from", "airflow", ".", "models", ".", "taskinstance", "import", "TaskInstance", "as", "TI", "# Avoid circular import", "used_slots", "=", "session", ".", "query", "(", "func", ".", "count", "(", ")", ")", ".", "filter", "(", "TI", ".", "pool", "==", "self", ".", "pool", ")", ".", "filter", "(", "TI", ".", "state", ".", "in_", "(", "[", "State", ".", "RUNNING", ",", "State", ".", "QUEUED", "]", ")", ")", ".", "scalar", "(", ")", "return", "self", ".", "slots", "-", "used_slots" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
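A minimal usage sketch for Pool.open_slots, assuming an initialized Airflow metadata database and a working installation; the report helper below is invented for illustration.

from airflow.models import Pool
from airflow.utils.db import provide_session


@provide_session
def report_open_slots(session=None):
    # open_slots() subtracts RUNNING and QUEUED task instances from the configured slot count.
    for pool in session.query(Pool).all():
        print(pool.pool, pool.slots, pool.open_slots(session=session))


report_open_slots()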
test
expand_env_var
Expands (potentially nested) env vars by repeatedly applying `expandvars` and `expanduser` until interpolation stops having any effect.
airflow/configuration.py
def expand_env_var(env_var): """ Expands (potentially nested) env vars by repeatedly applying `expandvars` and `expanduser` until interpolation stops having any effect. """ if not env_var: return env_var while True: interpolated = os.path.expanduser(os.path.expandvars(str(env_var))) if interpolated == env_var: return interpolated else: env_var = interpolated
def expand_env_var(env_var): """ Expands (potentially nested) env vars by repeatedly applying `expandvars` and `expanduser` until interpolation stops having any effect. """ if not env_var: return env_var while True: interpolated = os.path.expanduser(os.path.expandvars(str(env_var))) if interpolated == env_var: return interpolated else: env_var = interpolated
[ "Expands", "(", "potentially", "nested", ")", "env", "vars", "by", "repeatedly", "applying", "expandvars", "and", "expanduser", "until", "interpolation", "stops", "having", "any", "effect", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/configuration.py#L59-L72
[ "def", "expand_env_var", "(", "env_var", ")", ":", "if", "not", "env_var", ":", "return", "env_var", "while", "True", ":", "interpolated", "=", "os", ".", "path", ".", "expanduser", "(", "os", ".", "path", ".", "expandvars", "(", "str", "(", "env_var", ")", ")", ")", "if", "interpolated", "==", "env_var", ":", "return", "interpolated", "else", ":", "env_var", "=", "interpolated" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
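A short sketch of how expand_env_var resolves nested variables, assuming Airflow is importable; the environment variable names are made up for the example, and the import path follows the record's path field (airflow/configuration.py).

import os
from airflow.configuration import expand_env_var

# One env var nested inside another; expansion repeats until the value stops changing.
os.environ['DATA_ROOT'] = '/data'
os.environ['DAGS_DIR'] = '$DATA_ROOT/dags'

print(expand_env_var('$DAGS_DIR/etl'))   # -> /data/dags/etl
print(expand_env_var('~/airflow'))       # expanduser is applied as well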
test
run_command
Runs command and returns stdout
airflow/configuration.py
def run_command(command): """ Runs command and returns stdout """ process = subprocess.Popen( shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True) output, stderr = [stream.decode(sys.getdefaultencoding(), 'ignore') for stream in process.communicate()] if process.returncode != 0: raise AirflowConfigException( "Cannot execute {}. Error code is: {}. Output: {}, Stderr: {}" .format(command, process.returncode, output, stderr) ) return output
def run_command(command): """ Runs command and returns stdout """ process = subprocess.Popen( shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True) output, stderr = [stream.decode(sys.getdefaultencoding(), 'ignore') for stream in process.communicate()] if process.returncode != 0: raise AirflowConfigException( "Cannot execute {}. Error code is: {}. Output: {}, Stderr: {}" .format(command, process.returncode, output, stderr) ) return output
[ "Runs", "command", "and", "returns", "stdout" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/configuration.py#L75-L93
[ "def", "run_command", "(", "command", ")", ":", "process", "=", "subprocess", ".", "Popen", "(", "shlex", ".", "split", "(", "command", ")", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "close_fds", "=", "True", ")", "output", ",", "stderr", "=", "[", "stream", ".", "decode", "(", "sys", ".", "getdefaultencoding", "(", ")", ",", "'ignore'", ")", "for", "stream", "in", "process", ".", "communicate", "(", ")", "]", "if", "process", ".", "returncode", "!=", "0", ":", "raise", "AirflowConfigException", "(", "\"Cannot execute {}. Error code is: {}. Output: {}, Stderr: {}\"", ".", "format", "(", "command", ",", "process", ".", "returncode", ",", "output", ",", "stderr", ")", ")", "return", "output" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
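A hedged sketch of calling run_command, assuming a POSIX shell environment; the command strings are arbitrary and the exception type is the one raised in the function body above.

from airflow.configuration import run_command
from airflow.exceptions import AirflowConfigException

print(run_command('echo hello').strip())   # stdout is returned as text

try:
    run_command('false')                   # non-zero exit code
except AirflowConfigException as err:
    print('command failed:', err)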
test
parameterized_config
Generates a configuration from the provided template + variables defined in current scope :param template: a config content templated with {{variables}}
airflow/configuration.py
def parameterized_config(template): """ Generates a configuration from the provided template + variables defined in current scope :param template: a config content templated with {{variables}} """ all_vars = {k: v for d in [globals(), locals()] for k, v in d.items()} return template.format(**all_vars)
def parameterized_config(template): """ Generates a configuration from the provided template + variables defined in current scope :param template: a config content templated with {{variables}} """ all_vars = {k: v for d in [globals(), locals()] for k, v in d.items()} return template.format(**all_vars)
[ "Generates", "a", "configuration", "from", "the", "provided", "template", "+", "variables", "defined", "in", "current", "scope", ":", "param", "template", ":", "a", "config", "content", "templated", "with", "{{", "variables", "}}" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/configuration.py#L484-L491
[ "def", "parameterized_config", "(", "template", ")", ":", "all_vars", "=", "{", "k", ":", "v", "for", "d", "in", "[", "globals", "(", ")", ",", "locals", "(", ")", "]", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", "}", "return", "template", ".", "format", "(", "*", "*", "all_vars", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AirflowConfigParser.remove_option
Remove an option if it exists in the config loaded from a file or in the default config. If both configs have the same option, this removes the option from both unless remove_default=False.
airflow/configuration.py
def remove_option(self, section, option, remove_default=True): """ Remove an option if it exists in config from a file or default config. If both of config have the same option, this removes the option in both configs unless remove_default=False. """ if super().has_option(section, option): super().remove_option(section, option) if self.airflow_defaults.has_option(section, option) and remove_default: self.airflow_defaults.remove_option(section, option)
def remove_option(self, section, option, remove_default=True): """ Remove an option if it exists in config from a file or default config. If both of config have the same option, this removes the option in both configs unless remove_default=False. """ if super().has_option(section, option): super().remove_option(section, option) if self.airflow_defaults.has_option(section, option) and remove_default: self.airflow_defaults.remove_option(section, option)
[ "Remove", "an", "option", "if", "it", "exists", "in", "config", "from", "a", "file", "or", "default", "config", ".", "If", "both", "of", "config", "have", "the", "same", "option", "this", "removes", "the", "option", "in", "both", "configs", "unless", "remove_default", "=", "False", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/configuration.py#L297-L307
[ "def", "remove_option", "(", "self", ",", "section", ",", "option", ",", "remove_default", "=", "True", ")", ":", "if", "super", "(", ")", ".", "has_option", "(", "section", ",", "option", ")", ":", "super", "(", ")", ".", "remove_option", "(", "section", ",", "option", ")", "if", "self", ".", "airflow_defaults", ".", "has_option", "(", "section", ",", "option", ")", "and", "remove_default", ":", "self", ".", "airflow_defaults", ".", "remove_option", "(", "section", ",", "option", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AirflowConfigParser.getsection
Returns the section as a dict. Values are converted to int, float, bool as required. :param section: section from the config :rtype: dict
airflow/configuration.py
def getsection(self, section): """ Returns the section as a dict. Values are converted to int, float, bool as required. :param section: section from the config :rtype: dict """ if (section not in self._sections and section not in self.airflow_defaults._sections): return None _section = copy.deepcopy(self.airflow_defaults._sections[section]) if section in self._sections: _section.update(copy.deepcopy(self._sections[section])) section_prefix = 'AIRFLOW__{S}__'.format(S=section.upper()) for env_var in sorted(os.environ.keys()): if env_var.startswith(section_prefix): key = env_var.replace(section_prefix, '').lower() _section[key] = self._get_env_var_option(section, key) for key, val in iteritems(_section): try: val = int(val) except ValueError: try: val = float(val) except ValueError: if val.lower() in ('t', 'true'): val = True elif val.lower() in ('f', 'false'): val = False _section[key] = val return _section
def getsection(self, section): """ Returns the section as a dict. Values are converted to int, float, bool as required. :param section: section from the config :rtype: dict """ if (section not in self._sections and section not in self.airflow_defaults._sections): return None _section = copy.deepcopy(self.airflow_defaults._sections[section]) if section in self._sections: _section.update(copy.deepcopy(self._sections[section])) section_prefix = 'AIRFLOW__{S}__'.format(S=section.upper()) for env_var in sorted(os.environ.keys()): if env_var.startswith(section_prefix): key = env_var.replace(section_prefix, '').lower() _section[key] = self._get_env_var_option(section, key) for key, val in iteritems(_section): try: val = int(val) except ValueError: try: val = float(val) except ValueError: if val.lower() in ('t', 'true'): val = True elif val.lower() in ('f', 'false'): val = False _section[key] = val return _section
[ "Returns", "the", "section", "as", "a", "dict", ".", "Values", "are", "converted", "to", "int", "float", "bool", "as", "required", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/configuration.py#L309-L344
[ "def", "getsection", "(", "self", ",", "section", ")", ":", "if", "(", "section", "not", "in", "self", ".", "_sections", "and", "section", "not", "in", "self", ".", "airflow_defaults", ".", "_sections", ")", ":", "return", "None", "_section", "=", "copy", ".", "deepcopy", "(", "self", ".", "airflow_defaults", ".", "_sections", "[", "section", "]", ")", "if", "section", "in", "self", ".", "_sections", ":", "_section", ".", "update", "(", "copy", ".", "deepcopy", "(", "self", ".", "_sections", "[", "section", "]", ")", ")", "section_prefix", "=", "'AIRFLOW__{S}__'", ".", "format", "(", "S", "=", "section", ".", "upper", "(", ")", ")", "for", "env_var", "in", "sorted", "(", "os", ".", "environ", ".", "keys", "(", ")", ")", ":", "if", "env_var", ".", "startswith", "(", "section_prefix", ")", ":", "key", "=", "env_var", ".", "replace", "(", "section_prefix", ",", "''", ")", ".", "lower", "(", ")", "_section", "[", "key", "]", "=", "self", ".", "_get_env_var_option", "(", "section", ",", "key", ")", "for", "key", ",", "val", "in", "iteritems", "(", "_section", ")", ":", "try", ":", "val", "=", "int", "(", "val", ")", "except", "ValueError", ":", "try", ":", "val", "=", "float", "(", "val", ")", "except", "ValueError", ":", "if", "val", ".", "lower", "(", ")", "in", "(", "'t'", ",", "'true'", ")", ":", "val", "=", "True", "elif", "val", ".", "lower", "(", ")", "in", "(", "'f'", ",", "'false'", ")", ":", "val", "=", "False", "_section", "[", "key", "]", "=", "val", "return", "_section" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
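A minimal sketch of getsection via the module-level conf object exposed by airflow.configuration; the section and option names come from the stock default config, and the environment-variable override is only illustrative.

import os
from airflow.configuration import conf

# Values come back type-coerced: '32' -> 32, 'True' -> True, and so on.
print(conf.getsection('core')['parallelism'])

# An AIRFLOW__<SECTION>__<KEY> environment variable overrides the file/default value.
os.environ['AIRFLOW__CORE__PARALLELISM'] = '8'
print(conf.getsection('core')['parallelism'])  # -> 8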
test
AirflowConfigParser.as_dict
Returns the current configuration as an OrderedDict of OrderedDicts. :param display_source: If False, the option value is returned. If True, a tuple of (option_value, source) is returned. Source is either 'airflow.cfg', 'default', 'env var', or 'cmd'. :type display_source: bool :param display_sensitive: If True, the values of options set by env vars and bash commands will be displayed. If False, those options are shown as '< hidden >' :type display_sensitive: bool :param raw: Should the values be output as interpolated values, or the "raw" form that can be fed back in to ConfigParser :type raw: bool
airflow/configuration.py
def as_dict( self, display_source=False, display_sensitive=False, raw=False): """ Returns the current configuration as an OrderedDict of OrderedDicts. :param display_source: If False, the option value is returned. If True, a tuple of (option_value, source) is returned. Source is either 'airflow.cfg', 'default', 'env var', or 'cmd'. :type display_source: bool :param display_sensitive: If True, the values of options set by env vars and bash commands will be displayed. If False, those options are shown as '< hidden >' :type display_sensitive: bool :param raw: Should the values be output as interpolated values, or the "raw" form that can be fed back in to ConfigParser :type raw: bool """ cfg = {} configs = [ ('default', self.airflow_defaults), ('airflow.cfg', self), ] for (source_name, config) in configs: for section in config.sections(): sect = cfg.setdefault(section, OrderedDict()) for (k, val) in config.items(section=section, raw=raw): if display_source: val = (val, source_name) sect[k] = val # add env vars and overwrite because they have priority for ev in [ev for ev in os.environ if ev.startswith('AIRFLOW__')]: try: _, section, key = ev.split('__') opt = self._get_env_var_option(section, key) except ValueError: continue if not display_sensitive and ev != 'AIRFLOW__CORE__UNIT_TEST_MODE': opt = '< hidden >' elif raw: opt = opt.replace('%', '%%') if display_source: opt = (opt, 'env var') cfg.setdefault(section.lower(), OrderedDict()).update( {key.lower(): opt}) # add bash commands for (section, key) in self.as_command_stdout: opt = self._get_cmd_option(section, key) if opt: if not display_sensitive: opt = '< hidden >' if display_source: opt = (opt, 'cmd') elif raw: opt = opt.replace('%', '%%') cfg.setdefault(section, OrderedDict()).update({key: opt}) del cfg[section][key + '_cmd'] return cfg
def as_dict( self, display_source=False, display_sensitive=False, raw=False): """ Returns the current configuration as an OrderedDict of OrderedDicts. :param display_source: If False, the option value is returned. If True, a tuple of (option_value, source) is returned. Source is either 'airflow.cfg', 'default', 'env var', or 'cmd'. :type display_source: bool :param display_sensitive: If True, the values of options set by env vars and bash commands will be displayed. If False, those options are shown as '< hidden >' :type display_sensitive: bool :param raw: Should the values be output as interpolated values, or the "raw" form that can be fed back in to ConfigParser :type raw: bool """ cfg = {} configs = [ ('default', self.airflow_defaults), ('airflow.cfg', self), ] for (source_name, config) in configs: for section in config.sections(): sect = cfg.setdefault(section, OrderedDict()) for (k, val) in config.items(section=section, raw=raw): if display_source: val = (val, source_name) sect[k] = val # add env vars and overwrite because they have priority for ev in [ev for ev in os.environ if ev.startswith('AIRFLOW__')]: try: _, section, key = ev.split('__') opt = self._get_env_var_option(section, key) except ValueError: continue if not display_sensitive and ev != 'AIRFLOW__CORE__UNIT_TEST_MODE': opt = '< hidden >' elif raw: opt = opt.replace('%', '%%') if display_source: opt = (opt, 'env var') cfg.setdefault(section.lower(), OrderedDict()).update( {key.lower(): opt}) # add bash commands for (section, key) in self.as_command_stdout: opt = self._get_cmd_option(section, key) if opt: if not display_sensitive: opt = '< hidden >' if display_source: opt = (opt, 'cmd') elif raw: opt = opt.replace('%', '%%') cfg.setdefault(section, OrderedDict()).update({key: opt}) del cfg[section][key + '_cmd'] return cfg
[ "Returns", "the", "current", "configuration", "as", "an", "OrderedDict", "of", "OrderedDicts", ".", ":", "param", "display_source", ":", "If", "False", "the", "option", "value", "is", "returned", ".", "If", "True", "a", "tuple", "of", "(", "option_value", "source", ")", "is", "returned", ".", "Source", "is", "either", "airflow", ".", "cfg", "default", "env", "var", "or", "cmd", ".", ":", "type", "display_source", ":", "bool", ":", "param", "display_sensitive", ":", "If", "True", "the", "values", "of", "options", "set", "by", "env", "vars", "and", "bash", "commands", "will", "be", "displayed", ".", "If", "False", "those", "options", "are", "shown", "as", "<", "hidden", ">", ":", "type", "display_sensitive", ":", "bool", ":", "param", "raw", ":", "Should", "the", "values", "be", "output", "as", "interpolated", "values", "or", "the", "raw", "form", "that", "can", "be", "fed", "back", "in", "to", "ConfigParser", ":", "type", "raw", ":", "bool" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/configuration.py#L346-L405
[ "def", "as_dict", "(", "self", ",", "display_source", "=", "False", ",", "display_sensitive", "=", "False", ",", "raw", "=", "False", ")", ":", "cfg", "=", "{", "}", "configs", "=", "[", "(", "'default'", ",", "self", ".", "airflow_defaults", ")", ",", "(", "'airflow.cfg'", ",", "self", ")", ",", "]", "for", "(", "source_name", ",", "config", ")", "in", "configs", ":", "for", "section", "in", "config", ".", "sections", "(", ")", ":", "sect", "=", "cfg", ".", "setdefault", "(", "section", ",", "OrderedDict", "(", ")", ")", "for", "(", "k", ",", "val", ")", "in", "config", ".", "items", "(", "section", "=", "section", ",", "raw", "=", "raw", ")", ":", "if", "display_source", ":", "val", "=", "(", "val", ",", "source_name", ")", "sect", "[", "k", "]", "=", "val", "# add env vars and overwrite because they have priority", "for", "ev", "in", "[", "ev", "for", "ev", "in", "os", ".", "environ", "if", "ev", ".", "startswith", "(", "'AIRFLOW__'", ")", "]", ":", "try", ":", "_", ",", "section", ",", "key", "=", "ev", ".", "split", "(", "'__'", ")", "opt", "=", "self", ".", "_get_env_var_option", "(", "section", ",", "key", ")", "except", "ValueError", ":", "continue", "if", "not", "display_sensitive", "and", "ev", "!=", "'AIRFLOW__CORE__UNIT_TEST_MODE'", ":", "opt", "=", "'< hidden >'", "elif", "raw", ":", "opt", "=", "opt", ".", "replace", "(", "'%'", ",", "'%%'", ")", "if", "display_source", ":", "opt", "=", "(", "opt", ",", "'env var'", ")", "cfg", ".", "setdefault", "(", "section", ".", "lower", "(", ")", ",", "OrderedDict", "(", ")", ")", ".", "update", "(", "{", "key", ".", "lower", "(", ")", ":", "opt", "}", ")", "# add bash commands", "for", "(", "section", ",", "key", ")", "in", "self", ".", "as_command_stdout", ":", "opt", "=", "self", ".", "_get_cmd_option", "(", "section", ",", "key", ")", "if", "opt", ":", "if", "not", "display_sensitive", ":", "opt", "=", "'< hidden >'", "if", "display_source", ":", "opt", "=", "(", "opt", ",", "'cmd'", ")", "elif", "raw", ":", "opt", "=", "opt", ".", "replace", "(", "'%'", ",", "'%%'", ")", "cfg", ".", "setdefault", "(", "section", ",", "OrderedDict", "(", ")", ")", ".", "update", "(", "{", "key", ":", "opt", "}", ")", "del", "cfg", "[", "section", "]", "[", "key", "+", "'_cmd'", "]", "return", "cfg" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
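A brief sketch of as_dict with source tracking enabled, assuming the same module-level conf object; only the 'core' section is printed here for illustration.

from airflow.configuration import conf

# With display_source=True each option maps to a (value, source) tuple;
# sensitive env/cmd values are shown as '< hidden >' unless display_sensitive=True.
cfg = conf.as_dict(display_source=True, display_sensitive=False)
for key, (value, source) in cfg['core'].items():
    print(key, value, source)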
test
DatastoreHook.allocate_ids
Allocate IDs for incomplete keys. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/allocateIds :param partial_keys: a list of partial keys. :type partial_keys: list :return: a list of full keys. :rtype: list
airflow/contrib/hooks/datastore_hook.py
def allocate_ids(self, partial_keys): """ Allocate IDs for incomplete keys. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/allocateIds :param partial_keys: a list of partial keys. :type partial_keys: list :return: a list of full keys. :rtype: list """ conn = self.get_conn() resp = (conn .projects() .allocateIds(projectId=self.project_id, body={'keys': partial_keys}) .execute(num_retries=self.num_retries)) return resp['keys']
def allocate_ids(self, partial_keys): """ Allocate IDs for incomplete keys. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/allocateIds :param partial_keys: a list of partial keys. :type partial_keys: list :return: a list of full keys. :rtype: list """ conn = self.get_conn() resp = (conn .projects() .allocateIds(projectId=self.project_id, body={'keys': partial_keys}) .execute(num_retries=self.num_retries)) return resp['keys']
[ "Allocate", "IDs", "for", "incomplete", "keys", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L62-L81
[ "def", "allocate_ids", "(", "self", ",", "partial_keys", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "resp", "=", "(", "conn", ".", "projects", "(", ")", ".", "allocateIds", "(", "projectId", "=", "self", ".", "project_id", ",", "body", "=", "{", "'keys'", ":", "partial_keys", "}", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp", "[", "'keys'", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
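A hedged sketch of allocate_ids; the no-argument DatastoreHook construction assumes a default Google Cloud Datastore connection is configured, and the partial-key layout follows the Cloud Datastore REST API with placeholder names.

from airflow.contrib.hooks.datastore_hook import DatastoreHook

hook = DatastoreHook()  # assumes the default Datastore/GCP connection is configured
partial_key = {'path': [{'kind': 'Task'}]}  # a key with no id/name yet
full_keys = hook.allocate_ids(partial_keys=[partial_key])
print(full_keys[0]['path'][0]['id'])  # the service fills in a numeric id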
test
DatastoreHook.begin_transaction
Begins a new transaction. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/beginTransaction :return: a transaction handle. :rtype: str
airflow/contrib/hooks/datastore_hook.py
def begin_transaction(self): """ Begins a new transaction. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/beginTransaction :return: a transaction handle. :rtype: str """ conn = self.get_conn() resp = (conn .projects() .beginTransaction(projectId=self.project_id, body={}) .execute(num_retries=self.num_retries)) return resp['transaction']
def begin_transaction(self): """ Begins a new transaction. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/beginTransaction :return: a transaction handle. :rtype: str """ conn = self.get_conn() resp = (conn .projects() .beginTransaction(projectId=self.project_id, body={}) .execute(num_retries=self.num_retries)) return resp['transaction']
[ "Begins", "a", "new", "transaction", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L83-L100
[ "def", "begin_transaction", "(", "self", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "resp", "=", "(", "conn", ".", "projects", "(", ")", ".", "beginTransaction", "(", "projectId", "=", "self", ".", "project_id", ",", "body", "=", "{", "}", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp", "[", "'transaction'", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DatastoreHook.commit
Commit a transaction, optionally creating, deleting or modifying some entities. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/commit :param body: the body of the commit request. :type body: dict :return: the response body of the commit request. :rtype: dict
airflow/contrib/hooks/datastore_hook.py
def commit(self, body): """ Commit a transaction, optionally creating, deleting or modifying some entities. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/commit :param body: the body of the commit request. :type body: dict :return: the response body of the commit request. :rtype: dict """ conn = self.get_conn() resp = (conn .projects() .commit(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp
def commit(self, body): """ Commit a transaction, optionally creating, deleting or modifying some entities. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/commit :param body: the body of the commit request. :type body: dict :return: the response body of the commit request. :rtype: dict """ conn = self.get_conn() resp = (conn .projects() .commit(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp
[ "Commit", "a", "transaction", "optionally", "creating", "deleting", "or", "modifying", "some", "entities", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L102-L121
[ "def", "commit", "(", "self", ",", "body", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "resp", "=", "(", "conn", ".", "projects", "(", ")", ".", "commit", "(", "projectId", "=", "self", ".", "project_id", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
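A sketch of the transactional flow combining begin_transaction, commit, and rollback; connection configuration is assumed, and the request-body layout follows the Cloud Datastore REST API with placeholder kind/name values.

from airflow.contrib.hooks.datastore_hook import DatastoreHook

hook = DatastoreHook()  # connection configuration assumed
transaction = hook.begin_transaction()
try:
    entity = {
        'key': {'path': [{'kind': 'Task', 'name': 'example-task'}]},
        'properties': {'state': {'stringValue': 'queued'}},
    }
    # Commit request body per the Datastore REST API.
    hook.commit({
        'mode': 'TRANSACTIONAL',
        'transaction': transaction,
        'mutations': [{'upsert': entity}],
    })
except Exception:
    hook.rollback(transaction)
    raise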
test
DatastoreHook.lookup
Look up some entities by key. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/lookup :param keys: the keys to look up. :type keys: list :param read_consistency: the read consistency to use. default, strong or eventual. Cannot be used with a transaction. :type read_consistency: str :param transaction: the transaction to use, if any. :type transaction: str :return: the response body of the lookup request. :rtype: dict
airflow/contrib/hooks/datastore_hook.py
def lookup(self, keys, read_consistency=None, transaction=None): """ Lookup some entities by key. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/lookup :param keys: the keys to lookup. :type keys: list :param read_consistency: the read consistency to use. default, strong or eventual. Cannot be used with a transaction. :type read_consistency: str :param transaction: the transaction to use, if any. :type transaction: str :return: the response body of the lookup request. :rtype: dict """ conn = self.get_conn() body = {'keys': keys} if read_consistency: body['readConsistency'] = read_consistency if transaction: body['transaction'] = transaction resp = (conn .projects() .lookup(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp
def lookup(self, keys, read_consistency=None, transaction=None): """ Lookup some entities by key. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/lookup :param keys: the keys to lookup. :type keys: list :param read_consistency: the read consistency to use. default, strong or eventual. Cannot be used with a transaction. :type read_consistency: str :param transaction: the transaction to use, if any. :type transaction: str :return: the response body of the lookup request. :rtype: dict """ conn = self.get_conn() body = {'keys': keys} if read_consistency: body['readConsistency'] = read_consistency if transaction: body['transaction'] = transaction resp = (conn .projects() .lookup(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp
[ "Lookup", "some", "entities", "by", "key", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L123-L152
[ "def", "lookup", "(", "self", ",", "keys", ",", "read_consistency", "=", "None", ",", "transaction", "=", "None", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "body", "=", "{", "'keys'", ":", "keys", "}", "if", "read_consistency", ":", "body", "[", "'readConsistency'", "]", "=", "read_consistency", "if", "transaction", ":", "body", "[", "'transaction'", "]", "=", "transaction", "resp", "=", "(", "conn", ".", "projects", "(", ")", ".", "lookup", "(", "projectId", "=", "self", ".", "project_id", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
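A hedged sketch of lookup; connection configuration is assumed, the key layout follows the Cloud Datastore REST API, and the kind/name are placeholders.

from airflow.contrib.hooks.datastore_hook import DatastoreHook

hook = DatastoreHook()  # connection configuration assumed
key = {'path': [{'kind': 'Task', 'name': 'example-task'}]}
response = hook.lookup(keys=[key], read_consistency='EVENTUAL')
for found in response.get('found', []):
    print(found['entity']['key'])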
test
DatastoreHook.rollback
Roll back a transaction. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/rollback :param transaction: the transaction to roll back. :type transaction: str
airflow/contrib/hooks/datastore_hook.py
def rollback(self, transaction): """ Roll back a transaction. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/rollback :param transaction: the transaction to roll back. :type transaction: str """ conn = self.get_conn() conn.projects().rollback( projectId=self.project_id, body={'transaction': transaction} ).execute(num_retries=self.num_retries)
def rollback(self, transaction): """ Roll back a transaction. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/rollback :param transaction: the transaction to roll back. :type transaction: str """ conn = self.get_conn() conn.projects().rollback( projectId=self.project_id, body={'transaction': transaction} ).execute(num_retries=self.num_retries)
[ "Roll", "back", "a", "transaction", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L154-L168
[ "def", "rollback", "(", "self", ",", "transaction", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "conn", ".", "projects", "(", ")", ".", "rollback", "(", "projectId", "=", "self", ".", "project_id", ",", "body", "=", "{", "'transaction'", ":", "transaction", "}", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DatastoreHook.run_query
Run a query for entities. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/runQuery :param body: the body of the query request. :type body: dict :return: the batch of query results. :rtype: dict
airflow/contrib/hooks/datastore_hook.py
def run_query(self, body): """ Run a query for entities. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/runQuery :param body: the body of the query request. :type body: dict :return: the batch of query results. :rtype: dict """ conn = self.get_conn() resp = (conn .projects() .runQuery(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp['batch']
def run_query(self, body): """ Run a query for entities. .. seealso:: https://cloud.google.com/datastore/docs/reference/rest/v1/projects/runQuery :param body: the body of the query request. :type body: dict :return: the batch of query results. :rtype: dict """ conn = self.get_conn() resp = (conn .projects() .runQuery(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp['batch']
[ "Run", "a", "query", "for", "entities", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L170-L189
[ "def", "run_query", "(", "self", ",", "body", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "resp", "=", "(", "conn", ".", "projects", "(", ")", ".", "runQuery", "(", "projectId", "=", "self", ".", "project_id", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp", "[", "'batch'", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
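A short sketch of run_query with a structured query body; connection configuration is assumed and the body layout follows the Cloud Datastore REST API with a placeholder kind.

from airflow.contrib.hooks.datastore_hook import DatastoreHook

hook = DatastoreHook()  # connection configuration assumed
body = {'query': {'kind': [{'name': 'Task'}], 'limit': 10}}
batch = hook.run_query(body)
for result in batch.get('entityResults', []):
    print(result['entity']['key'])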
test
DatastoreHook.get_operation
Gets the latest state of a long-running operation. .. seealso:: https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/get :param name: the name of the operation resource. :type name: str :return: a resource operation instance. :rtype: dict
airflow/contrib/hooks/datastore_hook.py
def get_operation(self, name): """ Gets the latest state of a long-running operation. .. seealso:: https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/get :param name: the name of the operation resource. :type name: str :return: a resource operation instance. :rtype: dict """ conn = self.get_conn() resp = (conn .projects() .operations() .get(name=name) .execute(num_retries=self.num_retries)) return resp
def get_operation(self, name): """ Gets the latest state of a long-running operation. .. seealso:: https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/get :param name: the name of the operation resource. :type name: str :return: a resource operation instance. :rtype: dict """ conn = self.get_conn() resp = (conn .projects() .operations() .get(name=name) .execute(num_retries=self.num_retries)) return resp
[ "Gets", "the", "latest", "state", "of", "a", "long", "-", "running", "operation", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L191-L211
[ "def", "get_operation", "(", "self", ",", "name", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "resp", "=", "(", "conn", ".", "projects", "(", ")", ".", "operations", "(", ")", ".", "get", "(", "name", "=", "name", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DatastoreHook.delete_operation
Deletes the long-running operation. .. seealso:: https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/delete :param name: the name of the operation resource. :type name: str :return: none if successful. :rtype: dict
airflow/contrib/hooks/datastore_hook.py
def delete_operation(self, name): """ Deletes the long-running operation. .. seealso:: https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/delete :param name: the name of the operation resource. :type name: str :return: none if successful. :rtype: dict """ conn = self.get_conn() resp = (conn .projects() .operations() .delete(name=name) .execute(num_retries=self.num_retries)) return resp
def delete_operation(self, name): """ Deletes the long-running operation. .. seealso:: https://cloud.google.com/datastore/docs/reference/data/rest/v1/projects.operations/delete :param name: the name of the operation resource. :type name: str :return: none if successful. :rtype: dict """ conn = self.get_conn() resp = (conn .projects() .operations() .delete(name=name) .execute(num_retries=self.num_retries)) return resp
[ "Deletes", "the", "long", "-", "running", "operation", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L213-L233
[ "def", "delete_operation", "(", "self", ",", "name", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "resp", "=", "(", "conn", ".", "projects", "(", ")", ".", "operations", "(", ")", ".", "delete", "(", "name", "=", "name", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DatastoreHook.poll_operation_until_done
Poll backup operation state until it's completed. :param name: the name of the operation resource :type name: str :param polling_interval_in_seconds: The number of seconds to wait before polling the operation again. :type polling_interval_in_seconds: int :return: a resource operation instance. :rtype: dict
airflow/contrib/hooks/datastore_hook.py
def poll_operation_until_done(self, name, polling_interval_in_seconds): """ Poll backup operation state until it's completed. :param name: the name of the operation resource :type name: str :param polling_interval_in_seconds: The number of seconds to wait before calling another request. :type polling_interval_in_seconds: int :return: a resource operation instance. :rtype: dict """ while True: result = self.get_operation(name) state = result['metadata']['common']['state'] if state == 'PROCESSING': self.log.info('Operation is processing. Re-polling state in {} seconds' .format(polling_interval_in_seconds)) time.sleep(polling_interval_in_seconds) else: return result
def poll_operation_until_done(self, name, polling_interval_in_seconds): """ Poll backup operation state until it's completed. :param name: the name of the operation resource :type name: str :param polling_interval_in_seconds: The number of seconds to wait before calling another request. :type polling_interval_in_seconds: int :return: a resource operation instance. :rtype: dict """ while True: result = self.get_operation(name) state = result['metadata']['common']['state'] if state == 'PROCESSING': self.log.info('Operation is processing. Re-polling state in {} seconds' .format(polling_interval_in_seconds)) time.sleep(polling_interval_in_seconds) else: return result
[ "Poll", "backup", "operation", "state", "until", "it", "s", "completed", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L235-L255
[ "def", "poll_operation_until_done", "(", "self", ",", "name", ",", "polling_interval_in_seconds", ")", ":", "while", "True", ":", "result", "=", "self", ".", "get_operation", "(", "name", ")", "state", "=", "result", "[", "'metadata'", "]", "[", "'common'", "]", "[", "'state'", "]", "if", "state", "==", "'PROCESSING'", ":", "self", ".", "log", ".", "info", "(", "'Operation is processing. Re-polling state in {} seconds'", ".", "format", "(", "polling_interval_in_seconds", ")", ")", "time", ".", "sleep", "(", "polling_interval_in_seconds", ")", "else", ":", "return", "result" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DatastoreHook.export_to_storage_bucket
Export entities from Cloud Datastore to Cloud Storage for backup. .. note:: Keep in mind that this requests the Admin API not the Data API. .. seealso:: https://cloud.google.com/datastore/docs/reference/admin/rest/v1/projects/export :param bucket: The name of the Cloud Storage bucket. :type bucket: str :param namespace: The Cloud Storage namespace path. :type namespace: str :param entity_filter: Description of what data from the project is included in the export. :type entity_filter: dict :param labels: Client-assigned labels. :type labels: dict of str :return: a resource operation instance. :rtype: dict
airflow/contrib/hooks/datastore_hook.py
def export_to_storage_bucket(self, bucket, namespace=None, entity_filter=None, labels=None): """ Export entities from Cloud Datastore to Cloud Storage for backup. .. note:: Keep in mind that this requests the Admin API not the Data API. .. seealso:: https://cloud.google.com/datastore/docs/reference/admin/rest/v1/projects/export :param bucket: The name of the Cloud Storage bucket. :type bucket: str :param namespace: The Cloud Storage namespace path. :type namespace: str :param entity_filter: Description of what data from the project is included in the export. :type entity_filter: dict :param labels: Client-assigned labels. :type labels: dict of str :return: a resource operation instance. :rtype: dict """ admin_conn = self.get_conn() output_uri_prefix = 'gs://' + '/'.join(filter(None, [bucket, namespace])) if not entity_filter: entity_filter = {} if not labels: labels = {} body = { 'outputUrlPrefix': output_uri_prefix, 'entityFilter': entity_filter, 'labels': labels, } resp = (admin_conn .projects() .export(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp
def export_to_storage_bucket(self, bucket, namespace=None, entity_filter=None, labels=None): """ Export entities from Cloud Datastore to Cloud Storage for backup. .. note:: Keep in mind that this requests the Admin API not the Data API. .. seealso:: https://cloud.google.com/datastore/docs/reference/admin/rest/v1/projects/export :param bucket: The name of the Cloud Storage bucket. :type bucket: str :param namespace: The Cloud Storage namespace path. :type namespace: str :param entity_filter: Description of what data from the project is included in the export. :type entity_filter: dict :param labels: Client-assigned labels. :type labels: dict of str :return: a resource operation instance. :rtype: dict """ admin_conn = self.get_conn() output_uri_prefix = 'gs://' + '/'.join(filter(None, [bucket, namespace])) if not entity_filter: entity_filter = {} if not labels: labels = {} body = { 'outputUrlPrefix': output_uri_prefix, 'entityFilter': entity_filter, 'labels': labels, } resp = (admin_conn .projects() .export(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp
[ "Export", "entities", "from", "Cloud", "Datastore", "to", "Cloud", "Storage", "for", "backup", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L257-L295
[ "def", "export_to_storage_bucket", "(", "self", ",", "bucket", ",", "namespace", "=", "None", ",", "entity_filter", "=", "None", ",", "labels", "=", "None", ")", ":", "admin_conn", "=", "self", ".", "get_conn", "(", ")", "output_uri_prefix", "=", "'gs://'", "+", "'/'", ".", "join", "(", "filter", "(", "None", ",", "[", "bucket", ",", "namespace", "]", ")", ")", "if", "not", "entity_filter", ":", "entity_filter", "=", "{", "}", "if", "not", "labels", ":", "labels", "=", "{", "}", "body", "=", "{", "'outputUrlPrefix'", ":", "output_uri_prefix", ",", "'entityFilter'", ":", "entity_filter", ",", "'labels'", ":", "labels", ",", "}", "resp", "=", "(", "admin_conn", ".", "projects", "(", ")", ".", "export", "(", "projectId", "=", "self", ".", "project_id", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
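A sketch combining export_to_storage_bucket with poll_operation_until_done; connection configuration is assumed, the bucket, namespace, and labels are placeholders, and the entity_filter layout follows the Datastore Admin REST API.

from airflow.contrib.hooks.datastore_hook import DatastoreHook

hook = DatastoreHook()  # connection configuration assumed
operation = hook.export_to_storage_bucket(
    bucket='my-backup-bucket',            # placeholder bucket name
    namespace='nightly',
    entity_filter={'kinds': ['Task']},
    labels={'job': 'nightly-backup'},
)
result = hook.poll_operation_until_done(operation['name'], polling_interval_in_seconds=10)
print(result['metadata']['common']['state'])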
test
DatastoreHook.import_from_storage_bucket
Import a backup from Cloud Storage to Cloud Datastore. .. note:: Keep in mind that this requests the Admin API not the Data API. .. seealso:: https://cloud.google.com/datastore/docs/reference/admin/rest/v1/projects/import :param bucket: The name of the Cloud Storage bucket. :type bucket: str :param file: the metadata file written by the projects.export operation. :type file: str :param namespace: The Cloud Storage namespace path. :type namespace: str :param entity_filter: specify which kinds/namespaces are to be imported. :type entity_filter: dict :param labels: Client-assigned labels. :type labels: dict of str :return: a resource operation instance. :rtype: dict
airflow/contrib/hooks/datastore_hook.py
def import_from_storage_bucket(self, bucket, file, namespace=None, entity_filter=None, labels=None): """ Import a backup from Cloud Storage to Cloud Datastore. .. note:: Keep in mind that this requests the Admin API not the Data API. .. seealso:: https://cloud.google.com/datastore/docs/reference/admin/rest/v1/projects/import :param bucket: The name of the Cloud Storage bucket. :type bucket: str :param file: the metadata file written by the projects.export operation. :type file: str :param namespace: The Cloud Storage namespace path. :type namespace: str :param entity_filter: specify which kinds/namespaces are to be imported. :type entity_filter: dict :param labels: Client-assigned labels. :type labels: dict of str :return: a resource operation instance. :rtype: dict """ admin_conn = self.get_conn() input_url = 'gs://' + '/'.join(filter(None, [bucket, namespace, file])) if not entity_filter: entity_filter = {} if not labels: labels = {} body = { 'inputUrl': input_url, 'entityFilter': entity_filter, 'labels': labels, } resp = (admin_conn .projects() .import_(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp
def import_from_storage_bucket(self, bucket, file, namespace=None, entity_filter=None, labels=None): """ Import a backup from Cloud Storage to Cloud Datastore. .. note:: Keep in mind that this requests the Admin API not the Data API. .. seealso:: https://cloud.google.com/datastore/docs/reference/admin/rest/v1/projects/import :param bucket: The name of the Cloud Storage bucket. :type bucket: str :param file: the metadata file written by the projects.export operation. :type file: str :param namespace: The Cloud Storage namespace path. :type namespace: str :param entity_filter: specify which kinds/namespaces are to be imported. :type entity_filter: dict :param labels: Client-assigned labels. :type labels: dict of str :return: a resource operation instance. :rtype: dict """ admin_conn = self.get_conn() input_url = 'gs://' + '/'.join(filter(None, [bucket, namespace, file])) if not entity_filter: entity_filter = {} if not labels: labels = {} body = { 'inputUrl': input_url, 'entityFilter': entity_filter, 'labels': labels, } resp = (admin_conn .projects() .import_(projectId=self.project_id, body=body) .execute(num_retries=self.num_retries)) return resp
[ "Import", "a", "backup", "from", "Cloud", "Storage", "to", "Cloud", "Datastore", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datastore_hook.py#L297-L337
[ "def", "import_from_storage_bucket", "(", "self", ",", "bucket", ",", "file", ",", "namespace", "=", "None", ",", "entity_filter", "=", "None", ",", "labels", "=", "None", ")", ":", "admin_conn", "=", "self", ".", "get_conn", "(", ")", "input_url", "=", "'gs://'", "+", "'/'", ".", "join", "(", "filter", "(", "None", ",", "[", "bucket", ",", "namespace", ",", "file", "]", ")", ")", "if", "not", "entity_filter", ":", "entity_filter", "=", "{", "}", "if", "not", "labels", ":", "labels", "=", "{", "}", "body", "=", "{", "'inputUrl'", ":", "input_url", ",", "'entityFilter'", ":", "entity_filter", ",", "'labels'", ":", "labels", ",", "}", "resp", "=", "(", "admin_conn", ".", "projects", "(", ")", ".", "import_", "(", "projectId", "=", "self", ".", "project_id", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", ")", "return", "resp" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
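A hedged sketch of import_from_storage_bucket; connection configuration is assumed and the bucket, namespace, and metadata file name are placeholders (the real file is the one written by a previous export).

from airflow.contrib.hooks.datastore_hook import DatastoreHook

hook = DatastoreHook()  # connection configuration assumed
operation = hook.import_from_storage_bucket(
    bucket='my-backup-bucket',
    file='export-1.overall_export_metadata',  # placeholder metadata file name
    namespace='nightly',
)
print(operation['name'])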
test
AwsSnsHook.publish_to_target
Publish a message to a topic or an endpoint. :param target_arn: either a TopicArn or an EndpointArn :type target_arn: str :param message: the default message you want to send :type message: str
airflow/contrib/hooks/aws_sns_hook.py
def publish_to_target(self, target_arn, message): """ Publish a message to a topic or an endpoint. :param target_arn: either a TopicArn or an EndpointArn :type target_arn: str :param message: the default message you want to send :param message: str """ conn = self.get_conn() messages = { 'default': message } return conn.publish( TargetArn=target_arn, Message=json.dumps(messages), MessageStructure='json' )
def publish_to_target(self, target_arn, message): """ Publish a message to a topic or an endpoint. :param target_arn: either a TopicArn or an EndpointArn :type target_arn: str :param message: the default message you want to send :param message: str """ conn = self.get_conn() messages = { 'default': message } return conn.publish( TargetArn=target_arn, Message=json.dumps(messages), MessageStructure='json' )
[ "Publish", "a", "message", "to", "a", "topic", "or", "an", "endpoint", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_sns_hook.py#L40-L60
[ "def", "publish_to_target", "(", "self", ",", "target_arn", ",", "message", ")", ":", "conn", "=", "self", ".", "get_conn", "(", ")", "messages", "=", "{", "'default'", ":", "message", "}", "return", "conn", ".", "publish", "(", "TargetArn", "=", "target_arn", ",", "Message", "=", "json", ".", "dumps", "(", "messages", ")", ",", "MessageStructure", "=", "'json'", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
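A minimal sketch of publish_to_target; the aws_conn_id argument and the topic ARN are assumptions/placeholders, and the MessageId field comes from the underlying boto3 publish response.

from airflow.contrib.hooks.aws_sns_hook import AwsSnsHook

hook = AwsSnsHook(aws_conn_id='aws_default')  # connection id is an assumption
response = hook.publish_to_target(
    target_arn='arn:aws:sns:eu-west-1:123456789012:example-topic',  # placeholder ARN
    message='Task finished',
)
print(response['MessageId'])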
test
get_hostname
Fetch the hostname using the callable from the config or using `socket.getfqdn` as a fallback.
airflow/utils/net.py
def get_hostname(): """ Fetch the hostname using the callable from the config or using `socket.getfqdn` as a fallback. """ # First we attempt to fetch the callable path from the config. try: callable_path = conf.get('core', 'hostname_callable') except AirflowConfigException: callable_path = None # Then we handle the case when the config is missing or empty. This is the # default behavior. if not callable_path: return socket.getfqdn() # Since we have a callable path, we try to import and run it next. module_path, attr_name = callable_path.split(':') module = importlib.import_module(module_path) callable = getattr(module, attr_name) return callable()
def get_hostname(): """ Fetch the hostname using the callable from the config or using `socket.getfqdn` as a fallback. """ # First we attempt to fetch the callable path from the config. try: callable_path = conf.get('core', 'hostname_callable') except AirflowConfigException: callable_path = None # Then we handle the case when the config is missing or empty. This is the # default behavior. if not callable_path: return socket.getfqdn() # Since we have a callable path, we try to import and run it next. module_path, attr_name = callable_path.split(':') module = importlib.import_module(module_path) callable = getattr(module, attr_name) return callable()
[ "Fetch", "the", "hostname", "using", "the", "callable", "from", "the", "config", "or", "using", "socket", ".", "getfqdn", "as", "a", "fallback", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/net.py#L25-L45
[ "def", "get_hostname", "(", ")", ":", "# First we attempt to fetch the callable path from the config.", "try", ":", "callable_path", "=", "conf", ".", "get", "(", "'core'", ",", "'hostname_callable'", ")", "except", "AirflowConfigException", ":", "callable_path", "=", "None", "# Then we handle the case when the config is missing or empty. This is the", "# default behavior.", "if", "not", "callable_path", ":", "return", "socket", ".", "getfqdn", "(", ")", "# Since we have a callable path, we try to import and run it next.", "module_path", ",", "attr_name", "=", "callable_path", ".", "split", "(", "':'", ")", "module", "=", "importlib", ".", "import_module", "(", "module_path", ")", "callable", "=", "getattr", "(", "module", ",", "attr_name", ")", "return", "callable", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
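A short sketch of get_hostname and the module:attribute convention it expects for the hostname_callable option; the config snippet in the comment is illustrative.

# airflow.cfg (illustrative):
#   [core]
#   hostname_callable = socket:gethostname
from airflow.utils.net import get_hostname

print(get_hostname())  # with the option unset or empty, this falls back to socket.getfqdn()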
test
CloudNaturalLanguageHook.get_conn
Retrieves connection to Cloud Natural Language service. :return: Cloud Natural Language service object :rtype: google.cloud.language_v1.LanguageServiceClient
airflow/contrib/hooks/gcp_natural_language_hook.py
def get_conn(self): """ Retrieves connection to Cloud Natural Language service. :return: Cloud Natural Language service object :rtype: google.cloud.language_v1.LanguageServiceClient """ if not self._conn: self._conn = LanguageServiceClient(credentials=self._get_credentials()) return self._conn
def get_conn(self): """ Retrieves connection to Cloud Natural Language service. :return: Cloud Natural Language service object :rtype: google.cloud.language_v1.LanguageServiceClient """ if not self._conn: self._conn = LanguageServiceClient(credentials=self._get_credentials()) return self._conn
[ "Retrieves", "connection", "to", "Cloud", "Natural", "Language", "service", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_natural_language_hook.py#L44-L53
[ "def", "get_conn", "(", "self", ")", ":", "if", "not", "self", ".", "_conn", ":", "self", ".", "_conn", "=", "LanguageServiceClient", "(", "credentials", "=", "self", ".", "_get_credentials", "(", ")", ")", "return", "self", ".", "_conn" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CloudNaturalLanguageHook.analyze_entities
Finds named entities in the text along with entity types, salience, mentions for each entity, and other properties. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or google.cloud.language_v1.types.Document :param encoding_type: The encoding type used by the API to calculate offsets. :type encoding_type: google.cloud.language_v1.types.EncodingType :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]] :rtype: google.cloud.language_v1.types.AnalyzeEntitiesResponse
airflow/contrib/hooks/gcp_natural_language_hook.py
def analyze_entities(self, document, encoding_type=None, retry=None, timeout=None, metadata=None): """ Finds named entities in the text along with entity types, salience, mentions for each entity, and other properties. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or class google.cloud.language_v1.types.Document :param encoding_type: The encoding type used by the API to calculate offsets. :type encoding_type: google.cloud.language_v1.types.EncodingType :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]]] :rtype: google.cloud.language_v1.types.AnalyzeEntitiesResponse """ client = self.get_conn() return client.analyze_entities( document=document, encoding_type=encoding_type, retry=retry, timeout=timeout, metadata=metadata )
def analyze_entities(self, document, encoding_type=None, retry=None, timeout=None, metadata=None): """ Finds named entities in the text along with entity types, salience, mentions for each entity, and other properties. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or class google.cloud.language_v1.types.Document :param encoding_type: The encoding type used by the API to calculate offsets. :type encoding_type: google.cloud.language_v1.types.EncodingType :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]]] :rtype: google.cloud.language_v1.types.AnalyzeEntitiesResponse """ client = self.get_conn() return client.analyze_entities( document=document, encoding_type=encoding_type, retry=retry, timeout=timeout, metadata=metadata )
[ "Finds", "named", "entities", "in", "the", "text", "along", "with", "entity", "types", "salience", "mentions", "for", "each", "entity", "and", "other", "properties", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_natural_language_hook.py#L56-L80
[ "def", "analyze_entities", "(", "self", ",", "document", ",", "encoding_type", "=", "None", ",", "retry", "=", "None", ",", "timeout", "=", "None", ",", "metadata", "=", "None", ")", ":", "client", "=", "self", ".", "get_conn", "(", ")", "return", "client", ".", "analyze_entities", "(", "document", "=", "document", ",", "encoding_type", "=", "encoding_type", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
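A hedged sketch of analyze_entities; the no-argument hook construction assumes a default GCP connection, the sample sentence is arbitrary, and the enums import assumes the google-cloud-language client library used by this hook is installed.

from airflow.contrib.hooks.gcp_natural_language_hook import CloudNaturalLanguageHook
from google.cloud.language_v1 import enums  # client enum module (assumption)

hook = CloudNaturalLanguageHook()  # GCP connection configuration assumed
document = {
    'content': 'Apache Airflow is a platform to programmatically author workflows.',
    'type': enums.Document.Type.PLAIN_TEXT,  # dict form of the Document message
}
response = hook.analyze_entities(document=document, encoding_type=enums.EncodingType.UTF8)
for entity in response.entities:
    print(entity.name, entity.salience)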
test
CloudNaturalLanguageHook.annotate_text
A convenience method that provides all the features that analyzeSentiment, analyzeEntities, and analyzeSyntax provide in one call. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or google.cloud.language_v1.types.Document :param features: The enabled features. If a dict is provided, it must be of the same form as the protobuf message Features :type features: dict or google.cloud.language_v1.enums.Features :param encoding_type: The encoding type used by the API to calculate offsets. :type encoding_type: google.cloud.language_v1.types.EncodingType :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]] :rtype: google.cloud.language_v1.types.AnnotateTextResponse
airflow/contrib/hooks/gcp_natural_language_hook.py
def annotate_text(self, document, features, encoding_type=None, retry=None, timeout=None, metadata=None): """ A convenience method that provides all the features that analyzeSentiment, analyzeEntities, and analyzeSyntax provide in one call. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or google.cloud.language_v1.types.Document :param features: The enabled features. If a dict is provided, it must be of the same form as the protobuf message Features :type features: dict or google.cloud.language_v1.enums.Features :param encoding_type: The encoding type used by the API to calculate offsets. :type encoding_type: google.cloud.language_v1.types.EncodingType :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]]] :rtype: google.cloud.language_v1.types.AnnotateTextResponse """ client = self.get_conn() return client.annotate_text( document=document, features=features, encoding_type=encoding_type, retry=retry, timeout=timeout, metadata=metadata, )
def annotate_text(self, document, features, encoding_type=None, retry=None, timeout=None, metadata=None): """ A convenience method that provides all the features that analyzeSentiment, analyzeEntities, and analyzeSyntax provide in one call. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or google.cloud.language_v1.types.Document :param features: The enabled features. If a dict is provided, it must be of the same form as the protobuf message Features :type features: dict or google.cloud.language_v1.enums.Features :param encoding_type: The encoding type used by the API to calculate offsets. :type encoding_type: google.cloud.language_v1.types.EncodingType :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]]] :rtype: google.cloud.language_v1.types.AnnotateTextResponse """ client = self.get_conn() return client.annotate_text( document=document, features=features, encoding_type=encoding_type, retry=retry, timeout=timeout, metadata=metadata, )
[ "A", "convenience", "method", "that", "provides", "all", "the", "features", "that", "analyzeSentiment", "analyzeEntities", "and", "analyzeSyntax", "provide", "in", "one", "call", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_natural_language_hook.py#L163-L195
[ "def", "annotate_text", "(", "self", ",", "document", ",", "features", ",", "encoding_type", "=", "None", ",", "retry", "=", "None", ",", "timeout", "=", "None", ",", "metadata", "=", "None", ")", ":", "client", "=", "self", ".", "get_conn", "(", ")", "return", "client", ".", "annotate_text", "(", "document", "=", "document", ",", "features", "=", "features", ",", "encoding_type", "=", "encoding_type", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ",", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
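The `annotate_text` record above bundles several analyses into one request. A hedged sketch, assuming the dict form of the request's `Features` message with two commonly used flags:

```python
# Hypothetical sketch: the feature flags are assumed field names of the
# AnnotateTextRequest.Features message.
from google.cloud.language_v1 import enums, types

from airflow.contrib.hooks.gcp_natural_language_hook import CloudNaturalLanguageHook

hook = CloudNaturalLanguageHook()
document = types.Document(
    content="Airflow schedules and monitors workflows.",
    type=enums.Document.Type.PLAIN_TEXT,
)
features = {"extract_entities": True, "extract_document_sentiment": True}

response = hook.annotate_text(document=document, features=features)
print(response.document_sentiment.score)
for entity in response.entities:
    print(entity.name, entity.salience)
```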
test
CloudNaturalLanguageHook.classify_text
Classifies a document into categories. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or google.cloud.language_v1.types.Document :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]] :rtype: google.cloud.language_v1.types.ClassifyTextResponse
airflow/contrib/hooks/gcp_natural_language_hook.py
def classify_text(self, document, retry=None, timeout=None, metadata=None): """ Classifies a document into categories. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or class google.cloud.language_v1.types.Document :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]]] :rtype: google.cloud.language_v1.types.AnalyzeEntitiesResponse """ client = self.get_conn() return client.classify_text(document=document, retry=retry, timeout=timeout, metadata=metadata)
def classify_text(self, document, retry=None, timeout=None, metadata=None): """ Classifies a document into categories. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or class google.cloud.language_v1.types.Document :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]]] :rtype: google.cloud.language_v1.types.AnalyzeEntitiesResponse """ client = self.get_conn() return client.classify_text(document=document, retry=retry, timeout=timeout, metadata=metadata)
[ "Classifies", "a", "document", "into", "categories", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_natural_language_hook.py#L198-L217
[ "def", "classify_text", "(", "self", ",", "document", ",", "retry", "=", "None", ",", "timeout", "=", "None", ",", "metadata", "=", "None", ")", ":", "client", "=", "self", ".", "get_conn", "(", ")", "return", "client", ".", "classify_text", "(", "document", "=", "document", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
get_task
Return the task object identified by the given dag_id and task_id.
airflow/api/common/experimental/get_task.py
def get_task(dag_id, task_id): """Return the task object identified by the given dag_id and task_id.""" dagbag = DagBag() # Check DAG exists. if dag_id not in dagbag.dags: error_message = "Dag id {} not found".format(dag_id) raise DagNotFound(error_message) # Get DAG object and check Task Exists dag = dagbag.get_dag(dag_id) if not dag.has_task(task_id): error_message = 'Task {} not found in dag {}'.format(task_id, dag_id) raise TaskNotFound(error_message) # Return the task. return dag.get_task(task_id)
def get_task(dag_id, task_id): """Return the task object identified by the given dag_id and task_id.""" dagbag = DagBag() # Check DAG exists. if dag_id not in dagbag.dags: error_message = "Dag id {} not found".format(dag_id) raise DagNotFound(error_message) # Get DAG object and check Task Exists dag = dagbag.get_dag(dag_id) if not dag.has_task(task_id): error_message = 'Task {} not found in dag {}'.format(task_id, dag_id) raise TaskNotFound(error_message) # Return the task. return dag.get_task(task_id)
[ "Return", "the", "task", "object", "identified", "by", "the", "given", "dag_id", "and", "task_id", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/get_task.py#L24-L40
[ "def", "get_task", "(", "dag_id", ",", "task_id", ")", ":", "dagbag", "=", "DagBag", "(", ")", "# Check DAG exists.", "if", "dag_id", "not", "in", "dagbag", ".", "dags", ":", "error_message", "=", "\"Dag id {} not found\"", ".", "format", "(", "dag_id", ")", "raise", "DagNotFound", "(", "error_message", ")", "# Get DAG object and check Task Exists", "dag", "=", "dagbag", ".", "get_dag", "(", "dag_id", ")", "if", "not", "dag", ".", "has_task", "(", "task_id", ")", ":", "error_message", "=", "'Task {} not found in dag {}'", ".", "format", "(", "task_id", ",", "dag_id", ")", "raise", "TaskNotFound", "(", "error_message", ")", "# Return the task.", "return", "dag", ".", "get_task", "(", "task_id", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
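A short, hypothetical sketch of calling the experimental `get_task` helper; the DAG and task ids are placeholders and must exist in the configured DAGs folder.

```python
# Hypothetical ids; the exception classes are assumed to live in airflow.exceptions.
from airflow.api.common.experimental.get_task import get_task
from airflow.exceptions import DagNotFound, TaskNotFound

try:
    task = get_task(dag_id="example_dag", task_id="example_task")
    print(task.task_id, task.owner)
except (DagNotFound, TaskNotFound) as err:
    print("Lookup failed: {}".format(err))
```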
test
get_template_field
Gets template fields for a specific operator class. :param fullname: Full path to the operator class. For example: ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator`` :return: List of template fields :rtype: list[str]
docs/exts/docroles.py
def get_template_field(env, fullname): """ Gets template fields for specific operator class. :param fullname: Full path to operator class. For example: ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator`` :return: List of template field :rtype: list[str] """ modname, classname = fullname.rsplit(".", 1) try: with mock(env.config.autodoc_mock_imports): mod = import_module(modname) except ImportError: raise RoleException("Error loading %s module." % (modname, )) clazz = getattr(mod, classname) if not clazz: raise RoleException("Error finding %s class in %s module." % (classname, modname)) template_fields = getattr(clazz, "template_fields") if not template_fields: raise RoleException( "Could not find the template fields for %s class in %s module." % (classname, modname) ) return list(template_fields)
def get_template_field(env, fullname): """ Gets template fields for specific operator class. :param fullname: Full path to operator class. For example: ``airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator`` :return: List of template field :rtype: list[str] """ modname, classname = fullname.rsplit(".", 1) try: with mock(env.config.autodoc_mock_imports): mod = import_module(modname) except ImportError: raise RoleException("Error loading %s module." % (modname, )) clazz = getattr(mod, classname) if not clazz: raise RoleException("Error finding %s class in %s module." % (classname, modname)) template_fields = getattr(clazz, "template_fields") if not template_fields: raise RoleException( "Could not find the template fields for %s class in %s module." % (classname, modname) ) return list(template_fields)
[ "Gets", "template", "fields", "for", "specific", "operator", "class", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/docs/exts/docroles.py#L27-L55
[ "def", "get_template_field", "(", "env", ",", "fullname", ")", ":", "modname", ",", "classname", "=", "fullname", ".", "rsplit", "(", "\".\"", ",", "1", ")", "try", ":", "with", "mock", "(", "env", ".", "config", ".", "autodoc_mock_imports", ")", ":", "mod", "=", "import_module", "(", "modname", ")", "except", "ImportError", ":", "raise", "RoleException", "(", "\"Error loading %s module.\"", "%", "(", "modname", ",", ")", ")", "clazz", "=", "getattr", "(", "mod", ",", "classname", ")", "if", "not", "clazz", ":", "raise", "RoleException", "(", "\"Error finding %s class in %s module.\"", "%", "(", "classname", ",", "modname", ")", ")", "template_fields", "=", "getattr", "(", "clazz", ",", "\"template_fields\"", ")", "if", "not", "template_fields", ":", "raise", "RoleException", "(", "\"Could not find the template fields for %s class in %s module.\"", "%", "(", "classname", ",", "modname", ")", ")", "return", "list", "(", "template_fields", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
template_field_role
A role that allows you to include a list of template fields in the middle of the text. This is especially useful when writing guides describing how to use the operator. The result is a list of fields where each field is shown in a literal block. Sample usage:: :template-fields:`airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator` For further information, look at: * [Creating reStructuredText Interpreted Text Roles](http://docutils.sourceforge.net/docs/howto/rst-roles.html)
docs/exts/docroles.py
def template_field_role(app, typ, rawtext, text, lineno, inliner, options={}, content=[]): """ A role that allows you to include a list of template fields in the middle of the text. This is especially useful when writing guides describing how to use the operator. The result is a list of fields where each field is shorted in the literal block. Sample usage:: :template-fields:`airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator` For further information look at: * [http://docutils.sourceforge.net/docs/howto/rst-roles.html](Creating reStructuredText Interpreted Text Roles) """ text = utils.unescape(text) try: template_fields = get_template_field(app.env, text) except RoleException as e: msg = inliner.reporter.error("invalid class name %s \n%s" % (text, e, ), line=lineno) prb = inliner.problematic(rawtext, rawtext, msg) return [prb], [msg] node = nodes.inline(rawtext=rawtext) for i, field in enumerate(template_fields): if i != 0: node += nodes.Text(", ") node += nodes.literal(field, "", nodes.Text(field)) return [node], []
def template_field_role(app, typ, rawtext, text, lineno, inliner, options={}, content=[]): """ A role that allows you to include a list of template fields in the middle of the text. This is especially useful when writing guides describing how to use the operator. The result is a list of fields where each field is shorted in the literal block. Sample usage:: :template-fields:`airflow.contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator` For further information look at: * [http://docutils.sourceforge.net/docs/howto/rst-roles.html](Creating reStructuredText Interpreted Text Roles) """ text = utils.unescape(text) try: template_fields = get_template_field(app.env, text) except RoleException as e: msg = inliner.reporter.error("invalid class name %s \n%s" % (text, e, ), line=lineno) prb = inliner.problematic(rawtext, rawtext, msg) return [prb], [msg] node = nodes.inline(rawtext=rawtext) for i, field in enumerate(template_fields): if i != 0: node += nodes.Text(", ") node += nodes.literal(field, "", nodes.Text(field)) return [node], []
[ "A", "role", "that", "allows", "you", "to", "include", "a", "list", "of", "template", "fields", "in", "the", "middle", "of", "the", "text", ".", "This", "is", "especially", "useful", "when", "writing", "guides", "describing", "how", "to", "use", "the", "operator", ".", "The", "result", "is", "a", "list", "of", "fields", "where", "each", "field", "is", "shorted", "in", "the", "literal", "block", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/docs/exts/docroles.py#L58-L88
[ "def", "template_field_role", "(", "app", ",", "typ", ",", "rawtext", ",", "text", ",", "lineno", ",", "inliner", ",", "options", "=", "{", "}", ",", "content", "=", "[", "]", ")", ":", "text", "=", "utils", ".", "unescape", "(", "text", ")", "try", ":", "template_fields", "=", "get_template_field", "(", "app", ".", "env", ",", "text", ")", "except", "RoleException", "as", "e", ":", "msg", "=", "inliner", ".", "reporter", ".", "error", "(", "\"invalid class name %s \\n%s\"", "%", "(", "text", ",", "e", ",", ")", ",", "line", "=", "lineno", ")", "prb", "=", "inliner", ".", "problematic", "(", "rawtext", ",", "rawtext", ",", "msg", ")", "return", "[", "prb", "]", ",", "[", "msg", "]", "node", "=", "nodes", ".", "inline", "(", "rawtext", "=", "rawtext", ")", "for", "i", ",", "field", "in", "enumerate", "(", "template_fields", ")", ":", "if", "i", "!=", "0", ":", "node", "+=", "nodes", ".", "Text", "(", "\", \"", ")", "node", "+=", "nodes", ".", "literal", "(", "field", ",", "\"\"", ",", "nodes", ".", "Text", "(", "field", ")", ")", "return", "[", "node", "]", ",", "[", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
dispose_orm
Properly close pooled database connections
airflow/settings.py
def dispose_orm(): """ Properly close pooled database connections """ log.debug("Disposing DB connection pool (PID %s)", os.getpid()) global engine global Session if Session: Session.remove() Session = None if engine: engine.dispose() engine = None
def dispose_orm(): """ Properly close pooled database connections """ log.debug("Disposing DB connection pool (PID %s)", os.getpid()) global engine global Session if Session: Session.remove() Session = None if engine: engine.dispose() engine = None
[ "Properly", "close", "pooled", "database", "connections" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/settings.py#L166-L177
[ "def", "dispose_orm", "(", ")", ":", "log", ".", "debug", "(", "\"Disposing DB connection pool (PID %s)\"", ",", "os", ".", "getpid", "(", ")", ")", "global", "engine", "global", "Session", "if", "Session", ":", "Session", ".", "remove", "(", ")", "Session", "=", "None", "if", "engine", ":", "engine", ".", "dispose", "(", ")", "engine", "=", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
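A tiny, hedged sketch of where `dispose_orm` is typically useful: right after forking, so the child process does not reuse the parent's pooled connections.

```python
# Illustration only; settings.configure_orm() is assumed to rebuild the pool.
import os

from airflow import settings

pid = os.fork()
if pid == 0:
    settings.dispose_orm()    # close pooled connections inherited from the parent
    settings.configure_orm()  # give this child its own engine and Session
```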
test
prepare_classpath
Ensures that certain subfolders of AIRFLOW_HOME are on the classpath
airflow/settings.py
def prepare_classpath(): """ Ensures that certain subfolders of AIRFLOW_HOME are on the classpath """ if DAGS_FOLDER not in sys.path: sys.path.append(DAGS_FOLDER) # Add ./config/ for loading custom log parsers etc, or # airflow_local_settings etc. config_path = os.path.join(AIRFLOW_HOME, 'config') if config_path not in sys.path: sys.path.append(config_path) if PLUGINS_FOLDER not in sys.path: sys.path.append(PLUGINS_FOLDER)
def prepare_classpath(): """ Ensures that certain subfolders of AIRFLOW_HOME are on the classpath """ if DAGS_FOLDER not in sys.path: sys.path.append(DAGS_FOLDER) # Add ./config/ for loading custom log parsers etc, or # airflow_local_settings etc. config_path = os.path.join(AIRFLOW_HOME, 'config') if config_path not in sys.path: sys.path.append(config_path) if PLUGINS_FOLDER not in sys.path: sys.path.append(PLUGINS_FOLDER)
[ "Ensures", "that", "certain", "subfolders", "of", "AIRFLOW_HOME", "are", "on", "the", "classpath" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/settings.py#L220-L235
[ "def", "prepare_classpath", "(", ")", ":", "if", "DAGS_FOLDER", "not", "in", "sys", ".", "path", ":", "sys", ".", "path", ".", "append", "(", "DAGS_FOLDER", ")", "# Add ./config/ for loading custom log parsers etc, or", "# airflow_local_settings etc.", "config_path", "=", "os", ".", "path", ".", "join", "(", "AIRFLOW_HOME", ",", "'config'", ")", "if", "config_path", "not", "in", "sys", ".", "path", ":", "sys", ".", "path", ".", "append", "(", "config_path", ")", "if", "PLUGINS_FOLDER", "not", "in", "sys", ".", "path", ":", "sys", ".", "path", ".", "append", "(", "PLUGINS_FOLDER", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CeleryQueueSensor._check_task_id
Gets the returned Celery result from the Airflow task ID provided to the sensor, and returns True if the Celery result has finished execution. :param context: Airflow's execution context :type context: dict :return: True if the task has been executed, otherwise False :rtype: bool
airflow/contrib/sensors/celery_queue_sensor.py
def _check_task_id(self, context): """ Gets the returned Celery result from the Airflow task ID provided to the sensor, and returns True if the celery result has been finished execution. :param context: Airflow's execution context :type context: dict :return: True if task has been executed, otherwise False :rtype: bool """ ti = context['ti'] celery_result = ti.xcom_pull(task_ids=self.target_task_id) return celery_result.ready()
def _check_task_id(self, context): """ Gets the returned Celery result from the Airflow task ID provided to the sensor, and returns True if the celery result has been finished execution. :param context: Airflow's execution context :type context: dict :return: True if task has been executed, otherwise False :rtype: bool """ ti = context['ti'] celery_result = ti.xcom_pull(task_ids=self.target_task_id) return celery_result.ready()
[ "Gets", "the", "returned", "Celery", "result", "from", "the", "Airflow", "task", "ID", "provided", "to", "the", "sensor", "and", "returns", "True", "if", "the", "celery", "result", "has", "been", "finished", "execution", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/celery_queue_sensor.py#L49-L62
[ "def", "_check_task_id", "(", "self", ",", "context", ")", ":", "ti", "=", "context", "[", "'ti'", "]", "celery_result", "=", "ti", ".", "xcom_pull", "(", "task_ids", "=", "self", ".", "target_task_id", ")", "return", "celery_result", ".", "ready", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
detect_conf_var
Return true if the ticket cache contains "conf" information as is found in ticket caches of Kerberos 1.8.1 or later. This is incompatible with the Sun Java Krb5LoginModule in Java6, so we need to take an action to work around it.
airflow/security/kerberos.py
def detect_conf_var(): """Return true if the ticket cache contains "conf" information as is found in ticket caches of Kerberos 1.8.1 or later. This is incompatible with the Sun Java Krb5LoginModule in Java6, so we need to take an action to work around it. """ ticket_cache = configuration.conf.get('kerberos', 'ccache') with open(ticket_cache, 'rb') as f: # Note: this file is binary, so we check against a bytearray. return b'X-CACHECONF:' in f.read()
def detect_conf_var(): """Return true if the ticket cache contains "conf" information as is found in ticket caches of Kerberos 1.8.1 or later. This is incompatible with the Sun Java Krb5LoginModule in Java6, so we need to take an action to work around it. """ ticket_cache = configuration.conf.get('kerberos', 'ccache') with open(ticket_cache, 'rb') as f: # Note: this file is binary, so we check against a bytearray. return b'X-CACHECONF:' in f.read()
[ "Return", "true", "if", "the", "ticket", "cache", "contains", "conf", "information", "as", "is", "found", "in", "ticket", "caches", "of", "Kerberos", "1", ".", "8", ".", "1", "or", "later", ".", "This", "is", "incompatible", "with", "the", "Sun", "Java", "Krb5LoginModule", "in", "Java6", "so", "we", "need", "to", "take", "an", "action", "to", "work", "around", "it", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/security/kerberos.py#L100-L110
[ "def", "detect_conf_var", "(", ")", ":", "ticket_cache", "=", "configuration", ".", "conf", ".", "get", "(", "'kerberos'", ",", "'ccache'", ")", "with", "open", "(", "ticket_cache", ",", "'rb'", ")", "as", "f", ":", "# Note: this file is binary, so we check against a bytearray.", "return", "b'X-CACHECONF:'", "in", "f", ".", "read", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
alchemy_to_dict
Transforms a SQLAlchemy model instance into a dictionary
airflow/utils/helpers.py
def alchemy_to_dict(obj): """ Transforms a SQLAlchemy model instance into a dictionary """ if not obj: return None d = {} for c in obj.__table__.columns: value = getattr(obj, c.name) if type(value) == datetime: value = value.isoformat() d[c.name] = value return d
def alchemy_to_dict(obj): """ Transforms a SQLAlchemy model instance into a dictionary """ if not obj: return None d = {} for c in obj.__table__.columns: value = getattr(obj, c.name) if type(value) == datetime: value = value.isoformat() d[c.name] = value return d
[ "Transforms", "a", "SQLAlchemy", "model", "instance", "into", "a", "dictionary" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/helpers.py#L60-L72
[ "def", "alchemy_to_dict", "(", "obj", ")", ":", "if", "not", "obj", ":", "return", "None", "d", "=", "{", "}", "for", "c", "in", "obj", ".", "__table__", ".", "columns", ":", "value", "=", "getattr", "(", "obj", ",", "c", ".", "name", ")", "if", "type", "(", "value", ")", "==", "datetime", ":", "value", "=", "value", ".", "isoformat", "(", ")", "d", "[", "c", ".", "name", "]", "=", "value", "return", "d" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
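A self-contained sketch of `alchemy_to_dict` using a throwaway SQLAlchemy model (the model is invented for illustration); datetime columns come back as ISO-format strings.

```python
from datetime import datetime

from sqlalchemy import Column, DateTime, Integer, String
from sqlalchemy.ext.declarative import declarative_base

from airflow.utils.helpers import alchemy_to_dict

Base = declarative_base()


class Event(Base):  # throwaway model, not part of Airflow
    __tablename__ = "event"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    created_at = Column(DateTime)


row = Event(id=1, name="run", created_at=datetime(2019, 1, 1, 12, 0))
print(alchemy_to_dict(row))
# {'id': 1, 'name': 'run', 'created_at': '2019-01-01T12:00:00'}
```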
test
chunks
Yield successive chunks of a given size from a list of items
airflow/utils/helpers.py
def chunks(items, chunk_size): """ Yield successive chunks of a given size from a list of items """ if chunk_size <= 0: raise ValueError('Chunk size must be a positive integer') for i in range(0, len(items), chunk_size): yield items[i:i + chunk_size]
def chunks(items, chunk_size): """ Yield successive chunks of a given size from a list of items """ if chunk_size <= 0: raise ValueError('Chunk size must be a positive integer') for i in range(0, len(items), chunk_size): yield items[i:i + chunk_size]
[ "Yield", "successive", "chunks", "of", "a", "given", "size", "from", "a", "list", "of", "items" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/helpers.py#L121-L128
[ "def", "chunks", "(", "items", ",", "chunk_size", ")", ":", "if", "chunk_size", "<=", "0", ":", "raise", "ValueError", "(", "'Chunk size must be a positive integer'", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "items", ")", ",", "chunk_size", ")", ":", "yield", "items", "[", "i", ":", "i", "+", "chunk_size", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
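A quick illustration of the `chunks` generator (values are arbitrary):

```python
from airflow.utils.helpers import chunks

print(list(chunks([1, 2, 3, 4, 5], 2)))  # [[1, 2], [3, 4], [5]]

# A non-positive chunk_size raises:
# list(chunks([1, 2], 0))  ->  ValueError: Chunk size must be a positive integer
```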
test
reduce_in_chunks
Reduce the given list of items by splitting it into chunks of the given size and passing each chunk through the reducer
airflow/utils/helpers.py
def reduce_in_chunks(fn, iterable, initializer, chunk_size=0): """ Reduce the given list of items by splitting it into chunks of the given size and passing each chunk through the reducer """ if len(iterable) == 0: return initializer if chunk_size == 0: chunk_size = len(iterable) return reduce(fn, chunks(iterable, chunk_size), initializer)
def reduce_in_chunks(fn, iterable, initializer, chunk_size=0): """ Reduce the given list of items by splitting it into chunks of the given size and passing each chunk through the reducer """ if len(iterable) == 0: return initializer if chunk_size == 0: chunk_size = len(iterable) return reduce(fn, chunks(iterable, chunk_size), initializer)
[ "Reduce", "the", "given", "list", "of", "items", "by", "splitting", "it", "into", "chunks", "of", "the", "given", "size", "and", "passing", "each", "chunk", "through", "the", "reducer" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/helpers.py#L131-L140
[ "def", "reduce_in_chunks", "(", "fn", ",", "iterable", ",", "initializer", ",", "chunk_size", "=", "0", ")", ":", "if", "len", "(", "iterable", ")", "==", "0", ":", "return", "initializer", "if", "chunk_size", "==", "0", ":", "chunk_size", "=", "len", "(", "iterable", ")", "return", "reduce", "(", "fn", ",", "chunks", "(", "iterable", ",", "chunk_size", ")", ",", "initializer", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
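A small worked example of `reduce_in_chunks`: the reducer receives whole chunks rather than single items (numbers are arbitrary).

```python
from airflow.utils.helpers import reduce_in_chunks

# Chunks of size 2 are [1, 2], [3, 4], [5]; the running sums reach 15.
total = reduce_in_chunks(lambda acc, chunk: acc + sum(chunk), [1, 2, 3, 4, 5], 0, chunk_size=2)
print(total)  # 15

# chunk_size=0 (the default) treats the whole list as one chunk.
print(reduce_in_chunks(lambda acc, chunk: acc + len(chunk), [1, 2, 3, 4, 5], 0))  # 5
```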
test
chain
Given a number of tasks, builds a dependency chain. chain(task_1, task_2, task_3, task_4) is equivalent to task_1.set_downstream(task_2) task_2.set_downstream(task_3) task_3.set_downstream(task_4)
airflow/utils/helpers.py
def chain(*tasks): """ Given a number of tasks, builds a dependency chain. chain(task_1, task_2, task_3, task_4) is equivalent to task_1.set_downstream(task_2) task_2.set_downstream(task_3) task_3.set_downstream(task_4) """ for up_task, down_task in zip(tasks[:-1], tasks[1:]): up_task.set_downstream(down_task)
def chain(*tasks): """ Given a number of tasks, builds a dependency chain. chain(task_1, task_2, task_3, task_4) is equivalent to task_1.set_downstream(task_2) task_2.set_downstream(task_3) task_3.set_downstream(task_4) """ for up_task, down_task in zip(tasks[:-1], tasks[1:]): up_task.set_downstream(down_task)
[ "Given", "a", "number", "of", "tasks", "builds", "a", "dependency", "chain", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/helpers.py#L153-L166
[ "def", "chain", "(", "*", "tasks", ")", ":", "for", "up_task", ",", "down_task", "in", "zip", "(", "tasks", "[", ":", "-", "1", "]", ",", "tasks", "[", "1", ":", "]", ")", ":", "up_task", ".", "set_downstream", "(", "down_task", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
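A hedged sketch of `chain` wiring three dummy tasks in sequence; the DAG id, start date and operator choice are placeholders.

```python
# Hypothetical DAG purely to show the wiring.
from datetime import datetime

from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils.helpers import chain

dag = DAG("chain_demo", start_date=datetime(2019, 1, 1), schedule_interval=None)

t1 = DummyOperator(task_id="t1", dag=dag)
t2 = DummyOperator(task_id="t2", dag=dag)
t3 = DummyOperator(task_id="t3", dag=dag)

chain(t1, t2, t3)  # equivalent to t1 >> t2 >> t3
```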
test
pprinttable
Returns a pretty ASCII table from tuples. If namedtuples are used, the table will have headers
airflow/utils/helpers.py
def pprinttable(rows): """Returns a pretty ascii table from tuples If namedtuple are used, the table will have headers """ if not rows: return if hasattr(rows[0], '_fields'): # if namedtuple headers = rows[0]._fields else: headers = ["col{}".format(i) for i in range(len(rows[0]))] lens = [len(s) for s in headers] for row in rows: for i in range(len(rows[0])): slenght = len("{}".format(row[i])) if slenght > lens[i]: lens[i] = slenght formats = [] hformats = [] for i in range(len(rows[0])): if isinstance(rows[0][i], int): formats.append("%%%dd" % lens[i]) else: formats.append("%%-%ds" % lens[i]) hformats.append("%%-%ds" % lens[i]) pattern = " | ".join(formats) hpattern = " | ".join(hformats) separator = "-+-".join(['-' * n for n in lens]) s = "" s += separator + '\n' s += (hpattern % tuple(headers)) + '\n' s += separator + '\n' def f(t): return "{}".format(t) if isinstance(t, basestring) else t for line in rows: s += pattern % tuple(f(t) for t in line) + '\n' s += separator + '\n' return s
def pprinttable(rows): """Returns a pretty ascii table from tuples If namedtuple are used, the table will have headers """ if not rows: return if hasattr(rows[0], '_fields'): # if namedtuple headers = rows[0]._fields else: headers = ["col{}".format(i) for i in range(len(rows[0]))] lens = [len(s) for s in headers] for row in rows: for i in range(len(rows[0])): slenght = len("{}".format(row[i])) if slenght > lens[i]: lens[i] = slenght formats = [] hformats = [] for i in range(len(rows[0])): if isinstance(rows[0][i], int): formats.append("%%%dd" % lens[i]) else: formats.append("%%-%ds" % lens[i]) hformats.append("%%-%ds" % lens[i]) pattern = " | ".join(formats) hpattern = " | ".join(hformats) separator = "-+-".join(['-' * n for n in lens]) s = "" s += separator + '\n' s += (hpattern % tuple(headers)) + '\n' s += separator + '\n' def f(t): return "{}".format(t) if isinstance(t, basestring) else t for line in rows: s += pattern % tuple(f(t) for t in line) + '\n' s += separator + '\n' return s
[ "Returns", "a", "pretty", "ascii", "table", "from", "tuples" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/helpers.py#L200-L240
[ "def", "pprinttable", "(", "rows", ")", ":", "if", "not", "rows", ":", "return", "if", "hasattr", "(", "rows", "[", "0", "]", ",", "'_fields'", ")", ":", "# if namedtuple", "headers", "=", "rows", "[", "0", "]", ".", "_fields", "else", ":", "headers", "=", "[", "\"col{}\"", ".", "format", "(", "i", ")", "for", "i", "in", "range", "(", "len", "(", "rows", "[", "0", "]", ")", ")", "]", "lens", "=", "[", "len", "(", "s", ")", "for", "s", "in", "headers", "]", "for", "row", "in", "rows", ":", "for", "i", "in", "range", "(", "len", "(", "rows", "[", "0", "]", ")", ")", ":", "slenght", "=", "len", "(", "\"{}\"", ".", "format", "(", "row", "[", "i", "]", ")", ")", "if", "slenght", ">", "lens", "[", "i", "]", ":", "lens", "[", "i", "]", "=", "slenght", "formats", "=", "[", "]", "hformats", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "rows", "[", "0", "]", ")", ")", ":", "if", "isinstance", "(", "rows", "[", "0", "]", "[", "i", "]", ",", "int", ")", ":", "formats", ".", "append", "(", "\"%%%dd\"", "%", "lens", "[", "i", "]", ")", "else", ":", "formats", ".", "append", "(", "\"%%-%ds\"", "%", "lens", "[", "i", "]", ")", "hformats", ".", "append", "(", "\"%%-%ds\"", "%", "lens", "[", "i", "]", ")", "pattern", "=", "\" | \"", ".", "join", "(", "formats", ")", "hpattern", "=", "\" | \"", ".", "join", "(", "hformats", ")", "separator", "=", "\"-+-\"", ".", "join", "(", "[", "'-'", "*", "n", "for", "n", "in", "lens", "]", ")", "s", "=", "\"\"", "s", "+=", "separator", "+", "'\\n'", "s", "+=", "(", "hpattern", "%", "tuple", "(", "headers", ")", ")", "+", "'\\n'", "s", "+=", "separator", "+", "'\\n'", "def", "f", "(", "t", ")", ":", "return", "\"{}\"", ".", "format", "(", "t", ")", "if", "isinstance", "(", "t", ",", "basestring", ")", "else", "t", "for", "line", "in", "rows", ":", "s", "+=", "pattern", "%", "tuple", "(", "f", "(", "t", ")", "for", "t", "in", "line", ")", "+", "'\\n'", "s", "+=", "separator", "+", "'\\n'", "return", "s" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
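An illustrative call to `pprinttable` with namedtuple rows (the data is made up); with namedtuples, the field names become the header row. It relies on the module's own Python 2/3 compatibility imports for `basestring`.

```python
from collections import namedtuple

from airflow.utils.helpers import pprinttable

Row = namedtuple("Row", ["task_id", "tries"])
rows = [Row("extract", 1), Row("load", 3)]

print(pprinttable(rows))
# --------+------
# task_id | tries
# --------+------
# extract |     1
# load    |     3
# --------+------
```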
test
reap_process_group
Tries really hard to terminate all children (including grandchildren). Will send sig (SIGTERM) to the process group of pid. If any process is alive after timeout a SIGKILL will be sent. :param log: log handler :param pid: pid to kill :param sig: signal type :param timeout: how much time a process has to terminate
airflow/utils/helpers.py
def reap_process_group(pid, log, sig=signal.SIGTERM, timeout=DEFAULT_TIME_TO_WAIT_AFTER_SIGTERM): """ Tries really hard to terminate all children (including grandchildren). Will send sig (SIGTERM) to the process group of pid. If any process is alive after timeout a SIGKILL will be send. :param log: log handler :param pid: pid to kill :param sig: signal type :param timeout: how much time a process has to terminate """ def on_terminate(p): log.info("Process %s (%s) terminated with exit code %s", p, p.pid, p.returncode) if pid == os.getpid(): raise RuntimeError("I refuse to kill myself") parent = psutil.Process(pid) children = parent.children(recursive=True) children.append(parent) try: pg = os.getpgid(pid) except OSError as err: # Skip if not such process - we experience a race and it just terminated if err.errno == errno.ESRCH: return raise log.info("Sending %s to GPID %s", sig, pg) os.killpg(os.getpgid(pid), sig) gone, alive = psutil.wait_procs(children, timeout=timeout, callback=on_terminate) if alive: for p in alive: log.warn("process %s (%s) did not respond to SIGTERM. Trying SIGKILL", p, pid) os.killpg(os.getpgid(pid), signal.SIGKILL) gone, alive = psutil.wait_procs(alive, timeout=timeout, callback=on_terminate) if alive: for p in alive: log.error("Process %s (%s) could not be killed. Giving up.", p, p.pid)
def reap_process_group(pid, log, sig=signal.SIGTERM, timeout=DEFAULT_TIME_TO_WAIT_AFTER_SIGTERM): """ Tries really hard to terminate all children (including grandchildren). Will send sig (SIGTERM) to the process group of pid. If any process is alive after timeout a SIGKILL will be send. :param log: log handler :param pid: pid to kill :param sig: signal type :param timeout: how much time a process has to terminate """ def on_terminate(p): log.info("Process %s (%s) terminated with exit code %s", p, p.pid, p.returncode) if pid == os.getpid(): raise RuntimeError("I refuse to kill myself") parent = psutil.Process(pid) children = parent.children(recursive=True) children.append(parent) try: pg = os.getpgid(pid) except OSError as err: # Skip if not such process - we experience a race and it just terminated if err.errno == errno.ESRCH: return raise log.info("Sending %s to GPID %s", sig, pg) os.killpg(os.getpgid(pid), sig) gone, alive = psutil.wait_procs(children, timeout=timeout, callback=on_terminate) if alive: for p in alive: log.warn("process %s (%s) did not respond to SIGTERM. Trying SIGKILL", p, pid) os.killpg(os.getpgid(pid), signal.SIGKILL) gone, alive = psutil.wait_procs(alive, timeout=timeout, callback=on_terminate) if alive: for p in alive: log.error("Process %s (%s) could not be killed. Giving up.", p, p.pid)
[ "Tries", "really", "hard", "to", "terminate", "all", "children", "(", "including", "grandchildren", ")", ".", "Will", "send", "sig", "(", "SIGTERM", ")", "to", "the", "process", "group", "of", "pid", ".", "If", "any", "process", "is", "alive", "after", "timeout", "a", "SIGKILL", "will", "be", "send", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/helpers.py#L243-L289
[ "def", "reap_process_group", "(", "pid", ",", "log", ",", "sig", "=", "signal", ".", "SIGTERM", ",", "timeout", "=", "DEFAULT_TIME_TO_WAIT_AFTER_SIGTERM", ")", ":", "def", "on_terminate", "(", "p", ")", ":", "log", ".", "info", "(", "\"Process %s (%s) terminated with exit code %s\"", ",", "p", ",", "p", ".", "pid", ",", "p", ".", "returncode", ")", "if", "pid", "==", "os", ".", "getpid", "(", ")", ":", "raise", "RuntimeError", "(", "\"I refuse to kill myself\"", ")", "parent", "=", "psutil", ".", "Process", "(", "pid", ")", "children", "=", "parent", ".", "children", "(", "recursive", "=", "True", ")", "children", ".", "append", "(", "parent", ")", "try", ":", "pg", "=", "os", ".", "getpgid", "(", "pid", ")", "except", "OSError", "as", "err", ":", "# Skip if not such process - we experience a race and it just terminated", "if", "err", ".", "errno", "==", "errno", ".", "ESRCH", ":", "return", "raise", "log", ".", "info", "(", "\"Sending %s to GPID %s\"", ",", "sig", ",", "pg", ")", "os", ".", "killpg", "(", "os", ".", "getpgid", "(", "pid", ")", ",", "sig", ")", "gone", ",", "alive", "=", "psutil", ".", "wait_procs", "(", "children", ",", "timeout", "=", "timeout", ",", "callback", "=", "on_terminate", ")", "if", "alive", ":", "for", "p", "in", "alive", ":", "log", ".", "warn", "(", "\"process %s (%s) did not respond to SIGTERM. Trying SIGKILL\"", ",", "p", ",", "pid", ")", "os", ".", "killpg", "(", "os", ".", "getpgid", "(", "pid", ")", ",", "signal", ".", "SIGKILL", ")", "gone", ",", "alive", "=", "psutil", ".", "wait_procs", "(", "alive", ",", "timeout", "=", "timeout", ",", "callback", "=", "on_terminate", ")", "if", "alive", ":", "for", "p", "in", "alive", ":", "log", ".", "error", "(", "\"Process %s (%s) could not be killed. Giving up.\"", ",", "p", ",", "p", ".", "pid", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
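A hedged, Unix-only sketch of `reap_process_group`: start a child in its own process group, then terminate the whole group. The sleep command and the timeout are arbitrary.

```python
# Unix-only illustration; os.setsid puts the child in its own process group.
import logging
import os
import subprocess

from airflow.utils.helpers import reap_process_group

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("reaper-demo")

proc = subprocess.Popen(["sleep", "300"], preexec_fn=os.setsid)
reap_process_group(proc.pid, log, timeout=5)  # SIGTERM the group, SIGKILL stragglers
```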
test
render_log_filename
Given a task instance, try_number and filename_template, return the rendered log filename :param ti: task instance :param try_number: try_number of the task :param filename_template: filename template, which can be a Jinja template or a Python string template
airflow/utils/helpers.py
def render_log_filename(ti, try_number, filename_template): """ Given task instance, try_number, filename_template, return the rendered log filename :param ti: task instance :param try_number: try_number of the task :param filename_template: filename template, which can be jinja template or python string template """ filename_template, filename_jinja_template = parse_template_string(filename_template) if filename_jinja_template: jinja_context = ti.get_template_context() jinja_context['try_number'] = try_number return filename_jinja_template.render(**jinja_context) return filename_template.format(dag_id=ti.dag_id, task_id=ti.task_id, execution_date=ti.execution_date.isoformat(), try_number=try_number)
def render_log_filename(ti, try_number, filename_template): """ Given task instance, try_number, filename_template, return the rendered log filename :param ti: task instance :param try_number: try_number of the task :param filename_template: filename template, which can be jinja template or python string template """ filename_template, filename_jinja_template = parse_template_string(filename_template) if filename_jinja_template: jinja_context = ti.get_template_context() jinja_context['try_number'] = try_number return filename_jinja_template.render(**jinja_context) return filename_template.format(dag_id=ti.dag_id, task_id=ti.task_id, execution_date=ti.execution_date.isoformat(), try_number=try_number)
[ "Given", "task", "instance", "try_number", "filename_template", "return", "the", "rendered", "log", "filename" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/helpers.py#L299-L318
[ "def", "render_log_filename", "(", "ti", ",", "try_number", ",", "filename_template", ")", ":", "filename_template", ",", "filename_jinja_template", "=", "parse_template_string", "(", "filename_template", ")", "if", "filename_jinja_template", ":", "jinja_context", "=", "ti", ".", "get_template_context", "(", ")", "jinja_context", "[", "'try_number'", "]", "=", "try_number", "return", "filename_jinja_template", ".", "render", "(", "*", "*", "jinja_context", ")", "return", "filename_template", ".", "format", "(", "dag_id", "=", "ti", ".", "dag_id", ",", "task_id", "=", "ti", ".", "task_id", ",", "execution_date", "=", "ti", ".", "execution_date", ".", "isoformat", "(", ")", ",", "try_number", "=", "try_number", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
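A sketch of the plain string-template branch of `render_log_filename`, using a stand-in object instead of a real TaskInstance; only the attributes that branch reads are faked.

```python
# FakeTI is a hypothetical stand-in providing dag_id, task_id and execution_date.
from collections import namedtuple
from datetime import datetime

from airflow.utils.helpers import render_log_filename

FakeTI = namedtuple("FakeTI", ["dag_id", "task_id", "execution_date"])
ti = FakeTI("example_dag", "example_task", datetime(2019, 1, 1))

template = "{dag_id}/{task_id}/{execution_date}/{try_number}.log"
print(render_log_filename(ti, 1, template))
# example_dag/example_task/2019-01-01T00:00:00/1.log
```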
test
get_task_instance
Return the task instance identified by the given dag_id, task_id and execution_date.
airflow/api/common/experimental/get_task_instance.py
def get_task_instance(dag_id, task_id, execution_date): """Return the task object identified by the given dag_id and task_id.""" dagbag = DagBag() # Check DAG exists. if dag_id not in dagbag.dags: error_message = "Dag id {} not found".format(dag_id) raise DagNotFound(error_message) # Get DAG object and check Task Exists dag = dagbag.get_dag(dag_id) if not dag.has_task(task_id): error_message = 'Task {} not found in dag {}'.format(task_id, dag_id) raise TaskNotFound(error_message) # Get DagRun object and check that it exists dagrun = dag.get_dagrun(execution_date=execution_date) if not dagrun: error_message = ('Dag Run for date {} not found in dag {}' .format(execution_date, dag_id)) raise DagRunNotFound(error_message) # Get task instance object and check that it exists task_instance = dagrun.get_task_instance(task_id) if not task_instance: error_message = ('Task {} instance for date {} not found' .format(task_id, execution_date)) raise TaskInstanceNotFound(error_message) return task_instance
def get_task_instance(dag_id, task_id, execution_date): """Return the task object identified by the given dag_id and task_id.""" dagbag = DagBag() # Check DAG exists. if dag_id not in dagbag.dags: error_message = "Dag id {} not found".format(dag_id) raise DagNotFound(error_message) # Get DAG object and check Task Exists dag = dagbag.get_dag(dag_id) if not dag.has_task(task_id): error_message = 'Task {} not found in dag {}'.format(task_id, dag_id) raise TaskNotFound(error_message) # Get DagRun object and check that it exists dagrun = dag.get_dagrun(execution_date=execution_date) if not dagrun: error_message = ('Dag Run for date {} not found in dag {}' .format(execution_date, dag_id)) raise DagRunNotFound(error_message) # Get task instance object and check that it exists task_instance = dagrun.get_task_instance(task_id) if not task_instance: error_message = ('Task {} instance for date {} not found' .format(task_id, execution_date)) raise TaskInstanceNotFound(error_message) return task_instance
[ "Return", "the", "task", "object", "identified", "by", "the", "given", "dag_id", "and", "task_id", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/get_task_instance.py#L25-L55
[ "def", "get_task_instance", "(", "dag_id", ",", "task_id", ",", "execution_date", ")", ":", "dagbag", "=", "DagBag", "(", ")", "# Check DAG exists.", "if", "dag_id", "not", "in", "dagbag", ".", "dags", ":", "error_message", "=", "\"Dag id {} not found\"", ".", "format", "(", "dag_id", ")", "raise", "DagNotFound", "(", "error_message", ")", "# Get DAG object and check Task Exists", "dag", "=", "dagbag", ".", "get_dag", "(", "dag_id", ")", "if", "not", "dag", ".", "has_task", "(", "task_id", ")", ":", "error_message", "=", "'Task {} not found in dag {}'", ".", "format", "(", "task_id", ",", "dag_id", ")", "raise", "TaskNotFound", "(", "error_message", ")", "# Get DagRun object and check that it exists", "dagrun", "=", "dag", ".", "get_dagrun", "(", "execution_date", "=", "execution_date", ")", "if", "not", "dagrun", ":", "error_message", "=", "(", "'Dag Run for date {} not found in dag {}'", ".", "format", "(", "execution_date", ",", "dag_id", ")", ")", "raise", "DagRunNotFound", "(", "error_message", ")", "# Get task instance object and check that it exists", "task_instance", "=", "dagrun", ".", "get_task_instance", "(", "task_id", ")", "if", "not", "task_instance", ":", "error_message", "=", "(", "'Task {} instance for date {} not found'", ".", "format", "(", "task_id", ",", "execution_date", ")", ")", "raise", "TaskInstanceNotFound", "(", "error_message", ")", "return", "task_instance" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
_integrate_plugins
Integrate plugins into the context
airflow/operators/__init__.py
def _integrate_plugins(): """Integrate plugins to the context""" import sys from airflow.plugins_manager import operators_modules for operators_module in operators_modules: sys.modules[operators_module.__name__] = operators_module globals()[operators_module._name] = operators_module
def _integrate_plugins(): """Integrate plugins to the context""" import sys from airflow.plugins_manager import operators_modules for operators_module in operators_modules: sys.modules[operators_module.__name__] = operators_module globals()[operators_module._name] = operators_module
[ "Integrate", "plugins", "to", "the", "context" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/operators/__init__.py#L21-L27
[ "def", "_integrate_plugins", "(", ")", ":", "import", "sys", "from", "airflow", ".", "plugins_manager", "import", "operators_modules", "for", "operators_module", "in", "operators_modules", ":", "sys", ".", "modules", "[", "operators_module", ".", "__name__", "]", "=", "operators_module", "globals", "(", ")", "[", "operators_module", ".", "_name", "]", "=", "operators_module" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DataProcHook.get_conn
Returns a Google Cloud Dataproc service object.
airflow/contrib/hooks/gcp_dataproc_hook.py
def get_conn(self): """Returns a Google Cloud Dataproc service object.""" http_authorized = self._authorize() return build( 'dataproc', self.api_version, http=http_authorized, cache_discovery=False)
def get_conn(self): """Returns a Google Cloud Dataproc service object.""" http_authorized = self._authorize() return build( 'dataproc', self.api_version, http=http_authorized, cache_discovery=False)
[ "Returns", "a", "Google", "Cloud", "Dataproc", "service", "object", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_dataproc_hook.py#L218-L223
[ "def", "get_conn", "(", "self", ")", ":", "http_authorized", "=", "self", ".", "_authorize", "(", ")", "return", "build", "(", "'dataproc'", ",", "self", ".", "api_version", ",", "http", "=", "http_authorized", ",", "cache_discovery", "=", "False", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DataProcHook.wait
Waits for a Google Cloud Dataproc operation to complete.
airflow/contrib/hooks/gcp_dataproc_hook.py
def wait(self, operation): """Awaits for Google Cloud Dataproc Operation to complete.""" submitted = _DataProcOperation(self.get_conn(), operation, self.num_retries) submitted.wait_for_done()
def wait(self, operation): """Awaits for Google Cloud Dataproc Operation to complete.""" submitted = _DataProcOperation(self.get_conn(), operation, self.num_retries) submitted.wait_for_done()
[ "Awaits", "for", "Google", "Cloud", "Dataproc", "Operation", "to", "complete", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_dataproc_hook.py#L243-L247
[ "def", "wait", "(", "self", ",", "operation", ")", ":", "submitted", "=", "_DataProcOperation", "(", "self", ".", "get_conn", "(", ")", ",", "operation", ",", "self", ".", "num_retries", ")", "submitted", ".", "wait_for_done", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
_deep_string_coerce
Coerces content, or all values of content if it is a dict, to a string. The function will throw if content contains non-string or non-numeric types. The reason we have this function is that the ``self.json`` field must be a dict with only string values, because ``render_template`` will fail for numerical values.
airflow/contrib/operators/databricks_operator.py
def _deep_string_coerce(content, json_path='json'): """ Coerces content or all values of content if it is a dict to a string. The function will throw if content contains non-string or non-numeric types. The reason why we have this function is because the ``self.json`` field must be a dict with only string values. This is because ``render_template`` will fail for numerical values. """ c = _deep_string_coerce if isinstance(content, six.string_types): return content elif isinstance(content, six.integer_types + (float,)): # Databricks can tolerate either numeric or string types in the API backend. return str(content) elif isinstance(content, (list, tuple)): return [c(e, '{0}[{1}]'.format(json_path, i)) for i, e in enumerate(content)] elif isinstance(content, dict): return {k: c(v, '{0}[{1}]'.format(json_path, k)) for k, v in list(content.items())} else: param_type = type(content) msg = 'Type {0} used for parameter {1} is not a number or a string' \ .format(param_type, json_path) raise AirflowException(msg)
def _deep_string_coerce(content, json_path='json'): """ Coerces content or all values of content if it is a dict to a string. The function will throw if content contains non-string or non-numeric types. The reason why we have this function is because the ``self.json`` field must be a dict with only string values. This is because ``render_template`` will fail for numerical values. """ c = _deep_string_coerce if isinstance(content, six.string_types): return content elif isinstance(content, six.integer_types + (float,)): # Databricks can tolerate either numeric or string types in the API backend. return str(content) elif isinstance(content, (list, tuple)): return [c(e, '{0}[{1}]'.format(json_path, i)) for i, e in enumerate(content)] elif isinstance(content, dict): return {k: c(v, '{0}[{1}]'.format(json_path, k)) for k, v in list(content.items())} else: param_type = type(content) msg = 'Type {0} used for parameter {1} is not a number or a string' \ .format(param_type, json_path) raise AirflowException(msg)
[ "Coerces", "content", "or", "all", "values", "of", "content", "if", "it", "is", "a", "dict", "to", "a", "string", ".", "The", "function", "will", "throw", "if", "content", "contains", "non", "-", "string", "or", "non", "-", "numeric", "types", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/databricks_operator.py#L34-L58
[ "def", "_deep_string_coerce", "(", "content", ",", "json_path", "=", "'json'", ")", ":", "c", "=", "_deep_string_coerce", "if", "isinstance", "(", "content", ",", "six", ".", "string_types", ")", ":", "return", "content", "elif", "isinstance", "(", "content", ",", "six", ".", "integer_types", "+", "(", "float", ",", ")", ")", ":", "# Databricks can tolerate either numeric or string types in the API backend.", "return", "str", "(", "content", ")", "elif", "isinstance", "(", "content", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "[", "c", "(", "e", ",", "'{0}[{1}]'", ".", "format", "(", "json_path", ",", "i", ")", ")", "for", "i", ",", "e", "in", "enumerate", "(", "content", ")", "]", "elif", "isinstance", "(", "content", ",", "dict", ")", ":", "return", "{", "k", ":", "c", "(", "v", ",", "'{0}[{1}]'", ".", "format", "(", "json_path", ",", "k", ")", ")", "for", "k", ",", "v", "in", "list", "(", "content", ".", "items", "(", ")", ")", "}", "else", ":", "param_type", "=", "type", "(", "content", ")", "msg", "=", "'Type {0} used for parameter {1} is not a number or a string'", ".", "format", "(", "param_type", ",", "json_path", ")", "raise", "AirflowException", "(", "msg", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
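A quick, self-contained illustration of `_deep_string_coerce` (values are arbitrary): numbers are stringified at any nesting depth, and unsupported types raise an `AirflowException` naming the offending path.

```python
from airflow.contrib.operators.databricks_operator import _deep_string_coerce
from airflow.exceptions import AirflowException

print(_deep_string_coerce({"a": 1, "b": [2, 3.5], "c": "x"}))
# {'a': '1', 'b': ['2', '3.5'], 'c': 'x'}

try:
    _deep_string_coerce({"when": object()})
except AirflowException as err:
    print(err)  # Type <class 'object'> used for parameter json[when] is not a number or a string
```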
test
_handle_databricks_operator_execution
Handles the Airflow + Databricks lifecycle logic for a Databricks operator :param operator: Databricks operator being handled :param hook: Databricks hook used to poll the run state :param log: logger of the operator :param context: Airflow context
airflow/contrib/operators/databricks_operator.py
def _handle_databricks_operator_execution(operator, hook, log, context): """ Handles the Airflow + Databricks lifecycle logic for a Databricks operator :param operator: Databricks operator being handled :param context: Airflow context """ if operator.do_xcom_push: context['ti'].xcom_push(key=XCOM_RUN_ID_KEY, value=operator.run_id) log.info('Run submitted with run_id: %s', operator.run_id) run_page_url = hook.get_run_page_url(operator.run_id) if operator.do_xcom_push: context['ti'].xcom_push(key=XCOM_RUN_PAGE_URL_KEY, value=run_page_url) log.info('View run status, Spark UI, and logs at %s', run_page_url) while True: run_state = hook.get_run_state(operator.run_id) if run_state.is_terminal: if run_state.is_successful: log.info('%s completed successfully.', operator.task_id) log.info('View run status, Spark UI, and logs at %s', run_page_url) return else: error_message = '{t} failed with terminal state: {s}'.format( t=operator.task_id, s=run_state) raise AirflowException(error_message) else: log.info('%s in run state: %s', operator.task_id, run_state) log.info('View run status, Spark UI, and logs at %s', run_page_url) log.info('Sleeping for %s seconds.', operator.polling_period_seconds) time.sleep(operator.polling_period_seconds)
def _handle_databricks_operator_execution(operator, hook, log, context): """ Handles the Airflow + Databricks lifecycle logic for a Databricks operator :param operator: Databricks operator being handled :param context: Airflow context """ if operator.do_xcom_push: context['ti'].xcom_push(key=XCOM_RUN_ID_KEY, value=operator.run_id) log.info('Run submitted with run_id: %s', operator.run_id) run_page_url = hook.get_run_page_url(operator.run_id) if operator.do_xcom_push: context['ti'].xcom_push(key=XCOM_RUN_PAGE_URL_KEY, value=run_page_url) log.info('View run status, Spark UI, and logs at %s', run_page_url) while True: run_state = hook.get_run_state(operator.run_id) if run_state.is_terminal: if run_state.is_successful: log.info('%s completed successfully.', operator.task_id) log.info('View run status, Spark UI, and logs at %s', run_page_url) return else: error_message = '{t} failed with terminal state: {s}'.format( t=operator.task_id, s=run_state) raise AirflowException(error_message) else: log.info('%s in run state: %s', operator.task_id, run_state) log.info('View run status, Spark UI, and logs at %s', run_page_url) log.info('Sleeping for %s seconds.', operator.polling_period_seconds) time.sleep(operator.polling_period_seconds)
[ "Handles", "the", "Airflow", "+", "Databricks", "lifecycle", "logic", "for", "a", "Databricks", "operator" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/databricks_operator.py#L61-L92
[ "def", "_handle_databricks_operator_execution", "(", "operator", ",", "hook", ",", "log", ",", "context", ")", ":", "if", "operator", ".", "do_xcom_push", ":", "context", "[", "'ti'", "]", ".", "xcom_push", "(", "key", "=", "XCOM_RUN_ID_KEY", ",", "value", "=", "operator", ".", "run_id", ")", "log", ".", "info", "(", "'Run submitted with run_id: %s'", ",", "operator", ".", "run_id", ")", "run_page_url", "=", "hook", ".", "get_run_page_url", "(", "operator", ".", "run_id", ")", "if", "operator", ".", "do_xcom_push", ":", "context", "[", "'ti'", "]", ".", "xcom_push", "(", "key", "=", "XCOM_RUN_PAGE_URL_KEY", ",", "value", "=", "run_page_url", ")", "log", ".", "info", "(", "'View run status, Spark UI, and logs at %s'", ",", "run_page_url", ")", "while", "True", ":", "run_state", "=", "hook", ".", "get_run_state", "(", "operator", ".", "run_id", ")", "if", "run_state", ".", "is_terminal", ":", "if", "run_state", ".", "is_successful", ":", "log", ".", "info", "(", "'%s completed successfully.'", ",", "operator", ".", "task_id", ")", "log", ".", "info", "(", "'View run status, Spark UI, and logs at %s'", ",", "run_page_url", ")", "return", "else", ":", "error_message", "=", "'{t} failed with terminal state: {s}'", ".", "format", "(", "t", "=", "operator", ".", "task_id", ",", "s", "=", "run_state", ")", "raise", "AirflowException", "(", "error_message", ")", "else", ":", "log", ".", "info", "(", "'%s in run state: %s'", ",", "operator", ".", "task_id", ",", "run_state", ")", "log", ".", "info", "(", "'View run status, Spark UI, and logs at %s'", ",", "run_page_url", ")", "log", ".", "info", "(", "'Sleeping for %s seconds.'", ",", "operator", ".", "polling_period_seconds", ")", "time", ".", "sleep", "(", "operator", ".", "polling_period_seconds", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
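A minimal usage sketch for the record above, showing how a downstream task can read the values that _handle_databricks_operator_execution pushes to XCom when do_xcom_push is enabled. The task ids and the callable are hypothetical; only the imported XCom key constants come from the module documented above.

# Sketch: pull the Databricks run id and run page URL pushed by the helper above.
from airflow.contrib.operators.databricks_operator import (
    XCOM_RUN_ID_KEY,
    XCOM_RUN_PAGE_URL_KEY,
)
from airflow.operators.python_operator import PythonOperator


def report_run(**context):
    ti = context['ti']
    # Both keys are only present when the Databricks task ran with do_xcom_push=True.
    run_id = ti.xcom_pull(task_ids='submit_run', key=XCOM_RUN_ID_KEY)
    run_page_url = ti.xcom_pull(task_ids='submit_run', key=XCOM_RUN_PAGE_URL_KEY)
    print('Databricks run {} -> {}'.format(run_id, run_page_url))


report = PythonOperator(
    task_id='report_run',
    python_callable=report_run,
    provide_context=True,  # Airflow 1.x style: pass the context into the callable
)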
test
PigCliHook.run_cli
Run a pig script using the pig cli >>> ph = PigCliHook() >>> result = ph.run_cli("ls /;") >>> ("hdfs://" in result) True
airflow/hooks/pig_hook.py
def run_cli(self, pig, verbose=True): """ Run an pig script using the pig cli >>> ph = PigCliHook() >>> result = ph.run_cli("ls /;") >>> ("hdfs://" in result) True """ with TemporaryDirectory(prefix='airflow_pigop_') as tmp_dir: with NamedTemporaryFile(dir=tmp_dir) as f: f.write(pig.encode('utf-8')) f.flush() fname = f.name pig_bin = 'pig' cmd_extra = [] pig_cmd = [pig_bin, '-f', fname] + cmd_extra if self.pig_properties: pig_properties_list = self.pig_properties.split() pig_cmd.extend(pig_properties_list) if verbose: self.log.info("%s", " ".join(pig_cmd)) sp = subprocess.Popen( pig_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=tmp_dir, close_fds=True) self.sp = sp stdout = '' for line in iter(sp.stdout.readline, b''): stdout += line.decode('utf-8') if verbose: self.log.info(line.strip()) sp.wait() if sp.returncode: raise AirflowException(stdout) return stdout
def run_cli(self, pig, verbose=True): """ Run an pig script using the pig cli >>> ph = PigCliHook() >>> result = ph.run_cli("ls /;") >>> ("hdfs://" in result) True """ with TemporaryDirectory(prefix='airflow_pigop_') as tmp_dir: with NamedTemporaryFile(dir=tmp_dir) as f: f.write(pig.encode('utf-8')) f.flush() fname = f.name pig_bin = 'pig' cmd_extra = [] pig_cmd = [pig_bin, '-f', fname] + cmd_extra if self.pig_properties: pig_properties_list = self.pig_properties.split() pig_cmd.extend(pig_properties_list) if verbose: self.log.info("%s", " ".join(pig_cmd)) sp = subprocess.Popen( pig_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=tmp_dir, close_fds=True) self.sp = sp stdout = '' for line in iter(sp.stdout.readline, b''): stdout += line.decode('utf-8') if verbose: self.log.info(line.strip()) sp.wait() if sp.returncode: raise AirflowException(stdout) return stdout
[ "Run", "an", "pig", "script", "using", "the", "pig", "cli" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/pig_hook.py#L45-L87
[ "def", "run_cli", "(", "self", ",", "pig", ",", "verbose", "=", "True", ")", ":", "with", "TemporaryDirectory", "(", "prefix", "=", "'airflow_pigop_'", ")", "as", "tmp_dir", ":", "with", "NamedTemporaryFile", "(", "dir", "=", "tmp_dir", ")", "as", "f", ":", "f", ".", "write", "(", "pig", ".", "encode", "(", "'utf-8'", ")", ")", "f", ".", "flush", "(", ")", "fname", "=", "f", ".", "name", "pig_bin", "=", "'pig'", "cmd_extra", "=", "[", "]", "pig_cmd", "=", "[", "pig_bin", ",", "'-f'", ",", "fname", "]", "+", "cmd_extra", "if", "self", ".", "pig_properties", ":", "pig_properties_list", "=", "self", ".", "pig_properties", ".", "split", "(", ")", "pig_cmd", ".", "extend", "(", "pig_properties_list", ")", "if", "verbose", ":", "self", ".", "log", ".", "info", "(", "\"%s\"", ",", "\" \"", ".", "join", "(", "pig_cmd", ")", ")", "sp", "=", "subprocess", ".", "Popen", "(", "pig_cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "cwd", "=", "tmp_dir", ",", "close_fds", "=", "True", ")", "self", ".", "sp", "=", "sp", "stdout", "=", "''", "for", "line", "in", "iter", "(", "sp", ".", "stdout", ".", "readline", ",", "b''", ")", ":", "stdout", "+=", "line", ".", "decode", "(", "'utf-8'", ")", "if", "verbose", ":", "self", ".", "log", ".", "info", "(", "line", ".", "strip", "(", ")", ")", "sp", ".", "wait", "(", ")", "if", "sp", ".", "returncode", ":", "raise", "AirflowException", "(", "stdout", ")", "return", "stdout" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
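A short usage sketch for the run_cli hook in the record above, assuming a configured 'pig_cli_default' connection; the script text mirrors the doctest and the error handling relies on the AirflowException raised for a non-zero exit code.

# Sketch: run a Pig script through the CLI hook and surface failures.
from airflow.exceptions import AirflowException
from airflow.hooks.pig_hook import PigCliHook

hook = PigCliHook()  # uses the default pig_cli_default connection
try:
    # run_cli writes the script to a temp file and shells out to `pig -f <file>`
    output = hook.run_cli("ls /;", verbose=False)
except AirflowException as err:
    # A non-zero exit code raises AirflowException carrying the captured stdout.
    print("Pig job failed:\n{}".format(err))
else:
    print(output)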
test
fetch_celery_task_state
Fetch and return the state of the given celery task. The scope of this function is global so that it can be called by subprocesses in the pool. :param celery_task: a tuple of the Celery task key and the async Celery object used to fetch the task's state :type celery_task: tuple(str, celery.result.AsyncResult) :return: a tuple of the Celery task key and the Celery state of the task :rtype: tuple[str, str]
airflow/executors/celery_executor.py
def fetch_celery_task_state(celery_task): """ Fetch and return the state of the given celery task. The scope of this function is global so that it can be called by subprocesses in the pool. :param celery_task: a tuple of the Celery task key and the async Celery object used to fetch the task's state :type celery_task: tuple(str, celery.result.AsyncResult) :return: a tuple of the Celery task key and the Celery state of the task :rtype: tuple[str, str] """ try: with timeout(seconds=2): # Accessing state property of celery task will make actual network request # to get the current state of the task. res = (celery_task[0], celery_task[1].state) except Exception as e: exception_traceback = "Celery Task ID: {}\n{}".format(celery_task[0], traceback.format_exc()) res = ExceptionWithTraceback(e, exception_traceback) return res
def fetch_celery_task_state(celery_task): """ Fetch and return the state of the given celery task. The scope of this function is global so that it can be called by subprocesses in the pool. :param celery_task: a tuple of the Celery task key and the async Celery object used to fetch the task's state :type celery_task: tuple(str, celery.result.AsyncResult) :return: a tuple of the Celery task key and the Celery state of the task :rtype: tuple[str, str] """ try: with timeout(seconds=2): # Accessing state property of celery task will make actual network request # to get the current state of the task. res = (celery_task[0], celery_task[1].state) except Exception as e: exception_traceback = "Celery Task ID: {}\n{}".format(celery_task[0], traceback.format_exc()) res = ExceptionWithTraceback(e, exception_traceback) return res
[ "Fetch", "and", "return", "the", "state", "of", "the", "given", "celery", "task", ".", "The", "scope", "of", "this", "function", "is", "global", "so", "that", "it", "can", "be", "called", "by", "subprocesses", "in", "the", "pool", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/celery_executor.py#L90-L111
[ "def", "fetch_celery_task_state", "(", "celery_task", ")", ":", "try", ":", "with", "timeout", "(", "seconds", "=", "2", ")", ":", "# Accessing state property of celery task will make actual network request", "# to get the current state of the task.", "res", "=", "(", "celery_task", "[", "0", "]", ",", "celery_task", "[", "1", "]", ".", "state", ")", "except", "Exception", "as", "e", ":", "exception_traceback", "=", "\"Celery Task ID: {}\\n{}\"", ".", "format", "(", "celery_task", "[", "0", "]", ",", "traceback", ".", "format_exc", "(", ")", ")", "res", "=", "ExceptionWithTraceback", "(", "e", ",", "exception_traceback", ")", "return", "res" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
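A sketch of the calling pattern the docstring above describes: mapping fetch_celery_task_state over (key, AsyncResult) tuples in a process pool. The sync_states helper, its pool size, and the async_results mapping are hypothetical placeholders, not the executor's own code.

# Sketch: fetch Celery states for many tasks in parallel subprocesses.
from multiprocessing import Pool

from airflow.executors.celery_executor import (
    ExceptionWithTraceback,
    fetch_celery_task_state,
)


def sync_states(async_results, sync_parallelism=4):
    """Return {task_key: celery_state} for a {task_key: AsyncResult} dict."""
    task_tuples = list(async_results.items())
    with Pool(processes=sync_parallelism) as pool:
        results = pool.map(fetch_celery_task_state, task_tuples)
    states = {}
    for result in results:
        if isinstance(result, ExceptionWithTraceback):
            # The helper wraps failures (with their traceback) instead of
            # raising inside the subprocess; skip those keys here.
            continue
        key, state = result
        states[key] = state
    return states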
test
CeleryExecutor._num_tasks_per_send_process
How many Celery tasks should each worker process send. :return: Number of tasks that should be sent per process :rtype: int
airflow/executors/celery_executor.py
def _num_tasks_per_send_process(self, to_send_count): """ How many Celery tasks should each worker process send. :return: Number of tasks that should be sent per process :rtype: int """ return max(1, int(math.ceil(1.0 * to_send_count / self._sync_parallelism)))
def _num_tasks_per_send_process(self, to_send_count): """ How many Celery tasks should each worker process send. :return: Number of tasks that should be sent per process :rtype: int """ return max(1, int(math.ceil(1.0 * to_send_count / self._sync_parallelism)))
[ "How", "many", "Celery", "tasks", "should", "each", "worker", "process", "send", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/celery_executor.py#L158-L166
[ "def", "_num_tasks_per_send_process", "(", "self", ",", "to_send_count", ")", ":", "return", "max", "(", "1", ",", "int", "(", "math", ".", "ceil", "(", "1.0", "*", "to_send_count", "/", "self", ".", "_sync_parallelism", ")", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CeleryExecutor._num_tasks_per_fetch_process
How many Celery task states should each worker process fetch. :return: Number of tasks that should be fetched per process :rtype: int
airflow/executors/celery_executor.py
def _num_tasks_per_fetch_process(self): """ How many Celery tasks should be sent to each worker process. :return: Number of tasks that should be used per process :rtype: int """ return max(1, int(math.ceil(1.0 * len(self.tasks) / self._sync_parallelism)))
def _num_tasks_per_fetch_process(self): """ How many Celery tasks should be sent to each worker process. :return: Number of tasks that should be used per process :rtype: int """ return max(1, int(math.ceil(1.0 * len(self.tasks) / self._sync_parallelism)))
[ "How", "many", "Celery", "tasks", "should", "be", "sent", "to", "each", "worker", "process", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/celery_executor.py#L168-L176
[ "def", "_num_tasks_per_fetch_process", "(", "self", ")", ":", "return", "max", "(", "1", ",", "int", "(", "math", ".", "ceil", "(", "1.0", "*", "len", "(", "self", ".", "tasks", ")", "/", "self", ".", "_sync_parallelism", ")", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
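A worked example of the ceiling-division formula shared by the two methods above. This is a standalone restatement for illustration only, not the executor's own helper.

# Worked example of max(1, ceil(task_count / sync_parallelism)).
import math


def tasks_per_process(task_count, sync_parallelism):
    # max(1, ...) guarantees each process is asked to handle at least one task.
    return max(1, int(math.ceil(1.0 * task_count / sync_parallelism)))


assert tasks_per_process(10, 4) == 3  # 10 tasks over 4 processes -> 3, 3, 3, 1
assert tasks_per_process(2, 8) == 1   # never less than one task per process
assert tasks_per_process(0, 8) == 1   # even with nothing to do, the floor is 1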
test
Variable.setdefault
Like a Python builtin dict object, setdefault returns the current value for a key, and if it isn't there, stores the default value and returns it. :param key: Dict key for this Variable :type key: str :param default: Default value to set and return if the variable isn't already in the DB :type default: Mixed :param deserialize_json: Store this as a JSON encoded value in the DB and un-encode it when retrieving a value :return: Mixed
airflow/models/variable.py
def setdefault(cls, key, default, deserialize_json=False): """ Like a Python builtin dict object, setdefault returns the current value for a key, and if it isn't there, stores the default value and returns it. :param key: Dict key for this Variable :type key: str :param default: Default value to set and return if the variable isn't already in the DB :type default: Mixed :param deserialize_json: Store this as a JSON encoded value in the DB and un-encode it when retrieving a value :return: Mixed """ obj = Variable.get(key, default_var=None, deserialize_json=deserialize_json) if obj is None: if default is not None: Variable.set(key, default, serialize_json=deserialize_json) return default else: raise ValueError('Default Value must be set') else: return obj
def setdefault(cls, key, default, deserialize_json=False): """ Like a Python builtin dict object, setdefault returns the current value for a key, and if it isn't there, stores the default value and returns it. :param key: Dict key for this Variable :type key: str :param default: Default value to set and return if the variable isn't already in the DB :type default: Mixed :param deserialize_json: Store this as a JSON encoded value in the DB and un-encode it when retrieving a value :return: Mixed """ obj = Variable.get(key, default_var=None, deserialize_json=deserialize_json) if obj is None: if default is not None: Variable.set(key, default, serialize_json=deserialize_json) return default else: raise ValueError('Default Value must be set') else: return obj
[ "Like", "a", "Python", "builtin", "dict", "object", "setdefault", "returns", "the", "current", "value", "for", "a", "key", "and", "if", "it", "isn", "t", "there", "stores", "the", "default", "value", "and", "returns", "it", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/variable.py#L76-L99
[ "def", "setdefault", "(", "cls", ",", "key", ",", "default", ",", "deserialize_json", "=", "False", ")", ":", "obj", "=", "Variable", ".", "get", "(", "key", ",", "default_var", "=", "None", ",", "deserialize_json", "=", "deserialize_json", ")", "if", "obj", "is", "None", ":", "if", "default", "is", "not", "None", ":", "Variable", ".", "set", "(", "key", ",", "default", ",", "serialize_json", "=", "deserialize_json", ")", "return", "default", "else", ":", "raise", "ValueError", "(", "'Default Value must be set'", ")", "else", ":", "return", "obj" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
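A usage sketch for Variable.setdefault from the record above; the key and default values are hypothetical.

# Sketch: dict-like setdefault semantics backed by the Airflow metadata DB.
from airflow.models import Variable

# First call: "etl_config" is not in the DB yet, so the default is stored
# (JSON-encoded because deserialize_json=True) and returned as a dict.
config = Variable.setdefault("etl_config", {"batch_size": 500}, deserialize_json=True)

# Later calls return the stored value and ignore the default argument.
config = Variable.setdefault("etl_config", {"batch_size": 100}, deserialize_json=True)
assert config["batch_size"] == 500

# Passing default=None for a missing key raises ValueError('Default Value must be set').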
test
MLEngineHook.get_conn
Returns a Google MLEngine service object.
airflow/contrib/hooks/gcp_mlengine_hook.py
def get_conn(self): """ Returns a Google MLEngine service object. """ authed_http = self._authorize() return build('ml', 'v1', http=authed_http, cache_discovery=False)
def get_conn(self): """ Returns a Google MLEngine service object. """ authed_http = self._authorize() return build('ml', 'v1', http=authed_http, cache_discovery=False)
[ "Returns", "a", "Google", "MLEngine", "service", "object", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L53-L58
[ "def", "get_conn", "(", "self", ")", ":", "authed_http", "=", "self", ".", "_authorize", "(", ")", "return", "build", "(", "'ml'", ",", "'v1'", ",", "http", "=", "authed_http", ",", "cache_discovery", "=", "False", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
MLEngineHook.create_job
Launches an MLEngine job and waits for it to reach a terminal state. :param project_id: The Google Cloud project id within which the MLEngine job will be launched. :type project_id: str :param job: MLEngine Job object that should be provided to the MLEngine API, such as: :: { 'jobId': 'my_job_id', 'trainingInput': { 'scaleTier': 'STANDARD_1', ... } } :type job: dict :param use_existing_job_fn: In case an MLEngine job with the same job_id already exists, this method (if provided) decides whether to reuse the existing job by continuing to wait for it to finish and returning the job object. It should accept an MLEngine job object and return a boolean value indicating whether it is OK to reuse the existing job. If 'use_existing_job_fn' is not provided, the existing MLEngine job is reused by default. :type use_existing_job_fn: function :return: The MLEngine job object if the job successfully reaches a terminal state (which might be a FAILED or CANCELLED state). :rtype: dict
airflow/contrib/hooks/gcp_mlengine_hook.py
def create_job(self, project_id, job, use_existing_job_fn=None): """ Launches a MLEngine job and wait for it to reach a terminal state. :param project_id: The Google Cloud project id within which MLEngine job will be launched. :type project_id: str :param job: MLEngine Job object that should be provided to the MLEngine API, such as: :: { 'jobId': 'my_job_id', 'trainingInput': { 'scaleTier': 'STANDARD_1', ... } } :type job: dict :param use_existing_job_fn: In case that a MLEngine job with the same job_id already exist, this method (if provided) will decide whether we should use this existing job, continue waiting for it to finish and returning the job object. It should accepts a MLEngine job object, and returns a boolean value indicating whether it is OK to reuse the existing job. If 'use_existing_job_fn' is not provided, we by default reuse the existing MLEngine job. :type use_existing_job_fn: function :return: The MLEngine job object if the job successfully reach a terminal state (which might be FAILED or CANCELLED state). :rtype: dict """ request = self._mlengine.projects().jobs().create( parent='projects/{}'.format(project_id), body=job) job_id = job['jobId'] try: request.execute() except HttpError as e: # 409 means there is an existing job with the same job ID. if e.resp.status == 409: if use_existing_job_fn is not None: existing_job = self._get_job(project_id, job_id) if not use_existing_job_fn(existing_job): self.log.error( 'Job with job_id %s already exist, but it does ' 'not match our expectation: %s', job_id, existing_job ) raise self.log.info( 'Job with job_id %s already exist. Will waiting for it to finish', job_id ) else: self.log.error('Failed to create MLEngine job: {}'.format(e)) raise return self._wait_for_job_done(project_id, job_id)
def create_job(self, project_id, job, use_existing_job_fn=None): """ Launches a MLEngine job and wait for it to reach a terminal state. :param project_id: The Google Cloud project id within which MLEngine job will be launched. :type project_id: str :param job: MLEngine Job object that should be provided to the MLEngine API, such as: :: { 'jobId': 'my_job_id', 'trainingInput': { 'scaleTier': 'STANDARD_1', ... } } :type job: dict :param use_existing_job_fn: In case that a MLEngine job with the same job_id already exist, this method (if provided) will decide whether we should use this existing job, continue waiting for it to finish and returning the job object. It should accepts a MLEngine job object, and returns a boolean value indicating whether it is OK to reuse the existing job. If 'use_existing_job_fn' is not provided, we by default reuse the existing MLEngine job. :type use_existing_job_fn: function :return: The MLEngine job object if the job successfully reach a terminal state (which might be FAILED or CANCELLED state). :rtype: dict """ request = self._mlengine.projects().jobs().create( parent='projects/{}'.format(project_id), body=job) job_id = job['jobId'] try: request.execute() except HttpError as e: # 409 means there is an existing job with the same job ID. if e.resp.status == 409: if use_existing_job_fn is not None: existing_job = self._get_job(project_id, job_id) if not use_existing_job_fn(existing_job): self.log.error( 'Job with job_id %s already exist, but it does ' 'not match our expectation: %s', job_id, existing_job ) raise self.log.info( 'Job with job_id %s already exist. Will waiting for it to finish', job_id ) else: self.log.error('Failed to create MLEngine job: {}'.format(e)) raise return self._wait_for_job_done(project_id, job_id)
[ "Launches", "a", "MLEngine", "job", "and", "wait", "for", "it", "to", "reach", "a", "terminal", "state", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L60-L121
[ "def", "create_job", "(", "self", ",", "project_id", ",", "job", ",", "use_existing_job_fn", "=", "None", ")", ":", "request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "jobs", "(", ")", ".", "create", "(", "parent", "=", "'projects/{}'", ".", "format", "(", "project_id", ")", ",", "body", "=", "job", ")", "job_id", "=", "job", "[", "'jobId'", "]", "try", ":", "request", ".", "execute", "(", ")", "except", "HttpError", "as", "e", ":", "# 409 means there is an existing job with the same job ID.", "if", "e", ".", "resp", ".", "status", "==", "409", ":", "if", "use_existing_job_fn", "is", "not", "None", ":", "existing_job", "=", "self", ".", "_get_job", "(", "project_id", ",", "job_id", ")", "if", "not", "use_existing_job_fn", "(", "existing_job", ")", ":", "self", ".", "log", ".", "error", "(", "'Job with job_id %s already exist, but it does '", "'not match our expectation: %s'", ",", "job_id", ",", "existing_job", ")", "raise", "self", ".", "log", ".", "info", "(", "'Job with job_id %s already exist. Will waiting for it to finish'", ",", "job_id", ")", "else", ":", "self", ".", "log", ".", "error", "(", "'Failed to create MLEngine job: {}'", ".", "format", "(", "e", ")", ")", "raise", "return", "self", ".", "_wait_for_job_done", "(", "project_id", ",", "job_id", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
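A sketch of calling create_job with the job shape shown in the docstring above. The project id, job id, and trainingInput values are hypothetical; the reuse callback follows the use_existing_job_fn contract described in the record.

# Sketch: submit a training job and block until it reaches a terminal state.
from airflow.contrib.hooks.gcp_mlengine_hook import MLEngineHook

hook = MLEngineHook()  # uses the default GCP connection

training_job = {
    'jobId': 'my_training_job_v1',
    'trainingInput': {
        'scaleTier': 'STANDARD_1',
        'packageUris': ['gs://my-bucket/trainer-0.1.tar.gz'],
        'pythonModule': 'trainer.task',
        'region': 'us-central1',
    },
}


def same_training_input(existing_job):
    # Reuse a job already registered under this jobId only if it was submitted
    # with the same trainingInput; otherwise create_job re-raises the 409.
    return existing_job.get('trainingInput') == training_job['trainingInput']


finished_job = hook.create_job(
    project_id='my-gcp-project',
    job=training_job,
    use_existing_job_fn=same_training_input,
)
print(finished_job['state'])  # SUCCEEDED, FAILED or CANCELLED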
test
MLEngineHook._get_job
Gets an MLEngine job based on the job name. :return: MLEngine job object if successful. :rtype: dict Raises: googleapiclient.errors.HttpError: if an HTTP error is returned from the server
airflow/contrib/hooks/gcp_mlengine_hook.py
def _get_job(self, project_id, job_id): """ Gets a MLEngine job based on the job name. :return: MLEngine job object if succeed. :rtype: dict Raises: googleapiclient.errors.HttpError: if HTTP error is returned from server """ job_name = 'projects/{}/jobs/{}'.format(project_id, job_id) request = self._mlengine.projects().jobs().get(name=job_name) while True: try: return request.execute() except HttpError as e: if e.resp.status == 429: # polling after 30 seconds when quota failure occurs time.sleep(30) else: self.log.error('Failed to get MLEngine job: {}'.format(e)) raise
def _get_job(self, project_id, job_id): """ Gets a MLEngine job based on the job name. :return: MLEngine job object if succeed. :rtype: dict Raises: googleapiclient.errors.HttpError: if HTTP error is returned from server """ job_name = 'projects/{}/jobs/{}'.format(project_id, job_id) request = self._mlengine.projects().jobs().get(name=job_name) while True: try: return request.execute() except HttpError as e: if e.resp.status == 429: # polling after 30 seconds when quota failure occurs time.sleep(30) else: self.log.error('Failed to get MLEngine job: {}'.format(e)) raise
[ "Gets", "a", "MLEngine", "job", "based", "on", "the", "job", "name", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L123-L144
[ "def", "_get_job", "(", "self", ",", "project_id", ",", "job_id", ")", ":", "job_name", "=", "'projects/{}/jobs/{}'", ".", "format", "(", "project_id", ",", "job_id", ")", "request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "jobs", "(", ")", ".", "get", "(", "name", "=", "job_name", ")", "while", "True", ":", "try", ":", "return", "request", ".", "execute", "(", ")", "except", "HttpError", "as", "e", ":", "if", "e", ".", "resp", ".", "status", "==", "429", ":", "# polling after 30 seconds when quota failure occurs", "time", ".", "sleep", "(", "30", ")", "else", ":", "self", ".", "log", ".", "error", "(", "'Failed to get MLEngine job: {}'", ".", "format", "(", "e", ")", ")", "raise" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
MLEngineHook._wait_for_job_done
Waits for the Job to reach a terminal state. This method will periodically check the job state until the job reaches a terminal state. Raises: googleapiclient.errors.HttpError: if an HTTP error is returned when getting the job
airflow/contrib/hooks/gcp_mlengine_hook.py
def _wait_for_job_done(self, project_id, job_id, interval=30): """ Waits for the Job to reach a terminal state. This method will periodically check the job state until the job reach a terminal state. Raises: googleapiclient.errors.HttpError: if HTTP error is returned when getting the job """ if interval <= 0: raise ValueError("Interval must be > 0") while True: job = self._get_job(project_id, job_id) if job['state'] in ['SUCCEEDED', 'FAILED', 'CANCELLED']: return job time.sleep(interval)
def _wait_for_job_done(self, project_id, job_id, interval=30): """ Waits for the Job to reach a terminal state. This method will periodically check the job state until the job reach a terminal state. Raises: googleapiclient.errors.HttpError: if HTTP error is returned when getting the job """ if interval <= 0: raise ValueError("Interval must be > 0") while True: job = self._get_job(project_id, job_id) if job['state'] in ['SUCCEEDED', 'FAILED', 'CANCELLED']: return job time.sleep(interval)
[ "Waits", "for", "the", "Job", "to", "reach", "a", "terminal", "state", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L146-L163
[ "def", "_wait_for_job_done", "(", "self", ",", "project_id", ",", "job_id", ",", "interval", "=", "30", ")", ":", "if", "interval", "<=", "0", ":", "raise", "ValueError", "(", "\"Interval must be > 0\"", ")", "while", "True", ":", "job", "=", "self", ".", "_get_job", "(", "project_id", ",", "job_id", ")", "if", "job", "[", "'state'", "]", "in", "[", "'SUCCEEDED'", ",", "'FAILED'", ",", "'CANCELLED'", "]", ":", "return", "job", "time", ".", "sleep", "(", "interval", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
MLEngineHook.create_version
Creates the Version on Google Cloud ML Engine. Returns the operation if the version was created successfully and raises an error otherwise.
airflow/contrib/hooks/gcp_mlengine_hook.py
def create_version(self, project_id, model_name, version_spec): """ Creates the Version on Google Cloud ML Engine. Returns the operation if the version was created successfully and raises an error otherwise. """ parent_name = 'projects/{}/models/{}'.format(project_id, model_name) create_request = self._mlengine.projects().models().versions().create( parent=parent_name, body=version_spec) response = create_request.execute() get_request = self._mlengine.projects().operations().get( name=response['name']) return _poll_with_exponential_delay( request=get_request, max_n=9, is_done_func=lambda resp: resp.get('done', False), is_error_func=lambda resp: resp.get('error', None) is not None)
def create_version(self, project_id, model_name, version_spec): """ Creates the Version on Google Cloud ML Engine. Returns the operation if the version was created successfully and raises an error otherwise. """ parent_name = 'projects/{}/models/{}'.format(project_id, model_name) create_request = self._mlengine.projects().models().versions().create( parent=parent_name, body=version_spec) response = create_request.execute() get_request = self._mlengine.projects().operations().get( name=response['name']) return _poll_with_exponential_delay( request=get_request, max_n=9, is_done_func=lambda resp: resp.get('done', False), is_error_func=lambda resp: resp.get('error', None) is not None)
[ "Creates", "the", "Version", "on", "Google", "Cloud", "ML", "Engine", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L165-L183
[ "def", "create_version", "(", "self", ",", "project_id", ",", "model_name", ",", "version_spec", ")", ":", "parent_name", "=", "'projects/{}/models/{}'", ".", "format", "(", "project_id", ",", "model_name", ")", "create_request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "models", "(", ")", ".", "versions", "(", ")", ".", "create", "(", "parent", "=", "parent_name", ",", "body", "=", "version_spec", ")", "response", "=", "create_request", ".", "execute", "(", ")", "get_request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "operations", "(", ")", ".", "get", "(", "name", "=", "response", "[", "'name'", "]", ")", "return", "_poll_with_exponential_delay", "(", "request", "=", "get_request", ",", "max_n", "=", "9", ",", "is_done_func", "=", "lambda", "resp", ":", "resp", ".", "get", "(", "'done'", ",", "False", ")", ",", "is_error_func", "=", "lambda", "resp", ":", "resp", ".", "get", "(", "'error'", ",", "None", ")", "is", "not", "None", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
MLEngineHook.set_default_version
Sets a version to be the default. Blocks until finished.
airflow/contrib/hooks/gcp_mlengine_hook.py
def set_default_version(self, project_id, model_name, version_name): """ Sets a version to be the default. Blocks until finished. """ full_version_name = 'projects/{}/models/{}/versions/{}'.format( project_id, model_name, version_name) request = self._mlengine.projects().models().versions().setDefault( name=full_version_name, body={}) try: response = request.execute() self.log.info('Successfully set version: %s to default', response) return response except HttpError as e: self.log.error('Something went wrong: %s', e) raise
def set_default_version(self, project_id, model_name, version_name): """ Sets a version to be the default. Blocks until finished. """ full_version_name = 'projects/{}/models/{}/versions/{}'.format( project_id, model_name, version_name) request = self._mlengine.projects().models().versions().setDefault( name=full_version_name, body={}) try: response = request.execute() self.log.info('Successfully set version: %s to default', response) return response except HttpError as e: self.log.error('Something went wrong: %s', e) raise
[ "Sets", "a", "version", "to", "be", "the", "default", ".", "Blocks", "until", "finished", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L185-L200
[ "def", "set_default_version", "(", "self", ",", "project_id", ",", "model_name", ",", "version_name", ")", ":", "full_version_name", "=", "'projects/{}/models/{}/versions/{}'", ".", "format", "(", "project_id", ",", "model_name", ",", "version_name", ")", "request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "models", "(", ")", ".", "versions", "(", ")", ".", "setDefault", "(", "name", "=", "full_version_name", ",", "body", "=", "{", "}", ")", "try", ":", "response", "=", "request", ".", "execute", "(", ")", "self", ".", "log", ".", "info", "(", "'Successfully set version: %s to default'", ",", "response", ")", "return", "response", "except", "HttpError", "as", "e", ":", "self", ".", "log", ".", "error", "(", "'Something went wrong: %s'", ",", "e", ")", "raise" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
MLEngineHook.list_versions
Lists all available versions of a model. Blocks until finished.
airflow/contrib/hooks/gcp_mlengine_hook.py
def list_versions(self, project_id, model_name): """ Lists all available versions of a model. Blocks until finished. """ result = [] full_parent_name = 'projects/{}/models/{}'.format( project_id, model_name) request = self._mlengine.projects().models().versions().list( parent=full_parent_name, pageSize=100) response = request.execute() next_page_token = response.get('nextPageToken', None) result.extend(response.get('versions', [])) while next_page_token is not None: next_request = self._mlengine.projects().models().versions().list( parent=full_parent_name, pageToken=next_page_token, pageSize=100) response = next_request.execute() next_page_token = response.get('nextPageToken', None) result.extend(response.get('versions', [])) time.sleep(5) return result
def list_versions(self, project_id, model_name): """ Lists all available versions of a model. Blocks until finished. """ result = [] full_parent_name = 'projects/{}/models/{}'.format( project_id, model_name) request = self._mlengine.projects().models().versions().list( parent=full_parent_name, pageSize=100) response = request.execute() next_page_token = response.get('nextPageToken', None) result.extend(response.get('versions', [])) while next_page_token is not None: next_request = self._mlengine.projects().models().versions().list( parent=full_parent_name, pageToken=next_page_token, pageSize=100) response = next_request.execute() next_page_token = response.get('nextPageToken', None) result.extend(response.get('versions', [])) time.sleep(5) return result
[ "Lists", "all", "available", "versions", "of", "a", "model", ".", "Blocks", "until", "finished", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L202-L224
[ "def", "list_versions", "(", "self", ",", "project_id", ",", "model_name", ")", ":", "result", "=", "[", "]", "full_parent_name", "=", "'projects/{}/models/{}'", ".", "format", "(", "project_id", ",", "model_name", ")", "request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "models", "(", ")", ".", "versions", "(", ")", ".", "list", "(", "parent", "=", "full_parent_name", ",", "pageSize", "=", "100", ")", "response", "=", "request", ".", "execute", "(", ")", "next_page_token", "=", "response", ".", "get", "(", "'nextPageToken'", ",", "None", ")", "result", ".", "extend", "(", "response", ".", "get", "(", "'versions'", ",", "[", "]", ")", ")", "while", "next_page_token", "is", "not", "None", ":", "next_request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "models", "(", ")", ".", "versions", "(", ")", ".", "list", "(", "parent", "=", "full_parent_name", ",", "pageToken", "=", "next_page_token", ",", "pageSize", "=", "100", ")", "response", "=", "next_request", ".", "execute", "(", ")", "next_page_token", "=", "response", ".", "get", "(", "'nextPageToken'", ",", "None", ")", "result", ".", "extend", "(", "response", ".", "get", "(", "'versions'", ",", "[", "]", ")", ")", "time", ".", "sleep", "(", "5", ")", "return", "result" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
MLEngineHook.delete_version
Deletes the given version of a model. Blocks until finished.
airflow/contrib/hooks/gcp_mlengine_hook.py
def delete_version(self, project_id, model_name, version_name): """ Deletes the given version of a model. Blocks until finished. """ full_name = 'projects/{}/models/{}/versions/{}'.format( project_id, model_name, version_name) delete_request = self._mlengine.projects().models().versions().delete( name=full_name) response = delete_request.execute() get_request = self._mlengine.projects().operations().get( name=response['name']) return _poll_with_exponential_delay( request=get_request, max_n=9, is_done_func=lambda resp: resp.get('done', False), is_error_func=lambda resp: resp.get('error', None) is not None)
def delete_version(self, project_id, model_name, version_name): """ Deletes the given version of a model. Blocks until finished. """ full_name = 'projects/{}/models/{}/versions/{}'.format( project_id, model_name, version_name) delete_request = self._mlengine.projects().models().versions().delete( name=full_name) response = delete_request.execute() get_request = self._mlengine.projects().operations().get( name=response['name']) return _poll_with_exponential_delay( request=get_request, max_n=9, is_done_func=lambda resp: resp.get('done', False), is_error_func=lambda resp: resp.get('error', None) is not None)
[ "Deletes", "the", "given", "version", "of", "a", "model", ".", "Blocks", "until", "finished", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L226-L242
[ "def", "delete_version", "(", "self", ",", "project_id", ",", "model_name", ",", "version_name", ")", ":", "full_name", "=", "'projects/{}/models/{}/versions/{}'", ".", "format", "(", "project_id", ",", "model_name", ",", "version_name", ")", "delete_request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "models", "(", ")", ".", "versions", "(", ")", ".", "delete", "(", "name", "=", "full_name", ")", "response", "=", "delete_request", ".", "execute", "(", ")", "get_request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "operations", "(", ")", ".", "get", "(", "name", "=", "response", "[", "'name'", "]", ")", "return", "_poll_with_exponential_delay", "(", "request", "=", "get_request", ",", "max_n", "=", "9", ",", "is_done_func", "=", "lambda", "resp", ":", "resp", ".", "get", "(", "'done'", ",", "False", ")", ",", "is_error_func", "=", "lambda", "resp", ":", "resp", ".", "get", "(", "'error'", ",", "None", ")", "is", "not", "None", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
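A sketch tying together the version-management records above (create_version, set_default_version, list_versions, delete_version). The project, model, version names, and bucket path are hypothetical, and the version spec fields are standard Cloud ML Engine version fields rather than anything prescribed by the hook.

# Sketch: deploy a new model version, promote it, inspect, and clean up.
from airflow.contrib.hooks.gcp_mlengine_hook import MLEngineHook

hook = MLEngineHook()
project_id = 'my-gcp-project'
model_name = 'my_model'

# Create a new version from an exported SavedModel and wait for the operation.
hook.create_version(project_id, model_name, {
    'name': 'v2',
    'deploymentUri': 'gs://my-bucket/exports/v2',
    'runtimeVersion': '1.10',
})

# Promote it, then list what is deployed and remove the previous version.
hook.set_default_version(project_id, model_name, 'v2')
for version in hook.list_versions(project_id, model_name):
    print(version['name'], version.get('isDefault', False))
hook.delete_version(project_id, model_name, 'v1')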
test
MLEngineHook.create_model
Create a Model. Blocks until finished.
airflow/contrib/hooks/gcp_mlengine_hook.py
def create_model(self, project_id, model): """ Create a Model. Blocks until finished. """ if not model['name']: raise ValueError("Model name must be provided and " "could not be an empty string") project = 'projects/{}'.format(project_id) request = self._mlengine.projects().models().create( parent=project, body=model) return request.execute()
def create_model(self, project_id, model): """ Create a Model. Blocks until finished. """ if not model['name']: raise ValueError("Model name must be provided and " "could not be an empty string") project = 'projects/{}'.format(project_id) request = self._mlengine.projects().models().create( parent=project, body=model) return request.execute()
[ "Create", "a", "Model", ".", "Blocks", "until", "finished", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L244-L255
[ "def", "create_model", "(", "self", ",", "project_id", ",", "model", ")", ":", "if", "not", "model", "[", "'name'", "]", ":", "raise", "ValueError", "(", "\"Model name must be provided and \"", "\"could not be an empty string\"", ")", "project", "=", "'projects/{}'", ".", "format", "(", "project_id", ")", "request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "models", "(", ")", ".", "create", "(", "parent", "=", "project", ",", "body", "=", "model", ")", "return", "request", ".", "execute", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
MLEngineHook.get_model
Gets a Model. Blocks until finished.
airflow/contrib/hooks/gcp_mlengine_hook.py
def get_model(self, project_id, model_name): """ Gets a Model. Blocks until finished. """ if not model_name: raise ValueError("Model name must be provided and " "it could not be an empty string") full_model_name = 'projects/{}/models/{}'.format( project_id, model_name) request = self._mlengine.projects().models().get(name=full_model_name) try: return request.execute() except HttpError as e: if e.resp.status == 404: self.log.error('Model was not found: %s', e) return None raise
def get_model(self, project_id, model_name): """ Gets a Model. Blocks until finished. """ if not model_name: raise ValueError("Model name must be provided and " "it could not be an empty string") full_model_name = 'projects/{}/models/{}'.format( project_id, model_name) request = self._mlengine.projects().models().get(name=full_model_name) try: return request.execute() except HttpError as e: if e.resp.status == 404: self.log.error('Model was not found: %s', e) return None raise
[ "Gets", "a", "Model", ".", "Blocks", "until", "finished", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_mlengine_hook.py#L257-L273
[ "def", "get_model", "(", "self", ",", "project_id", ",", "model_name", ")", ":", "if", "not", "model_name", ":", "raise", "ValueError", "(", "\"Model name must be provided and \"", "\"it could not be an empty string\"", ")", "full_model_name", "=", "'projects/{}/models/{}'", ".", "format", "(", "project_id", ",", "model_name", ")", "request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "models", "(", ")", ".", "get", "(", "name", "=", "full_model_name", ")", "try", ":", "return", "request", ".", "execute", "(", ")", "except", "HttpError", "as", "e", ":", "if", "e", ".", "resp", ".", "status", "==", "404", ":", "self", ".", "log", ".", "error", "(", "'Model was not found: %s'", ",", "e", ")", "return", "None", "raise" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
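A short sketch for create_model and get_model from the records above; the project id, model name, and description are hypothetical.

# Sketch: register a model resource, then look it up.
from airflow.contrib.hooks.gcp_mlengine_hook import MLEngineHook

hook = MLEngineHook()

# create_model requires a non-empty 'name'; other fields are optional.
hook.create_model('my-gcp-project', {'name': 'churn_model',
                                     'description': 'Weekly churn scores'})

# get_model returns None (after logging the error) when the model does not exist.
model = hook.get_model('my-gcp-project', 'churn_model')
if model is not None:
    print(model['name'])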
test
LocalWorker.execute_work
Executes command received and stores result state in queue. :param key: the key to identify the TI :type key: tuple(dag_id, task_id, execution_date) :param command: the command to execute :type command: str
airflow/executors/local_executor.py
def execute_work(self, key, command): """ Executes command received and stores result state in queue. :param key: the key to identify the TI :type key: tuple(dag_id, task_id, execution_date) :param command: the command to execute :type command: str """ if key is None: return self.log.info("%s running %s", self.__class__.__name__, command) try: subprocess.check_call(command, close_fds=True) state = State.SUCCESS except subprocess.CalledProcessError as e: state = State.FAILED self.log.error("Failed to execute task %s.", str(e)) # TODO: Why is this commented out? # raise e self.result_queue.put((key, state))
def execute_work(self, key, command): """ Executes command received and stores result state in queue. :param key: the key to identify the TI :type key: tuple(dag_id, task_id, execution_date) :param command: the command to execute :type command: str """ if key is None: return self.log.info("%s running %s", self.__class__.__name__, command) try: subprocess.check_call(command, close_fds=True) state = State.SUCCESS except subprocess.CalledProcessError as e: state = State.FAILED self.log.error("Failed to execute task %s.", str(e)) # TODO: Why is this commented out? # raise e self.result_queue.put((key, state))
[ "Executes", "command", "received", "and", "stores", "result", "state", "in", "queue", ".", ":", "param", "key", ":", "the", "key", "to", "identify", "the", "TI", ":", "type", "key", ":", "tuple", "(", "dag_id", "task_id", "execution_date", ")", ":", "param", "command", ":", "the", "command", "to", "execute", ":", "type", "command", ":", "str" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/local_executor.py#L73-L92
[ "def", "execute_work", "(", "self", ",", "key", ",", "command", ")", ":", "if", "key", "is", "None", ":", "return", "self", ".", "log", ".", "info", "(", "\"%s running %s\"", ",", "self", ".", "__class__", ".", "__name__", ",", "command", ")", "try", ":", "subprocess", ".", "check_call", "(", "command", ",", "close_fds", "=", "True", ")", "state", "=", "State", ".", "SUCCESS", "except", "subprocess", ".", "CalledProcessError", "as", "e", ":", "state", "=", "State", ".", "FAILED", "self", ".", "log", ".", "error", "(", "\"Failed to execute task %s.\"", ",", "str", "(", "e", ")", ")", "# TODO: Why is this commented out?", "# raise e", "self", ".", "result_queue", ".", "put", "(", "(", "key", ",", "state", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AwsDynamoDBHook.write_batch_data
Write batch items to dynamodb table with provisioned throughput capacity.
airflow/contrib/hooks/aws_dynamodb_hook.py
def write_batch_data(self, items): """ Write batch items to dynamodb table with provisioned throughout capacity. """ dynamodb_conn = self.get_conn() try: table = dynamodb_conn.Table(self.table_name) with table.batch_writer(overwrite_by_pkeys=self.table_keys) as batch: for item in items: batch.put_item(Item=item) return True except Exception as general_error: raise AirflowException( 'Failed to insert items in dynamodb, error: {error}'.format( error=str(general_error) ) )
def write_batch_data(self, items): """ Write batch items to dynamodb table with provisioned throughout capacity. """ dynamodb_conn = self.get_conn() try: table = dynamodb_conn.Table(self.table_name) with table.batch_writer(overwrite_by_pkeys=self.table_keys) as batch: for item in items: batch.put_item(Item=item) return True except Exception as general_error: raise AirflowException( 'Failed to insert items in dynamodb, error: {error}'.format( error=str(general_error) ) )
[ "Write", "batch", "items", "to", "dynamodb", "table", "with", "provisioned", "throughout", "capacity", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_dynamodb_hook.py#L50-L69
[ "def", "write_batch_data", "(", "self", ",", "items", ")", ":", "dynamodb_conn", "=", "self", ".", "get_conn", "(", ")", "try", ":", "table", "=", "dynamodb_conn", ".", "Table", "(", "self", ".", "table_name", ")", "with", "table", ".", "batch_writer", "(", "overwrite_by_pkeys", "=", "self", ".", "table_keys", ")", "as", "batch", ":", "for", "item", "in", "items", ":", "batch", ".", "put_item", "(", "Item", "=", "item", ")", "return", "True", "except", "Exception", "as", "general_error", ":", "raise", "AirflowException", "(", "'Failed to insert items in dynamodb, error: {error}'", ".", "format", "(", "error", "=", "str", "(", "general_error", ")", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
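A usage sketch for write_batch_data; the table name, key schema, region, and items are hypothetical, and the constructor keywords are assumed to match the attributes (table_name, table_keys) that write_batch_data reads.

# Sketch: batch-write items into a DynamoDB table via the contrib hook.
from airflow.contrib.hooks.aws_dynamodb_hook import AwsDynamoDBHook

hook = AwsDynamoDBHook(
    table_name='events',
    table_keys=['event_id'],   # forwarded to batch_writer(overwrite_by_pkeys=...)
    region_name='us-east-1',
)

items = [
    {'event_id': 'evt-001', 'source': 'web', 'payload': '{"clicks": 3}'},
    {'event_id': 'evt-002', 'source': 'mobile', 'payload': '{"clicks": 1}'},
]

# Raises AirflowException (wrapping the underlying boto3 error) if any put fails.
hook.write_batch_data(items)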
test
_integrate_plugins
Integrate plugins into the context.
airflow/executors/__init__.py
def _integrate_plugins(): """Integrate plugins to the context.""" from airflow.plugins_manager import executors_modules for executors_module in executors_modules: sys.modules[executors_module.__name__] = executors_module globals()[executors_module._name] = executors_module
def _integrate_plugins(): """Integrate plugins to the context.""" from airflow.plugins_manager import executors_modules for executors_module in executors_modules: sys.modules[executors_module.__name__] = executors_module globals()[executors_module._name] = executors_module
[ "Integrate", "plugins", "to", "the", "context", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/__init__.py#L31-L36
[ "def", "_integrate_plugins", "(", ")", ":", "from", "airflow", ".", "plugins_manager", "import", "executors_modules", "for", "executors_module", "in", "executors_modules", ":", "sys", ".", "modules", "[", "executors_module", ".", "__name__", "]", "=", "executors_module", "globals", "(", ")", "[", "executors_module", ".", "_name", "]", "=", "executors_module" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
get_default_executor
Creates a new instance of the configured executor if none exists and returns it
airflow/executors/__init__.py
def get_default_executor(): """Creates a new instance of the configured executor if none exists and returns it""" global DEFAULT_EXECUTOR if DEFAULT_EXECUTOR is not None: return DEFAULT_EXECUTOR executor_name = configuration.conf.get('core', 'EXECUTOR') DEFAULT_EXECUTOR = _get_executor(executor_name) log = LoggingMixin().log log.info("Using executor %s", executor_name) return DEFAULT_EXECUTOR
def get_default_executor(): """Creates a new instance of the configured executor if none exists and returns it""" global DEFAULT_EXECUTOR if DEFAULT_EXECUTOR is not None: return DEFAULT_EXECUTOR executor_name = configuration.conf.get('core', 'EXECUTOR') DEFAULT_EXECUTOR = _get_executor(executor_name) log = LoggingMixin().log log.info("Using executor %s", executor_name) return DEFAULT_EXECUTOR
[ "Creates", "a", "new", "instance", "of", "the", "configured", "executor", "if", "none", "exists", "and", "returns", "it" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/__init__.py#L39-L53
[ "def", "get_default_executor", "(", ")", ":", "global", "DEFAULT_EXECUTOR", "if", "DEFAULT_EXECUTOR", "is", "not", "None", ":", "return", "DEFAULT_EXECUTOR", "executor_name", "=", "configuration", ".", "conf", ".", "get", "(", "'core'", ",", "'EXECUTOR'", ")", "DEFAULT_EXECUTOR", "=", "_get_executor", "(", "executor_name", ")", "log", "=", "LoggingMixin", "(", ")", ".", "log", "log", ".", "info", "(", "\"Using executor %s\"", ",", "executor_name", ")", "return", "DEFAULT_EXECUTOR" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
_get_executor
Creates a new instance of the named executor. In case the executor name is not known in Airflow, look for it in the plugins
airflow/executors/__init__.py
def _get_executor(executor_name): """ Creates a new instance of the named executor. In case the executor name is not know in airflow, look for it in the plugins """ if executor_name == Executors.LocalExecutor: return LocalExecutor() elif executor_name == Executors.SequentialExecutor: return SequentialExecutor() elif executor_name == Executors.CeleryExecutor: from airflow.executors.celery_executor import CeleryExecutor return CeleryExecutor() elif executor_name == Executors.DaskExecutor: from airflow.executors.dask_executor import DaskExecutor return DaskExecutor() elif executor_name == Executors.KubernetesExecutor: from airflow.contrib.executors.kubernetes_executor import KubernetesExecutor return KubernetesExecutor() else: # Loading plugins _integrate_plugins() executor_path = executor_name.split('.') if len(executor_path) != 2: raise AirflowException( "Executor {0} not supported: " "please specify in format plugin_module.executor".format(executor_name)) if executor_path[0] in globals(): return globals()[executor_path[0]].__dict__[executor_path[1]]() else: raise AirflowException("Executor {0} not supported.".format(executor_name))
def _get_executor(executor_name): """ Creates a new instance of the named executor. In case the executor name is not know in airflow, look for it in the plugins """ if executor_name == Executors.LocalExecutor: return LocalExecutor() elif executor_name == Executors.SequentialExecutor: return SequentialExecutor() elif executor_name == Executors.CeleryExecutor: from airflow.executors.celery_executor import CeleryExecutor return CeleryExecutor() elif executor_name == Executors.DaskExecutor: from airflow.executors.dask_executor import DaskExecutor return DaskExecutor() elif executor_name == Executors.KubernetesExecutor: from airflow.contrib.executors.kubernetes_executor import KubernetesExecutor return KubernetesExecutor() else: # Loading plugins _integrate_plugins() executor_path = executor_name.split('.') if len(executor_path) != 2: raise AirflowException( "Executor {0} not supported: " "please specify in format plugin_module.executor".format(executor_name)) if executor_path[0] in globals(): return globals()[executor_path[0]].__dict__[executor_path[1]]() else: raise AirflowException("Executor {0} not supported.".format(executor_name))
[ "Creates", "a", "new", "instance", "of", "the", "named", "executor", ".", "In", "case", "the", "executor", "name", "is", "not", "know", "in", "airflow", "look", "for", "it", "in", "the", "plugins" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/executors/__init__.py#L64-L95
[ "def", "_get_executor", "(", "executor_name", ")", ":", "if", "executor_name", "==", "Executors", ".", "LocalExecutor", ":", "return", "LocalExecutor", "(", ")", "elif", "executor_name", "==", "Executors", ".", "SequentialExecutor", ":", "return", "SequentialExecutor", "(", ")", "elif", "executor_name", "==", "Executors", ".", "CeleryExecutor", ":", "from", "airflow", ".", "executors", ".", "celery_executor", "import", "CeleryExecutor", "return", "CeleryExecutor", "(", ")", "elif", "executor_name", "==", "Executors", ".", "DaskExecutor", ":", "from", "airflow", ".", "executors", ".", "dask_executor", "import", "DaskExecutor", "return", "DaskExecutor", "(", ")", "elif", "executor_name", "==", "Executors", ".", "KubernetesExecutor", ":", "from", "airflow", ".", "contrib", ".", "executors", ".", "kubernetes_executor", "import", "KubernetesExecutor", "return", "KubernetesExecutor", "(", ")", "else", ":", "# Loading plugins", "_integrate_plugins", "(", ")", "executor_path", "=", "executor_name", ".", "split", "(", "'.'", ")", "if", "len", "(", "executor_path", ")", "!=", "2", ":", "raise", "AirflowException", "(", "\"Executor {0} not supported: \"", "\"please specify in format plugin_module.executor\"", ".", "format", "(", "executor_name", ")", ")", "if", "executor_path", "[", "0", "]", "in", "globals", "(", ")", ":", "return", "globals", "(", ")", "[", "executor_path", "[", "0", "]", "]", ".", "__dict__", "[", "executor_path", "[", "1", "]", "]", "(", ")", "else", ":", "raise", "AirflowException", "(", "\"Executor {0} not supported.\"", ".", "format", "(", "executor_name", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
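A sketch of resolving executors by name, including the plugin_module.executor format that _get_executor enforces above. The plugin module 'my_executors_plugin' and class 'MyExecutor' are hypothetical and would have to be registered through an AirflowPlugin's executors attribute before the lookup succeeds.

# Sketch: built-in lookup, plugin-style lookup, and the configured default.
from airflow.executors import _get_executor, get_default_executor

# Built-in names map straight to the bundled executor classes.
local = _get_executor('LocalExecutor')

# Anything else must be '<plugin module>.<executor class>'; a bare unknown
# name (no dot) raises AirflowException with the expected format.
custom = _get_executor('my_executors_plugin.MyExecutor')

# Most callers simply ask for the executor configured under [core] executor.
executor = get_default_executor()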
test
SegmentHook.on_error
Handles error callbacks when using Segment with segment_debug_mode set to True
airflow/contrib/hooks/segment_hook.py
def on_error(self, error, items): """ Handles error callbacks when using Segment with segment_debug_mode set to True """ self.log.error('Encountered Segment error: {segment_error} with ' 'items: {with_items}'.format(segment_error=error, with_items=items)) raise AirflowException('Segment error: {}'.format(error))
def on_error(self, error, items): """ Handles error callbacks when using Segment with segment_debug_mode set to True """ self.log.error('Encountered Segment error: {segment_error} with ' 'items: {with_items}'.format(segment_error=error, with_items=items)) raise AirflowException('Segment error: {}'.format(error))
[ "Handles", "error", "callbacks", "when", "using", "Segment", "with", "segment_debug_mode", "set", "to", "True" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/segment_hook.py#L83-L90
[ "def", "on_error", "(", "self", ",", "error", ",", "items", ")", ":", "self", ".", "log", ".", "error", "(", "'Encountered Segment error: {segment_error} with '", "'items: {with_items}'", ".", "format", "(", "segment_error", "=", "error", ",", "with_items", "=", "items", ")", ")", "raise", "AirflowException", "(", "'Segment error: {}'", ".", "format", "(", "error", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PodLauncher.run_pod
Launches the pod synchronously and waits for completion. Args: pod (Pod): startup_timeout (int): Timeout for startup of the pod (if the pod is pending for too long, the task is considered a failure)
airflow/contrib/kubernetes/pod_launcher.py
def run_pod(self, pod, startup_timeout=120, get_logs=True): # type: (Pod, int, bool) -> Tuple[State, Optional[str]] """ Launches the pod synchronously and waits for completion. Args: pod (Pod): startup_timeout (int): Timeout for startup of the pod (if pod is pending for too long, considers task a failure """ resp = self.run_pod_async(pod) curr_time = dt.now() if resp.status.start_time is None: while self.pod_not_started(pod): delta = dt.now() - curr_time if delta.seconds >= startup_timeout: raise AirflowException("Pod took too long to start") time.sleep(1) self.log.debug('Pod not yet started') return self._monitor_pod(pod, get_logs)
def run_pod(self, pod, startup_timeout=120, get_logs=True): # type: (Pod, int, bool) -> Tuple[State, Optional[str]] """ Launches the pod synchronously and waits for completion. Args: pod (Pod): startup_timeout (int): Timeout for startup of the pod (if pod is pending for too long, considers task a failure """ resp = self.run_pod_async(pod) curr_time = dt.now() if resp.status.start_time is None: while self.pod_not_started(pod): delta = dt.now() - curr_time if delta.seconds >= startup_timeout: raise AirflowException("Pod took too long to start") time.sleep(1) self.log.debug('Pod not yet started') return self._monitor_pod(pod, get_logs)
[ "Launches", "the", "pod", "synchronously", "and", "waits", "for", "completion", ".", "Args", ":", "pod", "(", "Pod", ")", ":", "startup_timeout", "(", "int", ")", ":", "Timeout", "for", "startup", "of", "the", "pod", "(", "if", "pod", "is", "pending", "for", "too", "long", "considers", "task", "a", "failure" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/kubernetes/pod_launcher.py#L73-L92
[ "def", "run_pod", "(", "self", ",", "pod", ",", "startup_timeout", "=", "120", ",", "get_logs", "=", "True", ")", ":", "# type: (Pod, int, bool) -> Tuple[State, Optional[str]]", "resp", "=", "self", ".", "run_pod_async", "(", "pod", ")", "curr_time", "=", "dt", ".", "now", "(", ")", "if", "resp", ".", "status", ".", "start_time", "is", "None", ":", "while", "self", ".", "pod_not_started", "(", "pod", ")", ":", "delta", "=", "dt", ".", "now", "(", ")", "-", "curr_time", "if", "delta", ".", "seconds", ">=", "startup_timeout", ":", "raise", "AirflowException", "(", "\"Pod took too long to start\"", ")", "time", ".", "sleep", "(", "1", ")", "self", ".", "log", ".", "debug", "(", "'Pod not yet started'", ")", "return", "self", ".", "_monitor_pod", "(", "pod", ",", "get_logs", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
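A hedged sketch of driving run_pod directly. The Pod import path and constructor arguments (image, envs, cmds, labels, name, namespace) are assumptions about the contrib kubernetes package, and a reachable cluster usable by the default Kubernetes client is presumed; this is not the operator-level API, just the launcher shown above.

from airflow.contrib.kubernetes.pod import Pod
from airflow.contrib.kubernetes.pod_launcher import PodLauncher

launcher = PodLauncher()
pod = Pod(image='python:3.6',
          envs={},
          cmds=['python', '-c', 'print("hello")'],
          labels={'app': 'example'},
          name='example-pod',           # placeholder names
          namespace='default')
# Blocks until the pod leaves the pending phase, then monitors it to completion.
state, result = launcher.run_pod(pod, startup_timeout=120, get_logs=True)
print(state)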
test
MsSqlHook.get_conn
Returns an mssql connection object
airflow/hooks/mssql_hook.py
def get_conn(self): """ Returns a mssql connection object """ conn = self.get_connection(self.mssql_conn_id) conn = pymssql.connect( server=conn.host, user=conn.login, password=conn.password, database=self.schema or conn.schema, port=conn.port) return conn
def get_conn(self): """ Returns a mssql connection object """ conn = self.get_connection(self.mssql_conn_id) conn = pymssql.connect( server=conn.host, user=conn.login, password=conn.password, database=self.schema or conn.schema, port=conn.port) return conn
[ "Returns", "a", "mssql", "connection", "object" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/mssql_hook.py#L38-L49
[ "def", "get_conn", "(", "self", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "mssql_conn_id", ")", "conn", "=", "pymssql", ".", "connect", "(", "server", "=", "conn", ".", "host", ",", "user", "=", "conn", ".", "login", ",", "password", "=", "conn", ".", "password", ",", "database", "=", "self", ".", "schema", "or", "conn", ".", "schema", ",", "port", "=", "conn", ".", "port", ")", "return", "conn" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
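A short usage sketch for the connection returned above. The mssql_conn_id and schema values are placeholders and assume an MSSQL connection is already configured in Airflow.

from airflow.hooks.mssql_hook import MsSqlHook

hook = MsSqlHook(mssql_conn_id='mssql_default', schema='mydb')  # placeholders
conn = hook.get_conn()
cursor = conn.cursor()
cursor.execute('SELECT GETDATE()')   # any quick query works here
print(cursor.fetchone())
conn.close()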
test
SparkSubmitOperator.execute
Call the SparkSubmitHook to run the provided spark job
airflow/contrib/operators/spark_submit_operator.py
def execute(self, context): """ Call the SparkSubmitHook to run the provided spark job """ self._hook = SparkSubmitHook( conf=self._conf, conn_id=self._conn_id, files=self._files, py_files=self._py_files, archives=self._archives, driver_class_path=self._driver_class_path, jars=self._jars, java_class=self._java_class, packages=self._packages, exclude_packages=self._exclude_packages, repositories=self._repositories, total_executor_cores=self._total_executor_cores, executor_cores=self._executor_cores, executor_memory=self._executor_memory, driver_memory=self._driver_memory, keytab=self._keytab, principal=self._principal, name=self._name, num_executors=self._num_executors, application_args=self._application_args, env_vars=self._env_vars, verbose=self._verbose, spark_binary=self._spark_binary ) self._hook.submit(self._application)
def execute(self, context): """ Call the SparkSubmitHook to run the provided spark job """ self._hook = SparkSubmitHook( conf=self._conf, conn_id=self._conn_id, files=self._files, py_files=self._py_files, archives=self._archives, driver_class_path=self._driver_class_path, jars=self._jars, java_class=self._java_class, packages=self._packages, exclude_packages=self._exclude_packages, repositories=self._repositories, total_executor_cores=self._total_executor_cores, executor_cores=self._executor_cores, executor_memory=self._executor_memory, driver_memory=self._driver_memory, keytab=self._keytab, principal=self._principal, name=self._name, num_executors=self._num_executors, application_args=self._application_args, env_vars=self._env_vars, verbose=self._verbose, spark_binary=self._spark_binary ) self._hook.submit(self._application)
[ "Call", "the", "SparkSubmitHook", "to", "run", "the", "provided", "spark", "job" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/spark_submit_operator.py#L147-L176
[ "def", "execute", "(", "self", ",", "context", ")", ":", "self", ".", "_hook", "=", "SparkSubmitHook", "(", "conf", "=", "self", ".", "_conf", ",", "conn_id", "=", "self", ".", "_conn_id", ",", "files", "=", "self", ".", "_files", ",", "py_files", "=", "self", ".", "_py_files", ",", "archives", "=", "self", ".", "_archives", ",", "driver_class_path", "=", "self", ".", "_driver_class_path", ",", "jars", "=", "self", ".", "_jars", ",", "java_class", "=", "self", ".", "_java_class", ",", "packages", "=", "self", ".", "_packages", ",", "exclude_packages", "=", "self", ".", "_exclude_packages", ",", "repositories", "=", "self", ".", "_repositories", ",", "total_executor_cores", "=", "self", ".", "_total_executor_cores", ",", "executor_cores", "=", "self", ".", "_executor_cores", ",", "executor_memory", "=", "self", ".", "_executor_memory", ",", "driver_memory", "=", "self", ".", "_driver_memory", ",", "keytab", "=", "self", ".", "_keytab", ",", "principal", "=", "self", ".", "_principal", ",", "name", "=", "self", ".", "_name", ",", "num_executors", "=", "self", ".", "_num_executors", ",", "application_args", "=", "self", ".", "_application_args", ",", "env_vars", "=", "self", ".", "_env_vars", ",", "verbose", "=", "self", ".", "_verbose", ",", "spark_binary", "=", "self", ".", "_spark_binary", ")", "self", ".", "_hook", ".", "submit", "(", "self", ".", "_application", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
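An illustrative DAG snippet using only parameters that appear in the hook call above; the application path, connection id and dates are placeholders, and the DAG itself is a sketch rather than a recommended configuration.

from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.spark_submit_operator import SparkSubmitOperator

with DAG(dag_id='spark_submit_example',
         start_date=datetime(2019, 1, 1),
         schedule_interval=None) as dag:
    submit_job = SparkSubmitOperator(
        task_id='submit_job',
        application='/path/to/app.py',          # placeholder application
        conn_id='spark_default',
        name='airflow-spark-example',
        executor_cores=2,
        executor_memory='2g',
        application_args=['--date', '{{ ds }}'],
    )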
test
trigger_dag
Trigger a new dag run for a Dag with an execution date of now unless specified in the data.
airflow/www/api/experimental/endpoints.py
def trigger_dag(dag_id): """ Trigger a new dag run for a Dag with an execution date of now unless specified in the data. """ data = request.get_json(force=True) run_id = None if 'run_id' in data: run_id = data['run_id'] conf = None if 'conf' in data: conf = data['conf'] execution_date = None if 'execution_date' in data and data['execution_date'] is not None: execution_date = data['execution_date'] # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00' .format(execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: dr = trigger.trigger_dag(dag_id, run_id, conf, execution_date) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response if getattr(g, 'user', None): _log.info("User %s created %s", g.user, dr) response = jsonify(message="Created {}".format(dr)) return response
def trigger_dag(dag_id): """ Trigger a new dag run for a Dag with an execution date of now unless specified in the data. """ data = request.get_json(force=True) run_id = None if 'run_id' in data: run_id = data['run_id'] conf = None if 'conf' in data: conf = data['conf'] execution_date = None if 'execution_date' in data and data['execution_date'] is not None: execution_date = data['execution_date'] # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00' .format(execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: dr = trigger.trigger_dag(dag_id, run_id, conf, execution_date) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response if getattr(g, 'user', None): _log.info("User %s created %s", g.user, dr) response = jsonify(message="Created {}".format(dr)) return response
[ "Trigger", "a", "new", "dag", "run", "for", "a", "Dag", "with", "an", "execution", "date", "of", "now", "unless", "specified", "in", "the", "data", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L47-L92
[ "def", "trigger_dag", "(", "dag_id", ")", ":", "data", "=", "request", ".", "get_json", "(", "force", "=", "True", ")", "run_id", "=", "None", "if", "'run_id'", "in", "data", ":", "run_id", "=", "data", "[", "'run_id'", "]", "conf", "=", "None", "if", "'conf'", "in", "data", ":", "conf", "=", "data", "[", "'conf'", "]", "execution_date", "=", "None", "if", "'execution_date'", "in", "data", "and", "data", "[", "'execution_date'", "]", "is", "not", "None", ":", "execution_date", "=", "data", "[", "'execution_date'", "]", "# Convert string datetime into actual datetime", "try", ":", "execution_date", "=", "timezone", ".", "parse", "(", "execution_date", ")", "except", "ValueError", ":", "error_message", "=", "(", "'Given execution date, {}, could not be identified '", "'as a date. Example date format: 2015-11-16T14:34:15+00:00'", ".", "format", "(", "execution_date", ")", ")", "_log", ".", "info", "(", "error_message", ")", "response", "=", "jsonify", "(", "{", "'error'", ":", "error_message", "}", ")", "response", ".", "status_code", "=", "400", "return", "response", "try", ":", "dr", "=", "trigger", ".", "trigger_dag", "(", "dag_id", ",", "run_id", ",", "conf", ",", "execution_date", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "error", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response", "if", "getattr", "(", "g", ",", "'user'", ",", "None", ")", ":", "_log", ".", "info", "(", "\"User %s created %s\"", ",", "g", ".", "user", ",", "dr", ")", "response", "=", "jsonify", "(", "message", "=", "\"Created {}\"", ".", "format", "(", "dr", ")", ")", "return", "response" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
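A hedged client-side call with requests. The base URL is a placeholder and the route (POST /api/experimental/dags/<dag_id>/dag_runs) is assumed from the experimental API layout; the body keys mirror the run_id, conf and execution_date fields read by the handler above.

import requests

resp = requests.post(
    'http://localhost:8080/api/experimental/dags/example_dag/dag_runs',
    json={'conf': {'key': 'value'},
          'execution_date': '2015-11-16T14:34:15+00:00'},
)
print(resp.status_code, resp.json())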
test
delete_dag
Delete all DB records related to the specified Dag.
airflow/www/api/experimental/endpoints.py
def delete_dag(dag_id): """ Delete all DB records related to the specified Dag. """ try: count = delete.delete_dag(dag_id) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response return jsonify(message="Removed {} record(s)".format(count), count=count)
def delete_dag(dag_id): """ Delete all DB records related to the specified Dag. """ try: count = delete.delete_dag(dag_id) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response return jsonify(message="Removed {} record(s)".format(count), count=count)
[ "Delete", "all", "DB", "records", "related", "to", "the", "specified", "Dag", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L98-L109
[ "def", "delete_dag", "(", "dag_id", ")", ":", "try", ":", "count", "=", "delete", ".", "delete_dag", "(", "dag_id", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "error", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response", "return", "jsonify", "(", "message", "=", "\"Removed {} record(s)\"", ".", "format", "(", "count", ")", ",", "count", "=", "count", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
dag_runs
Returns a list of Dag Runs for a specific DAG ID. :query param state: a query string parameter '?state=queued|running|success...' :param dag_id: String identifier of a DAG :return: List of DAG runs of a DAG with requested state, or all runs if the state is not specified
airflow/www/api/experimental/endpoints.py
def dag_runs(dag_id): """ Returns a list of Dag Runs for a specific DAG ID. :query param state: a query string parameter '?state=queued|running|success...' :param dag_id: String identifier of a DAG :return: List of DAG runs of a DAG with requested state, or all runs if the state is not specified """ try: state = request.args.get('state') dagruns = get_dag_runs(dag_id, state) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = 400 return response return jsonify(dagruns)
def dag_runs(dag_id): """ Returns a list of Dag Runs for a specific DAG ID. :query param state: a query string parameter '?state=queued|running|success...' :param dag_id: String identifier of a DAG :return: List of DAG runs of a DAG with requested state, or all runs if the state is not specified """ try: state = request.args.get('state') dagruns = get_dag_runs(dag_id, state) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = 400 return response return jsonify(dagruns)
[ "Returns", "a", "list", "of", "Dag", "Runs", "for", "a", "specific", "DAG", "ID", ".", ":", "query", "param", "state", ":", "a", "query", "string", "parameter", "?state", "=", "queued|running|success", "...", ":", "param", "dag_id", ":", "String", "identifier", "of", "a", "DAG", ":", "return", ":", "List", "of", "DAG", "runs", "of", "a", "DAG", "with", "requested", "state", "or", "all", "runs", "if", "the", "state", "is", "not", "specified" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L114-L131
[ "def", "dag_runs", "(", "dag_id", ")", ":", "try", ":", "state", "=", "request", ".", "args", ".", "get", "(", "'state'", ")", "dagruns", "=", "get_dag_runs", "(", "dag_id", ",", "state", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "info", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "400", "return", "response", "return", "jsonify", "(", "dagruns", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
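A hedged example of the state query parameter described in the docstring above; host and route are the same assumptions as in the trigger_dag snippet.

import requests

resp = requests.get(
    'http://localhost:8080/api/experimental/dags/example_dag/dag_runs',
    params={'state': 'running'},   # filter; omit to get all runs
)
for dag_run in resp.json():
    print(dag_run)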
test
get_dag_code
Return python code of a given dag_id.
airflow/www/api/experimental/endpoints.py
def get_dag_code(dag_id): """Return python code of a given dag_id.""" try: return get_code(dag_id) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response
def get_dag_code(dag_id): """Return python code of a given dag_id.""" try: return get_code(dag_id) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response
[ "Return", "python", "code", "of", "a", "given", "dag_id", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L142-L150
[ "def", "get_dag_code", "(", "dag_id", ")", ":", "try", ":", "return", "get_code", "(", "dag_id", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "info", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
task_info
Returns a JSON with a task's public instance variables.
airflow/www/api/experimental/endpoints.py
def task_info(dag_id, task_id): """Returns a JSON with a task's public instance variables. """ try: info = get_task(dag_id, task_id) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response # JSONify and return. fields = {k: str(v) for k, v in vars(info).items() if not k.startswith('_')} return jsonify(fields)
def task_info(dag_id, task_id): """Returns a JSON with a task's public instance variables. """ try: info = get_task(dag_id, task_id) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response # JSONify and return. fields = {k: str(v) for k, v in vars(info).items() if not k.startswith('_')} return jsonify(fields)
[ "Returns", "a", "JSON", "with", "a", "task", "s", "public", "instance", "variables", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L155-L169
[ "def", "task_info", "(", "dag_id", ",", "task_id", ")", ":", "try", ":", "info", "=", "get_task", "(", "dag_id", ",", "task_id", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "info", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response", "# JSONify and return.", "fields", "=", "{", "k", ":", "str", "(", "v", ")", "for", "k", ",", "v", "in", "vars", "(", "info", ")", ".", "items", "(", ")", "if", "not", "k", ".", "startswith", "(", "'_'", ")", "}", "return", "jsonify", "(", "fields", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
dag_paused
(Un)pauses a dag
airflow/www/api/experimental/endpoints.py
def dag_paused(dag_id, paused): """(Un)pauses a dag""" DagModel = models.DagModel with create_session() as session: orm_dag = ( session.query(DagModel) .filter(DagModel.dag_id == dag_id).first() ) if paused == 'true': orm_dag.is_paused = True else: orm_dag.is_paused = False session.merge(orm_dag) session.commit() return jsonify({'response': 'ok'})
def dag_paused(dag_id, paused): """(Un)pauses a dag""" DagModel = models.DagModel with create_session() as session: orm_dag = ( session.query(DagModel) .filter(DagModel.dag_id == dag_id).first() ) if paused == 'true': orm_dag.is_paused = True else: orm_dag.is_paused = False session.merge(orm_dag) session.commit() return jsonify({'response': 'ok'})
[ "(", "Un", ")", "pauses", "a", "dag" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L175-L191
[ "def", "dag_paused", "(", "dag_id", ",", "paused", ")", ":", "DagModel", "=", "models", ".", "DagModel", "with", "create_session", "(", ")", "as", "session", ":", "orm_dag", "=", "(", "session", ".", "query", "(", "DagModel", ")", ".", "filter", "(", "DagModel", ".", "dag_id", "==", "dag_id", ")", ".", "first", "(", ")", ")", "if", "paused", "==", "'true'", ":", "orm_dag", ".", "is_paused", "=", "True", "else", ":", "orm_dag", ".", "is_paused", "=", "False", "session", ".", "merge", "(", "orm_dag", ")", "session", ".", "commit", "(", ")", "return", "jsonify", "(", "{", "'response'", ":", "'ok'", "}", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
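A hedged sketch of flipping the flag through the string path segment the handler compares against 'true'; the HTTP method and route shape are assumptions.

import requests

base = 'http://localhost:8080/api/experimental/dags/example_dag/paused'
requests.get(base + '/true')    # pause the DAG
requests.get(base + '/false')   # unpause it again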
test
task_instance_info
Returns a JSON with a task instance's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will of course need to have been encoded for URL in the request.
airflow/www/api/experimental/endpoints.py
def task_instance_info(dag_id, execution_date, task_id): """ Returns a JSON with a task instance's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will of course need to have been encoded for URL in the request. """ # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00' .format(execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: info = get_task_instance(dag_id, task_id, execution_date) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response # JSONify and return. fields = {k: str(v) for k, v in vars(info).items() if not k.startswith('_')} return jsonify(fields)
def task_instance_info(dag_id, execution_date, task_id): """ Returns a JSON with a task instance's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will of course need to have been encoded for URL in the request. """ # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00' .format(execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: info = get_task_instance(dag_id, task_id, execution_date) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response # JSONify and return. fields = {k: str(v) for k, v in vars(info).items() if not k.startswith('_')} return jsonify(fields)
[ "Returns", "a", "JSON", "with", "a", "task", "instance", "s", "public", "instance", "variables", ".", "The", "format", "for", "the", "exec_date", "is", "expected", "to", "be", "YYYY", "-", "mm", "-", "DDTHH", ":", "MM", ":", "SS", "for", "example", ":", "2016", "-", "11", "-", "16T11", ":", "34", ":", "15", ".", "This", "will", "of", "course", "need", "to", "have", "been", "encoded", "for", "URL", "in", "the", "request", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L198-L232
[ "def", "task_instance_info", "(", "dag_id", ",", "execution_date", ",", "task_id", ")", ":", "# Convert string datetime into actual datetime", "try", ":", "execution_date", "=", "timezone", ".", "parse", "(", "execution_date", ")", "except", "ValueError", ":", "error_message", "=", "(", "'Given execution date, {}, could not be identified '", "'as a date. Example date format: 2015-11-16T14:34:15+00:00'", ".", "format", "(", "execution_date", ")", ")", "_log", ".", "info", "(", "error_message", ")", "response", "=", "jsonify", "(", "{", "'error'", ":", "error_message", "}", ")", "response", ".", "status_code", "=", "400", "return", "response", "try", ":", "info", "=", "get_task_instance", "(", "dag_id", ",", "task_id", ",", "execution_date", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "info", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response", "# JSONify and return.", "fields", "=", "{", "k", ":", "str", "(", "v", ")", "for", "k", ",", "v", "in", "vars", "(", "info", ")", ".", "items", "(", ")", "if", "not", "k", ".", "startswith", "(", "'_'", ")", "}", "return", "jsonify", "(", "fields", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
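A hedged example that URL-encodes the execution date as the docstring requires; the route is an assumption and the dag/task ids are placeholders.

import requests
from urllib.parse import quote

exec_date = quote('2016-11-16T11:34:15')   # must be URL-encoded, per the docstring
url = ('http://localhost:8080/api/experimental/dags/example_dag'
       '/dag_runs/{}/tasks/example_task').format(exec_date)
print(requests.get(url).json())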
test
dag_run_status
Returns a JSON with a dag_run's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will of course need to have been encoded for URL in the request.
airflow/www/api/experimental/endpoints.py
def dag_run_status(dag_id, execution_date): """ Returns a JSON with a dag_run's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will of course need to have been encoded for URL in the request. """ # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00'.format( execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: info = get_dag_run_state(dag_id, execution_date) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response return jsonify(info)
def dag_run_status(dag_id, execution_date): """ Returns a JSON with a dag_run's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will of course need to have been encoded for URL in the request. """ # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00'.format( execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: info = get_dag_run_state(dag_id, execution_date) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response return jsonify(info)
[ "Returns", "a", "JSON", "with", "a", "dag_run", "s", "public", "instance", "variables", ".", "The", "format", "for", "the", "exec_date", "is", "expected", "to", "be", "YYYY", "-", "mm", "-", "DDTHH", ":", "MM", ":", "SS", "for", "example", ":", "2016", "-", "11", "-", "16T11", ":", "34", ":", "15", ".", "This", "will", "of", "course", "need", "to", "have", "been", "encoded", "for", "URL", "in", "the", "request", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L239-L269
[ "def", "dag_run_status", "(", "dag_id", ",", "execution_date", ")", ":", "# Convert string datetime into actual datetime", "try", ":", "execution_date", "=", "timezone", ".", "parse", "(", "execution_date", ")", "except", "ValueError", ":", "error_message", "=", "(", "'Given execution date, {}, could not be identified '", "'as a date. Example date format: 2015-11-16T14:34:15+00:00'", ".", "format", "(", "execution_date", ")", ")", "_log", ".", "info", "(", "error_message", ")", "response", "=", "jsonify", "(", "{", "'error'", ":", "error_message", "}", ")", "response", ".", "status_code", "=", "400", "return", "response", "try", ":", "info", "=", "get_dag_run_state", "(", "dag_id", ",", "execution_date", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "info", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response", "return", "jsonify", "(", "info", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
get_pools
Get all pools.
airflow/www/api/experimental/endpoints.py
def get_pools(): """Get all pools.""" try: pools = pool_api.get_pools() except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify([p.to_json() for p in pools])
def get_pools(): """Get all pools.""" try: pools = pool_api.get_pools() except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify([p.to_json() for p in pools])
[ "Get", "all", "pools", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L309-L319
[ "def", "get_pools", "(", ")", ":", "try", ":", "pools", "=", "pool_api", ".", "get_pools", "(", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "error", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response", "else", ":", "return", "jsonify", "(", "[", "p", ".", "to_json", "(", ")", "for", "p", "in", "pools", "]", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
create_pool
Create a pool.
airflow/www/api/experimental/endpoints.py
def create_pool(): """Create a pool.""" params = request.get_json(force=True) try: pool = pool_api.create_pool(**params) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(pool.to_json())
def create_pool(): """Create a pool.""" params = request.get_json(force=True) try: pool = pool_api.create_pool(**params) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(pool.to_json())
[ "Create", "a", "pool", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L325-L336
[ "def", "create_pool", "(", ")", ":", "params", "=", "request", ".", "get_json", "(", "force", "=", "True", ")", "try", ":", "pool", "=", "pool_api", ".", "create_pool", "(", "*", "*", "params", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "error", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response", "else", ":", "return", "jsonify", "(", "pool", ".", "to_json", "(", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
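A hedged example for the pool creation endpoint: the JSON body becomes the keyword arguments forwarded to pool_api.create_pool, so the field names (name, slots, description) and the route are assumptions.

import requests

resp = requests.post(
    'http://localhost:8080/api/experimental/pools',
    json={'name': 'etl_pool', 'slots': 4, 'description': 'Pool for ETL tasks'},
)
print(resp.json())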
test
delete_pool
Delete pool.
airflow/www/api/experimental/endpoints.py
def delete_pool(name): """Delete pool.""" try: pool = pool_api.delete_pool(name=name) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(pool.to_json())
def delete_pool(name): """Delete pool.""" try: pool = pool_api.delete_pool(name=name) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(pool.to_json())
[ "Delete", "pool", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/api/experimental/endpoints.py#L342-L352
[ "def", "delete_pool", "(", "name", ")", ":", "try", ":", "pool", "=", "pool_api", ".", "delete_pool", "(", "name", "=", "name", ")", "except", "AirflowException", "as", "err", ":", "_log", ".", "error", "(", "err", ")", "response", "=", "jsonify", "(", "error", "=", "\"{}\"", ".", "format", "(", "err", ")", ")", "response", ".", "status_code", "=", "err", ".", "status_code", "return", "response", "else", ":", "return", "jsonify", "(", "pool", ".", "to_json", "(", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
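A hedged companion example that lists pools and then deletes one; the 'pool' key in the serialized output and the DELETE route are assumptions.

import requests

pools = requests.get('http://localhost:8080/api/experimental/pools').json()
print([p.get('pool') for p in pools])   # field name assumed from Pool.to_json()
requests.delete('http://localhost:8080/api/experimental/pools/etl_pool')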
test
AzureContainerInstanceHook.create_or_update
Create a new container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :param container_group: the properties of the container group :type container_group: azure.mgmt.containerinstance.models.ContainerGroup
airflow/contrib/hooks/azure_container_instance_hook.py
def create_or_update(self, resource_group, name, container_group): """ Create a new container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :param container_group: the properties of the container group :type container_group: azure.mgmt.containerinstance.models.ContainerGroup """ self.connection.container_groups.create_or_update(resource_group, name, container_group)
def create_or_update(self, resource_group, name, container_group): """ Create a new container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :param container_group: the properties of the container group :type container_group: azure.mgmt.containerinstance.models.ContainerGroup """ self.connection.container_groups.create_or_update(resource_group, name, container_group)
[ "Create", "a", "new", "container", "group" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_container_instance_hook.py#L80-L93
[ "def", "create_or_update", "(", "self", ",", "resource_group", ",", "name", ",", "container_group", ")", ":", "self", ".", "connection", ".", "container_groups", ".", "create_or_update", "(", "resource_group", ",", "name", ",", "container_group", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
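A hedged sketch of building a ContainerGroup and handing it to the hook. The model classes come from azure.mgmt.containerinstance.models as the docstring states, but the hook's constructor argument name, the resource names, image and sizes are assumptions.

from azure.mgmt.containerinstance.models import (Container, ContainerGroup,
                                                 ResourceRequests,
                                                 ResourceRequirements)

from airflow.contrib.hooks.azure_container_instance_hook import (
    AzureContainerInstanceHook)

hook = AzureContainerInstanceHook(conn_id='azure_default')   # argument name assumed
resources = ResourceRequirements(
    requests=ResourceRequests(memory_in_gb=2.0, cpu=1.0))
container = Container(name='worker', image='ubuntu:16.04', resources=resources)
group = ContainerGroup(location='westeurope',
                       containers=[container],
                       os_type='Linux',
                       restart_policy='Never')
hook.create_or_update('my-resource-group', 'my-container-group', group)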
test
AzureContainerInstanceHook.get_state_exitcode_details
Get the state and exitcode of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :return: A tuple with the state, exitcode, and details. If the exitcode is unknown 0 is returned. :rtype: tuple(state,exitcode,details)
airflow/contrib/hooks/azure_container_instance_hook.py
def get_state_exitcode_details(self, resource_group, name): """ Get the state and exitcode of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :return: A tuple with the state, exitcode, and details. If the exitcode is unknown 0 is returned. :rtype: tuple(state,exitcode,details) """ current_state = self._get_instance_view(resource_group, name).current_state return (current_state.state, current_state.exit_code, current_state.detail_status)
def get_state_exitcode_details(self, resource_group, name): """ Get the state and exitcode of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :return: A tuple with the state, exitcode, and details. If the exitcode is unknown 0 is returned. :rtype: tuple(state,exitcode,details) """ current_state = self._get_instance_view(resource_group, name).current_state return (current_state.state, current_state.exit_code, current_state.detail_status)
[ "Get", "the", "state", "and", "exitcode", "of", "a", "container", "group" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_container_instance_hook.py#L95-L110
[ "def", "get_state_exitcode_details", "(", "self", ",", "resource_group", ",", "name", ")", ":", "current_state", "=", "self", ".", "_get_instance_view", "(", "resource_group", ",", "name", ")", ".", "current_state", "return", "(", "current_state", ".", "state", ",", "current_state", ".", "exit_code", ",", "current_state", ".", "detail_status", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
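A hedged polling sketch built on the (state, exitcode, details) tuple returned above; the hook construction repeats the previous snippet's assumptions, and the 'Terminated' state string is an Azure-side value assumed here for illustration.

import time

from airflow.contrib.hooks.azure_container_instance_hook import (
    AzureContainerInstanceHook)

hook = AzureContainerInstanceHook(conn_id='azure_default')   # argument name assumed
while True:
    state, exit_code, detail = hook.get_state_exitcode_details(
        'my-resource-group', 'my-container-group')
    print(state, detail)
    if state == 'Terminated':          # assumed terminal state string
        print('exit code:', exit_code)
        break
    time.sleep(10)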