Dataset columns (each record below lists its field values in this order):

- partition: string, 3 classes
- func_name: string, 1-134 chars
- docstring: string, 1-46.9k chars
- path: string, 4-223 chars
- original_string: string, 75-104k chars
- code: string, 75-104k chars
- docstring_tokens: sequence, 1-1.97k items
- repo: string, 7-55 chars
- language: string, 1 class
- url: string, 87-315 chars
- code_tokens: sequence, 19-28.4k items
- sha: string, 40 chars
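As a rough sketch of how such rows might be consumed, assuming they have been exported as JSON Lines with one object per record and the field names listed above (the file name here is only a placeholder, not part of the dataset):

import json

# Placeholder path; the actual export location is an assumption.
ROWS_PATH = "code_search_rows.jsonl"

with open(ROWS_PATH) as f:
    for line in f:
        row = json.loads(line)
        # Each row carries the flattened source plus its tokenizations.
        if row["partition"] == "test" and row["language"] == "python":
            print(row["repo"], row["func_name"], row["sha"][:7])
            print("  docstring tokens:", len(row["docstring_tokens"]))
            print("  code tokens:     ", len(row["code_tokens"]))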
test
_normalize_mlengine_job_id
Replaces invalid MLEngine job_id characters with '_'. This also adds a leading 'z' in case job_id starts with an invalid character. Args: job_id: A job_id str that may have invalid characters. Returns: A valid job_id representation.
airflow/contrib/operators/mlengine_operator.py
original_string / code (the two fields are identical):

def _normalize_mlengine_job_id(job_id):
    """
    Replaces invalid MLEngine job_id characters with '_'.

    This also adds a leading 'z' in case job_id starts with an invalid
    character.

    Args:
        job_id: A job_id str that may have invalid characters.

    Returns:
        A valid job_id representation.
    """
    # Add a prefix when a job_id starts with a digit or a template
    match = re.search(r'\d|\{{2}', job_id)
    if match and match.start() == 0:
        job = 'z_{}'.format(job_id)
    else:
        job = job_id

    # Clean up 'bad' characters except templates
    tracker = 0
    cleansed_job_id = ''
    for m in re.finditer(r'\{{2}.+?\}{2}', job):
        cleansed_job_id += re.sub(r'[^0-9a-zA-Z]+', '_',
                                  job[tracker:m.start()])
        cleansed_job_id += job[m.start():m.end()]
        tracker = m.end()

    # Clean up last substring or the full string if no templates
    cleansed_job_id += re.sub(r'[^0-9a-zA-Z]+', '_', job[tracker:])

    return cleansed_job_id
[ "Replaces", "invalid", "MLEngine", "job_id", "characters", "with", "_", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mlengine_operator.py#L29-L62
[ "def", "_normalize_mlengine_job_id", "(", "job_id", ")", ":", "# Add a prefix when a job_id starts with a digit or a template", "match", "=", "re", ".", "search", "(", "r'\\d|\\{{2}'", ",", "job_id", ")", "if", "match", "and", "match", ".", "start", "(", ")", "==", "0", ":", "job", "=", "'z_{}'", ".", "format", "(", "job_id", ")", "else", ":", "job", "=", "job_id", "# Clean up 'bad' characters except templates", "tracker", "=", "0", "cleansed_job_id", "=", "''", "for", "m", "in", "re", ".", "finditer", "(", "r'\\{{2}.+?\\}{2}'", ",", "job", ")", ":", "cleansed_job_id", "+=", "re", ".", "sub", "(", "r'[^0-9a-zA-Z]+'", ",", "'_'", ",", "job", "[", "tracker", ":", "m", ".", "start", "(", ")", "]", ")", "cleansed_job_id", "+=", "job", "[", "m", ".", "start", "(", ")", ":", "m", ".", "end", "(", ")", "]", "tracker", "=", "m", ".", "end", "(", ")", "# Clean up last substring or the full string if no templates", "cleansed_job_id", "+=", "re", ".", "sub", "(", "r'[^0-9a-zA-Z]+'", ",", "'_'", ",", "job", "[", "tracker", ":", "]", ")", "return", "cleansed_job_id" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
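A minimal usage sketch of the cleaning behaviour shown above, assuming the function is importable from the module named in the record's path (the example inputs are hypothetical):

from airflow.contrib.operators.mlengine_operator import _normalize_mlengine_job_id

print(_normalize_mlengine_job_id('my-job.2018'))     # -> 'my_job_2018'
print(_normalize_mlengine_job_id('7day_training'))   # -> 'z_7day_training' (leading digit gets the 'z_' prefix)
print(_normalize_mlengine_job_id('train_{{ ds }}'))  # -> 'train_{{ ds }}' (Jinja templates pass through untouched)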
test
FTPSensor._get_error_code
Extract error code from ftp exception
airflow/contrib/sensors/ftp_sensor.py
original_string / code (the two fields are identical):

def _get_error_code(self, e):
    """Extract error code from ftp exception"""
    try:
        matches = self.error_code_pattern.match(str(e))
        code = int(matches.group(0))
        return code
    except ValueError:
        return e
[ "Extract", "error", "code", "from", "ftp", "exception" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/ftp_sensor.py#L69-L76
[ "def", "_get_error_code", "(", "self", ",", "e", ")", ":", "try", ":", "matches", "=", "self", ".", "error_code_pattern", ".", "match", "(", "str", "(", "e", ")", ")", "code", "=", "int", "(", "matches", ".", "group", "(", "0", ")", ")", "return", "code", "except", "ValueError", ":", "return", "e" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
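A small sketch of the parsing logic above, using a hypothetical error-code pattern and a stand-in class (the real FTPSensor defines its own compiled pattern, which may differ):

import re

class _FakeSensor:
    # Hypothetical pattern; an assumption for illustration only.
    error_code_pattern = re.compile(r"\d+")

    def _get_error_code(self, e):
        """Extract error code from ftp exception"""
        try:
            matches = self.error_code_pattern.match(str(e))
            code = int(matches.group(0))
            return code
        except ValueError:
            return e

print(_FakeSensor()._get_error_code(Exception("530 Login incorrect.")))  # -> 530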
test
_integrate_plugins
Integrate plugins to the context
airflow/sensors/__init__.py
original_string / code (the two fields are identical):

def _integrate_plugins():
    """Integrate plugins to the context"""
    import sys
    from airflow.plugins_manager import sensors_modules
    for sensors_module in sensors_modules:
        sys.modules[sensors_module.__name__] = sensors_module
        globals()[sensors_module._name] = sensors_module
[ "Integrate", "plugins", "to", "the", "context" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/sensors/__init__.py#L22-L28
[ "def", "_integrate_plugins", "(", ")", ":", "import", "sys", "from", "airflow", ".", "plugins_manager", "import", "sensors_modules", "for", "sensors_module", "in", "sensors_modules", ":", "sys", ".", "modules", "[", "sensors_module", ".", "__name__", "]", "=", "sensors_module", "globals", "(", ")", "[", "sensors_module", ".", "_name", "]", "=", "sensors_module" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
clear_dag_runs
Remove any existing DAG runs for the perf test DAGs.
scripts/perf/scheduler_ops_metrics.py
original_string / code (the two fields are identical):

def clear_dag_runs():
    """
    Remove any existing DAG runs for the perf test DAGs.
    """
    session = settings.Session()
    drs = session.query(DagRun).filter(
        DagRun.dag_id.in_(DAG_IDS),
    ).all()
    for dr in drs:
        logging.info('Deleting DagRun :: {}'.format(dr))
        session.delete(dr)
[ "Remove", "any", "existing", "DAG", "runs", "for", "the", "perf", "test", "DAGs", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/scripts/perf/scheduler_ops_metrics.py#L138-L148
[ "def", "clear_dag_runs", "(", ")", ":", "session", "=", "settings", ".", "Session", "(", ")", "drs", "=", "session", ".", "query", "(", "DagRun", ")", ".", "filter", "(", "DagRun", ".", "dag_id", ".", "in_", "(", "DAG_IDS", ")", ",", ")", ".", "all", "(", ")", "for", "dr", "in", "drs", ":", "logging", ".", "info", "(", "'Deleting DagRun :: {}'", ".", "format", "(", "dr", ")", ")", "session", ".", "delete", "(", "dr", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
clear_dag_task_instances
Remove any existing task instances for the perf test DAGs.
scripts/perf/scheduler_ops_metrics.py
original_string / code (the two fields are identical):

def clear_dag_task_instances():
    """
    Remove any existing task instances for the perf test DAGs.
    """
    session = settings.Session()
    TI = TaskInstance
    tis = (
        session
        .query(TI)
        .filter(TI.dag_id.in_(DAG_IDS))
        .all()
    )
    for ti in tis:
        logging.info('Deleting TaskInstance :: {}'.format(ti))
        session.delete(ti)
    session.commit()
[ "Remove", "any", "existing", "task", "instances", "for", "the", "perf", "test", "DAGs", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/scripts/perf/scheduler_ops_metrics.py#L151-L166
[ "def", "clear_dag_task_instances", "(", ")", ":", "session", "=", "settings", ".", "Session", "(", ")", "TI", "=", "TaskInstance", "tis", "=", "(", "session", ".", "query", "(", "TI", ")", ".", "filter", "(", "TI", ".", "dag_id", ".", "in_", "(", "DAG_IDS", ")", ")", ".", "all", "(", ")", ")", "for", "ti", "in", "tis", ":", "logging", ".", "info", "(", "'Deleting TaskInstance :: {}'", ".", "format", "(", "ti", ")", ")", "session", ".", "delete", "(", "ti", ")", "session", ".", "commit", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
set_dags_paused_state
Toggle the pause state of the DAGs in the test.
scripts/perf/scheduler_ops_metrics.py
original_string / code (the two fields are identical):

def set_dags_paused_state(is_paused):
    """
    Toggle the pause state of the DAGs in the test.
    """
    session = settings.Session()
    dms = session.query(DagModel).filter(
        DagModel.dag_id.in_(DAG_IDS))
    for dm in dms:
        logging.info('Setting DAG :: {} is_paused={}'.format(dm, is_paused))
        dm.is_paused = is_paused
    session.commit()
[ "Toggle", "the", "pause", "state", "of", "the", "DAGs", "in", "the", "test", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/scripts/perf/scheduler_ops_metrics.py#L169-L179
[ "def", "set_dags_paused_state", "(", "is_paused", ")", ":", "session", "=", "settings", ".", "Session", "(", ")", "dms", "=", "session", ".", "query", "(", "DagModel", ")", ".", "filter", "(", "DagModel", ".", "dag_id", ".", "in_", "(", "DAG_IDS", ")", ")", "for", "dm", "in", "dms", ":", "logging", ".", "info", "(", "'Setting DAG :: {} is_paused={}'", ".", "format", "(", "dm", ",", "is_paused", ")", ")", "dm", ".", "is_paused", "=", "is_paused", "session", ".", "commit", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SchedulerMetricsJob.print_stats
Print operational metrics for the scheduler test.
scripts/perf/scheduler_ops_metrics.py
original_string / code (the two fields are identical):

def print_stats(self):
    """
    Print operational metrics for the scheduler test.
    """
    session = settings.Session()
    TI = TaskInstance
    tis = (
        session
        .query(TI)
        .filter(TI.dag_id.in_(DAG_IDS))
        .all()
    )
    successful_tis = [x for x in tis if x.state == State.SUCCESS]
    ti_perf = [(ti.dag_id, ti.task_id, ti.execution_date,
                (ti.queued_dttm - self.start_date).total_seconds(),
                (ti.start_date - self.start_date).total_seconds(),
                (ti.end_date - self.start_date).total_seconds(),
                ti.duration) for ti in successful_tis]
    ti_perf_df = pd.DataFrame(ti_perf, columns=['dag_id', 'task_id',
                                                'execution_date',
                                                'queue_delay',
                                                'start_delay', 'land_time',
                                                'duration'])

    print('Performance Results')
    print('###################')
    for dag_id in DAG_IDS:
        print('DAG {}'.format(dag_id))
        print(ti_perf_df[ti_perf_df['dag_id'] == dag_id])
    print('###################')
    if len(tis) > len(successful_tis):
        print("WARNING!! The following task instances haven't completed")
        print(pd.DataFrame([(ti.dag_id, ti.task_id, ti.execution_date, ti.state)
                            for ti in filter(lambda x: x.state != State.SUCCESS,
                                             tis)],
                           columns=['dag_id', 'task_id',
                                    'execution_date', 'state']))

    session.commit()
[ "Print", "operational", "metrics", "for", "the", "scheduler", "test", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/scripts/perf/scheduler_ops_metrics.py#L65-L101
[ "def", "print_stats", "(", "self", ")", ":", "session", "=", "settings", ".", "Session", "(", ")", "TI", "=", "TaskInstance", "tis", "=", "(", "session", ".", "query", "(", "TI", ")", ".", "filter", "(", "TI", ".", "dag_id", ".", "in_", "(", "DAG_IDS", ")", ")", ".", "all", "(", ")", ")", "successful_tis", "=", "[", "x", "for", "x", "in", "tis", "if", "x", ".", "state", "==", "State", ".", "SUCCESS", "]", "ti_perf", "=", "[", "(", "ti", ".", "dag_id", ",", "ti", ".", "task_id", ",", "ti", ".", "execution_date", ",", "(", "ti", ".", "queued_dttm", "-", "self", ".", "start_date", ")", ".", "total_seconds", "(", ")", ",", "(", "ti", ".", "start_date", "-", "self", ".", "start_date", ")", ".", "total_seconds", "(", ")", ",", "(", "ti", ".", "end_date", "-", "self", ".", "start_date", ")", ".", "total_seconds", "(", ")", ",", "ti", ".", "duration", ")", "for", "ti", "in", "successful_tis", "]", "ti_perf_df", "=", "pd", ".", "DataFrame", "(", "ti_perf", ",", "columns", "=", "[", "'dag_id'", ",", "'task_id'", ",", "'execution_date'", ",", "'queue_delay'", ",", "'start_delay'", ",", "'land_time'", ",", "'duration'", "]", ")", "print", "(", "'Performance Results'", ")", "print", "(", "'###################'", ")", "for", "dag_id", "in", "DAG_IDS", ":", "print", "(", "'DAG {}'", ".", "format", "(", "dag_id", ")", ")", "print", "(", "ti_perf_df", "[", "ti_perf_df", "[", "'dag_id'", "]", "==", "dag_id", "]", ")", "print", "(", "'###################'", ")", "if", "len", "(", "tis", ")", ">", "len", "(", "successful_tis", ")", ":", "print", "(", "\"WARNING!! The following task instances haven't completed\"", ")", "print", "(", "pd", ".", "DataFrame", "(", "[", "(", "ti", ".", "dag_id", ",", "ti", ".", "task_id", ",", "ti", ".", "execution_date", ",", "ti", ".", "state", ")", "for", "ti", "in", "filter", "(", "lambda", "x", ":", "x", ".", "state", "!=", "State", ".", "SUCCESS", ",", "tis", ")", "]", ",", "columns", "=", "[", "'dag_id'", ",", "'task_id'", ",", "'execution_date'", ",", "'state'", "]", ")", ")", "session", ".", "commit", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SchedulerMetricsJob.heartbeat
Override the scheduler heartbeat to determine when the test is complete
scripts/perf/scheduler_ops_metrics.py
original_string / code (the two fields are identical):

def heartbeat(self):
    """
    Override the scheduler heartbeat to determine when the test is complete
    """
    super(SchedulerMetricsJob, self).heartbeat()
    session = settings.Session()
    # Get all the relevant task instances
    TI = TaskInstance
    successful_tis = (
        session
        .query(TI)
        .filter(TI.dag_id.in_(DAG_IDS))
        .filter(TI.state.in_([State.SUCCESS]))
        .all()
    )
    session.commit()

    dagbag = DagBag(SUBDIR)
    dags = [dagbag.dags[dag_id] for dag_id in DAG_IDS]
    # the tasks in perf_dag_1 and per_dag_2 have a daily schedule interval.
    num_task_instances = sum([(timezone.utcnow() - task.start_date).days
                              for dag in dags for task in dag.tasks])

    if (len(successful_tis) == num_task_instances or
            (timezone.utcnow() - self.start_date).total_seconds() >
            MAX_RUNTIME_SECS):
        if len(successful_tis) == num_task_instances:
            self.log.info("All tasks processed! Printing stats.")
        else:
            self.log.info("Test timeout reached. Printing available stats.")
        self.print_stats()
        set_dags_paused_state(True)
        sys.exit()
[ "Override", "the", "scheduler", "heartbeat", "to", "determine", "when", "the", "test", "is", "complete" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/scripts/perf/scheduler_ops_metrics.py#L103-L135
[ "def", "heartbeat", "(", "self", ")", ":", "super", "(", "SchedulerMetricsJob", ",", "self", ")", ".", "heartbeat", "(", ")", "session", "=", "settings", ".", "Session", "(", ")", "# Get all the relevant task instances", "TI", "=", "TaskInstance", "successful_tis", "=", "(", "session", ".", "query", "(", "TI", ")", ".", "filter", "(", "TI", ".", "dag_id", ".", "in_", "(", "DAG_IDS", ")", ")", ".", "filter", "(", "TI", ".", "state", ".", "in_", "(", "[", "State", ".", "SUCCESS", "]", ")", ")", ".", "all", "(", ")", ")", "session", ".", "commit", "(", ")", "dagbag", "=", "DagBag", "(", "SUBDIR", ")", "dags", "=", "[", "dagbag", ".", "dags", "[", "dag_id", "]", "for", "dag_id", "in", "DAG_IDS", "]", "# the tasks in perf_dag_1 and per_dag_2 have a daily schedule interval.", "num_task_instances", "=", "sum", "(", "[", "(", "timezone", ".", "utcnow", "(", ")", "-", "task", ".", "start_date", ")", ".", "days", "for", "dag", "in", "dags", "for", "task", "in", "dag", ".", "tasks", "]", ")", "if", "(", "len", "(", "successful_tis", ")", "==", "num_task_instances", "or", "(", "timezone", ".", "utcnow", "(", ")", "-", "self", ".", "start_date", ")", ".", "total_seconds", "(", ")", ">", "MAX_RUNTIME_SECS", ")", ":", "if", "len", "(", "successful_tis", ")", "==", "num_task_instances", ":", "self", ".", "log", ".", "info", "(", "\"All tasks processed! Printing stats.\"", ")", "else", ":", "self", ".", "log", ".", "info", "(", "\"Test timeout reached. Printing available stats.\"", ")", "self", ".", "print_stats", "(", ")", "set_dags_paused_state", "(", "True", ")", "sys", ".", "exit", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AwsLambdaHook.invoke_lambda
Invoke Lambda Function
airflow/contrib/hooks/aws_lambda_hook.py
original_string / code (the two fields are identical):

def invoke_lambda(self, payload):
    """
    Invoke Lambda Function
    """
    awslambda_conn = self.get_conn()

    response = awslambda_conn.invoke(
        FunctionName=self.function_name,
        InvocationType=self.invocation_type,
        LogType=self.log_type,
        Payload=payload,
        Qualifier=self.qualifier
    )

    return response
[ "Invoke", "Lambda", "Function" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_lambda_hook.py#L53-L68
[ "def", "invoke_lambda", "(", "self", ",", "payload", ")", ":", "awslambda_conn", "=", "self", ".", "get_conn", "(", ")", "response", "=", "awslambda_conn", ".", "invoke", "(", "FunctionName", "=", "self", ".", "function_name", ",", "InvocationType", "=", "self", ".", "invocation_type", ",", "LogType", "=", "self", ".", "log_type", ",", "Payload", "=", "payload", ",", "Qualifier", "=", "self", ".", "qualifier", ")", "return", "response" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
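The hook above forwards these keyword arguments to the boto3 Lambda client returned by get_conn(). A rough direct-boto3 equivalent, where the function name, qualifier and payload are placeholders:

import json
import boto3

lambda_client = boto3.client("lambda")   # region/credentials come from the usual boto3 configuration

response = lambda_client.invoke(
    FunctionName="my-function",           # placeholder
    InvocationType="RequestResponse",
    LogType="Tail",
    Payload=json.dumps({"key": "value"}),
    Qualifier="$LATEST",
)
print(response["StatusCode"])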
test
get_dag_run_state
Return the task object identified by the given dag_id and task_id.
airflow/api/common/experimental/get_dag_run_state.py
original_string / code (the two fields are identical):

def get_dag_run_state(dag_id, execution_date):
    """Return the task object identified by the given dag_id and task_id."""
    dagbag = DagBag()

    # Check DAG exists.
    if dag_id not in dagbag.dags:
        error_message = "Dag id {} not found".format(dag_id)
        raise DagNotFound(error_message)

    # Get DAG object and check Task Exists
    dag = dagbag.get_dag(dag_id)

    # Get DagRun object and check that it exists
    dagrun = dag.get_dagrun(execution_date=execution_date)
    if not dagrun:
        error_message = ('Dag Run for date {} not found in dag {}'
                         .format(execution_date, dag_id))
        raise DagRunNotFound(error_message)

    return {'state': dagrun.get_state()}
[ "Return", "the", "task", "object", "identified", "by", "the", "given", "dag_id", "and", "task_id", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/get_dag_run_state.py#L24-L44
[ "def", "get_dag_run_state", "(", "dag_id", ",", "execution_date", ")", ":", "dagbag", "=", "DagBag", "(", ")", "# Check DAG exists.", "if", "dag_id", "not", "in", "dagbag", ".", "dags", ":", "error_message", "=", "\"Dag id {} not found\"", ".", "format", "(", "dag_id", ")", "raise", "DagNotFound", "(", "error_message", ")", "# Get DAG object and check Task Exists", "dag", "=", "dagbag", ".", "get_dag", "(", "dag_id", ")", "# Get DagRun object and check that it exists", "dagrun", "=", "dag", ".", "get_dagrun", "(", "execution_date", "=", "execution_date", ")", "if", "not", "dagrun", ":", "error_message", "=", "(", "'Dag Run for date {} not found in dag {}'", ".", "format", "(", "execution_date", ",", "dag_id", ")", ")", "raise", "DagRunNotFound", "(", "error_message", ")", "return", "{", "'state'", ":", "dagrun", ".", "get_state", "(", ")", "}" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
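A sketch of calling the helper above; the DAG id and execution date are placeholders, and a DAG run for that exact date must already exist in the metadata database:

from datetime import datetime

from airflow.api.common.experimental.get_dag_run_state import get_dag_run_state

# Placeholder identifiers for illustration only.
info = get_dag_run_state("example_dag", datetime(2019, 1, 1))
print(info)  # e.g. {'state': 'success'}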
test
create_evaluate_ops
Creates Operators needed for model evaluation and returns. It gets prediction over inputs via Cloud ML Engine BatchPrediction API by calling MLEngineBatchPredictionOperator, then summarize and validate the result via Cloud Dataflow using DataFlowPythonOperator. For details and pricing about Batch prediction, please refer to the website https://cloud.google.com/ml-engine/docs/how-tos/batch-predict and for Cloud Dataflow, https://cloud.google.com/dataflow/docs/ It returns three chained operators for prediction, summary, and validation, named as <prefix>-prediction, <prefix>-summary, and <prefix>-validation, respectively. (<prefix> should contain only alphanumeric characters or hyphen.) The upstream and downstream can be set accordingly like: pred, _, val = create_evaluate_ops(...) pred.set_upstream(upstream_op) ... downstream_op.set_upstream(val) Callers will provide two python callables, metric_fn and validate_fn, in order to customize the evaluation behavior as they wish. - metric_fn receives a dictionary per instance derived from json in the batch prediction result. The keys might vary depending on the model. It should return a tuple of metrics. - validation_fn receives a dictionary of the averaged metrics that metric_fn generated over all instances. The key/value of the dictionary matches to what's given by metric_fn_and_keys arg. The dictionary contains an additional metric, 'count' to represent the total number of instances received for evaluation. The function would raise an exception to mark the task as failed, in a case the validation result is not okay to proceed (i.e. to set the trained version as default). Typical examples are like this: def get_metric_fn_and_keys(): import math # imports should be outside of the metric_fn below. def error_and_squared_error(inst): label = float(inst['input_label']) classes = float(inst['classes']) # 0 or 1 err = abs(classes-label) squared_err = math.pow(classes-label, 2) return (err, squared_err) # returns a tuple. return error_and_squared_error, ['err', 'mse'] # key order must match. def validate_err_and_count(summary): if summary['err'] > 0.2: raise ValueError('Too high err>0.2; summary=%s' % summary) if summary['mse'] > 0.05: raise ValueError('Too high mse>0.05; summary=%s' % summary) if summary['count'] < 1000: raise ValueError('Too few instances<1000; summary=%s' % summary) return summary For the details on the other BatchPrediction-related arguments (project_id, job_id, region, data_format, input_paths, prediction_path, model_uri), please refer to MLEngineBatchPredictionOperator too. :param task_prefix: a prefix for the tasks. Only alphanumeric characters and hyphen are allowed (no underscores), since this will be used as dataflow job name, which doesn't allow other characters. :type task_prefix: str :param data_format: either of 'TEXT', 'TF_RECORD', 'TF_RECORD_GZIP' :type data_format: str :param input_paths: a list of input paths to be sent to BatchPrediction. :type input_paths: list[str] :param prediction_path: GCS path to put the prediction results in. :type prediction_path: str :param metric_fn_and_keys: a tuple of metric_fn and metric_keys: - metric_fn is a function that accepts a dictionary (for an instance), and returns a tuple of metric(s) that it calculates. - metric_keys is a list of strings to denote the key of each metric. :type metric_fn_and_keys: tuple of a function and a list[str] :param validate_fn: a function to validate whether the averaged metric(s) is good enough to push the model. 
:type validate_fn: function :param batch_prediction_job_id: the id to use for the Cloud ML Batch prediction job. Passed directly to the MLEngineBatchPredictionOperator as the job_id argument. :type batch_prediction_job_id: str :param project_id: the Google Cloud Platform project id in which to execute Cloud ML Batch Prediction and Dataflow jobs. If None, then the `dag`'s `default_args['project_id']` will be used. :type project_id: str :param region: the Google Cloud Platform region in which to execute Cloud ML Batch Prediction and Dataflow jobs. If None, then the `dag`'s `default_args['region']` will be used. :type region: str :param dataflow_options: options to run Dataflow jobs. If None, then the `dag`'s `default_args['dataflow_default_options']` will be used. :type dataflow_options: dictionary :param model_uri: GCS path of the model exported by Tensorflow using tensorflow.estimator.export_savedmodel(). It cannot be used with model_name or version_name below. See MLEngineBatchPredictionOperator for more detail. :type model_uri: str :param model_name: Used to indicate a model to use for prediction. Can be used in combination with version_name, but cannot be used together with model_uri. See MLEngineBatchPredictionOperator for more detail. If None, then the `dag`'s `default_args['model_name']` will be used. :type model_name: str :param version_name: Used to indicate a model version to use for prediction, in combination with model_name. Cannot be used together with model_uri. See MLEngineBatchPredictionOperator for more detail. If None, then the `dag`'s `default_args['version_name']` will be used. :type version_name: str :param dag: The `DAG` to use for all Operators. :type dag: airflow.models.DAG :returns: a tuple of three operators, (prediction, summary, validation) :rtype: tuple(DataFlowPythonOperator, DataFlowPythonOperator, PythonOperator)
airflow/contrib/utils/mlengine_operator_utils.py
original_string / code (the two fields are identical):

def create_evaluate_ops(task_prefix,
                        data_format,
                        input_paths,
                        prediction_path,
                        metric_fn_and_keys,
                        validate_fn,
                        batch_prediction_job_id=None,
                        project_id=None,
                        region=None,
                        dataflow_options=None,
                        model_uri=None,
                        model_name=None,
                        version_name=None,
                        dag=None):
    """
    Creates Operators needed for model evaluation and returns.

    It gets prediction over inputs via Cloud ML Engine BatchPrediction API by
    calling MLEngineBatchPredictionOperator, then summarize and validate
    the result via Cloud Dataflow using DataFlowPythonOperator.

    For details and pricing about Batch prediction, please refer to the website
    https://cloud.google.com/ml-engine/docs/how-tos/batch-predict
    and for Cloud Dataflow, https://cloud.google.com/dataflow/docs/

    It returns three chained operators for prediction, summary, and validation,
    named as <prefix>-prediction, <prefix>-summary, and <prefix>-validation,
    respectively.
    (<prefix> should contain only alphanumeric characters or hyphen.)

    The upstream and downstream can be set accordingly like:
      pred, _, val = create_evaluate_ops(...)
      pred.set_upstream(upstream_op)
      ...
      downstream_op.set_upstream(val)

    Callers will provide two python callables, metric_fn and validate_fn, in
    order to customize the evaluation behavior as they wish.
    - metric_fn receives a dictionary per instance derived from json in the
      batch prediction result. The keys might vary depending on the model.
      It should return a tuple of metrics.
    - validation_fn receives a dictionary of the averaged metrics that
      metric_fn generated over all instances.
      The key/value of the dictionary matches to what's given by
      metric_fn_and_keys arg.
      The dictionary contains an additional metric, 'count' to represent the
      total number of instances received for evaluation.
      The function would raise an exception to mark the task as failed, in a
      case the validation result is not okay to proceed (i.e. to set the
      trained version as default).

    Typical examples are like this:

      def get_metric_fn_and_keys():
          import math  # imports should be outside of the metric_fn below.
          def error_and_squared_error(inst):
              label = float(inst['input_label'])
              classes = float(inst['classes'])  # 0 or 1
              err = abs(classes-label)
              squared_err = math.pow(classes-label, 2)
              return (err, squared_err)  # returns a tuple.
          return error_and_squared_error, ['err', 'mse']  # key order must match.

      def validate_err_and_count(summary):
          if summary['err'] > 0.2:
              raise ValueError('Too high err>0.2; summary=%s' % summary)
          if summary['mse'] > 0.05:
              raise ValueError('Too high mse>0.05; summary=%s' % summary)
          if summary['count'] < 1000:
              raise ValueError('Too few instances<1000; summary=%s' % summary)
          return summary

    For the details on the other BatchPrediction-related arguments (project_id,
    job_id, region, data_format, input_paths, prediction_path, model_uri),
    please refer to MLEngineBatchPredictionOperator too.

    :param task_prefix: a prefix for the tasks. Only alphanumeric characters and
        hyphen are allowed (no underscores), since this will be used as dataflow
        job name, which doesn't allow other characters.
    :type task_prefix: str

    :param data_format: either of 'TEXT', 'TF_RECORD', 'TF_RECORD_GZIP'
    :type data_format: str

    :param input_paths: a list of input paths to be sent to BatchPrediction.
    :type input_paths: list[str]

    :param prediction_path: GCS path to put the prediction results in.
    :type prediction_path: str

    :param metric_fn_and_keys: a tuple of metric_fn and metric_keys:
        - metric_fn is a function that accepts a dictionary (for an instance),
          and returns a tuple of metric(s) that it calculates.
        - metric_keys is a list of strings to denote the key of each metric.
    :type metric_fn_and_keys: tuple of a function and a list[str]

    :param validate_fn: a function to validate whether the averaged metric(s) is
        good enough to push the model.
    :type validate_fn: function

    :param batch_prediction_job_id: the id to use for the Cloud ML Batch
        prediction job. Passed directly to the MLEngineBatchPredictionOperator
        as the job_id argument.
    :type batch_prediction_job_id: str

    :param project_id: the Google Cloud Platform project id in which to execute
        Cloud ML Batch Prediction and Dataflow jobs. If None, then the `dag`'s
        `default_args['project_id']` will be used.
    :type project_id: str

    :param region: the Google Cloud Platform region in which to execute Cloud ML
        Batch Prediction and Dataflow jobs. If None, then the `dag`'s
        `default_args['region']` will be used.
    :type region: str

    :param dataflow_options: options to run Dataflow jobs. If None, then the
        `dag`'s `default_args['dataflow_default_options']` will be used.
    :type dataflow_options: dictionary

    :param model_uri: GCS path of the model exported by Tensorflow using
        tensorflow.estimator.export_savedmodel(). It cannot be used with
        model_name or version_name below. See MLEngineBatchPredictionOperator
        for more detail.
    :type model_uri: str

    :param model_name: Used to indicate a model to use for prediction. Can be
        used in combination with version_name, but cannot be used together with
        model_uri. See MLEngineBatchPredictionOperator for more detail. If None,
        then the `dag`'s `default_args['model_name']` will be used.
    :type model_name: str

    :param version_name: Used to indicate a model version to use for prediction,
        in combination with model_name. Cannot be used together with model_uri.
        See MLEngineBatchPredictionOperator for more detail. If None, then the
        `dag`'s `default_args['version_name']` will be used.
    :type version_name: str

    :param dag: The `DAG` to use for all Operators.
    :type dag: airflow.models.DAG

    :returns: a tuple of three operators, (prediction, summary, validation)
    :rtype: tuple(DataFlowPythonOperator, DataFlowPythonOperator,
                  PythonOperator)
    """

    # Verify that task_prefix doesn't have any special characters except hyphen
    # '-', which is the only allowed non-alphanumeric character by Dataflow.
    if not re.match(r"^[a-zA-Z][-A-Za-z0-9]*$", task_prefix):
        raise AirflowException(
            "Malformed task_id for DataFlowPythonOperator (only alphanumeric "
            "and hyphens are allowed but got: " + task_prefix)

    metric_fn, metric_keys = metric_fn_and_keys
    if not callable(metric_fn):
        raise AirflowException("`metric_fn` param must be callable.")
    if not callable(validate_fn):
        raise AirflowException("`validate_fn` param must be callable.")

    if dag is not None and dag.default_args is not None:
        default_args = dag.default_args
        project_id = project_id or default_args.get('project_id')
        region = region or default_args.get('region')
        model_name = model_name or default_args.get('model_name')
        version_name = version_name or default_args.get('version_name')
        dataflow_options = dataflow_options or \
            default_args.get('dataflow_default_options')

    evaluate_prediction = MLEngineBatchPredictionOperator(
        task_id=(task_prefix + "-prediction"),
        project_id=project_id,
        job_id=batch_prediction_job_id,
        region=region,
        data_format=data_format,
        input_paths=input_paths,
        output_path=prediction_path,
        uri=model_uri,
        model_name=model_name,
        version_name=version_name,
        dag=dag)

    metric_fn_encoded = base64.b64encode(dill.dumps(metric_fn, recurse=True))
    evaluate_summary = DataFlowPythonOperator(
        task_id=(task_prefix + "-summary"),
        py_options=["-m"],
        py_file="airflow.contrib.utils.mlengine_prediction_summary",
        dataflow_default_options=dataflow_options,
        options={
            "prediction_path": prediction_path,
            "metric_fn_encoded": metric_fn_encoded,
            "metric_keys": ','.join(metric_keys)
        },
        dag=dag)
    evaluate_summary.set_upstream(evaluate_prediction)

    def apply_validate_fn(*args, **kwargs):
        prediction_path = kwargs["templates_dict"]["prediction_path"]
        scheme, bucket, obj, _, _ = urlsplit(prediction_path)
        if scheme != "gs" or not bucket or not obj:
            raise ValueError("Wrong format prediction_path: %s",
                             prediction_path)
        summary = os.path.join(obj.strip("/"), "prediction.summary.json")
        gcs_hook = GoogleCloudStorageHook()
        summary = json.loads(gcs_hook.download(bucket, summary))
        return validate_fn(summary)

    evaluate_validation = PythonOperator(
        task_id=(task_prefix + "-validation"),
        python_callable=apply_validate_fn,
        provide_context=True,
        templates_dict={"prediction_path": prediction_path},
        dag=dag)
    evaluate_validation.set_upstream(evaluate_summary)

    return evaluate_prediction, evaluate_summary, evaluate_validation
[ "Creates", "Operators", "needed", "for", "model", "evaluation", "and", "returns", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/utils/mlengine_operator_utils.py#L32-L246
[ "def", "create_evaluate_ops", "(", "task_prefix", ",", "data_format", ",", "input_paths", ",", "prediction_path", ",", "metric_fn_and_keys", ",", "validate_fn", ",", "batch_prediction_job_id", "=", "None", ",", "project_id", "=", "None", ",", "region", "=", "None", ",", "dataflow_options", "=", "None", ",", "model_uri", "=", "None", ",", "model_name", "=", "None", ",", "version_name", "=", "None", ",", "dag", "=", "None", ")", ":", "# Verify that task_prefix doesn't have any special characters except hyphen", "# '-', which is the only allowed non-alphanumeric character by Dataflow.", "if", "not", "re", ".", "match", "(", "r\"^[a-zA-Z][-A-Za-z0-9]*$\"", ",", "task_prefix", ")", ":", "raise", "AirflowException", "(", "\"Malformed task_id for DataFlowPythonOperator (only alphanumeric \"", "\"and hyphens are allowed but got: \"", "+", "task_prefix", ")", "metric_fn", ",", "metric_keys", "=", "metric_fn_and_keys", "if", "not", "callable", "(", "metric_fn", ")", ":", "raise", "AirflowException", "(", "\"`metric_fn` param must be callable.\"", ")", "if", "not", "callable", "(", "validate_fn", ")", ":", "raise", "AirflowException", "(", "\"`validate_fn` param must be callable.\"", ")", "if", "dag", "is", "not", "None", "and", "dag", ".", "default_args", "is", "not", "None", ":", "default_args", "=", "dag", ".", "default_args", "project_id", "=", "project_id", "or", "default_args", ".", "get", "(", "'project_id'", ")", "region", "=", "region", "or", "default_args", ".", "get", "(", "'region'", ")", "model_name", "=", "model_name", "or", "default_args", ".", "get", "(", "'model_name'", ")", "version_name", "=", "version_name", "or", "default_args", ".", "get", "(", "'version_name'", ")", "dataflow_options", "=", "dataflow_options", "or", "default_args", ".", "get", "(", "'dataflow_default_options'", ")", "evaluate_prediction", "=", "MLEngineBatchPredictionOperator", "(", "task_id", "=", "(", "task_prefix", "+", "\"-prediction\"", ")", ",", "project_id", "=", "project_id", ",", "job_id", "=", "batch_prediction_job_id", ",", "region", "=", "region", ",", "data_format", "=", "data_format", ",", "input_paths", "=", "input_paths", ",", "output_path", "=", "prediction_path", ",", "uri", "=", "model_uri", ",", "model_name", "=", "model_name", ",", "version_name", "=", "version_name", ",", "dag", "=", "dag", ")", "metric_fn_encoded", "=", "base64", ".", "b64encode", "(", "dill", ".", "dumps", "(", "metric_fn", ",", "recurse", "=", "True", ")", ")", "evaluate_summary", "=", "DataFlowPythonOperator", "(", "task_id", "=", "(", "task_prefix", "+", "\"-summary\"", ")", ",", "py_options", "=", "[", "\"-m\"", "]", ",", "py_file", "=", "\"airflow.contrib.utils.mlengine_prediction_summary\"", ",", "dataflow_default_options", "=", "dataflow_options", ",", "options", "=", "{", "\"prediction_path\"", ":", "prediction_path", ",", "\"metric_fn_encoded\"", ":", "metric_fn_encoded", ",", "\"metric_keys\"", ":", "','", ".", "join", "(", "metric_keys", ")", "}", ",", "dag", "=", "dag", ")", "evaluate_summary", ".", "set_upstream", "(", "evaluate_prediction", ")", "def", "apply_validate_fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "prediction_path", "=", "kwargs", "[", "\"templates_dict\"", "]", "[", "\"prediction_path\"", "]", "scheme", ",", "bucket", ",", "obj", ",", "_", ",", "_", "=", "urlsplit", "(", "prediction_path", ")", "if", "scheme", "!=", "\"gs\"", "or", "not", "bucket", "or", "not", "obj", ":", "raise", "ValueError", "(", "\"Wrong format prediction_path: %s\"", ",", "prediction_path", ")", 
"summary", "=", "os", ".", "path", ".", "join", "(", "obj", ".", "strip", "(", "\"/\"", ")", ",", "\"prediction.summary.json\"", ")", "gcs_hook", "=", "GoogleCloudStorageHook", "(", ")", "summary", "=", "json", ".", "loads", "(", "gcs_hook", ".", "download", "(", "bucket", ",", "summary", ")", ")", "return", "validate_fn", "(", "summary", ")", "evaluate_validation", "=", "PythonOperator", "(", "task_id", "=", "(", "task_prefix", "+", "\"-validation\"", ")", ",", "python_callable", "=", "apply_validate_fn", ",", "provide_context", "=", "True", ",", "templates_dict", "=", "{", "\"prediction_path\"", ":", "prediction_path", "}", ",", "dag", "=", "dag", ")", "evaluate_validation", ".", "set_upstream", "(", "evaluate_summary", ")", "return", "evaluate_prediction", ",", "evaluate_summary", ",", "evaluate_validation" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
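Pulling the docstring above together, a hedged sketch of wiring the three returned operators into a DAG; the project, region, model name and GCS paths are placeholders, and only the parameters documented above are used:

from airflow import DAG
from airflow.utils.dates import days_ago
from airflow.contrib.utils.mlengine_operator_utils import create_evaluate_ops

def get_metric_fn_and_keys():
    import math  # imports should live outside metric_fn, as the docstring notes

    def error_and_squared_error(inst):
        label = float(inst['input_label'])
        classes = float(inst['classes'])
        err = abs(classes - label)
        squared_err = math.pow(classes - label, 2)
        return (err, squared_err)

    return error_and_squared_error, ['err', 'mse']

def validate_err_and_count(summary):
    if summary['err'] > 0.2:
        raise ValueError('Too high err>0.2; summary=%s' % summary)
    return summary

dag = DAG('example_mlengine_eval', default_args={
    'start_date': days_ago(1),
    'project_id': 'my-gcp-project',                                       # placeholder
    'region': 'us-central1',                                              # placeholder
    'model_name': 'my_model',                                             # placeholder
    'dataflow_default_options': {'temp_location': 'gs://my-bucket/tmp'},  # placeholder
})

pred, summary, validation = create_evaluate_ops(
    task_prefix='eval',                                # alphanumeric/hyphen only
    data_format='TEXT',
    input_paths=['gs://my-bucket/eval/input*'],        # placeholder
    prediction_path='gs://my-bucket/eval/prediction',  # placeholder
    metric_fn_and_keys=get_metric_fn_and_keys(),
    validate_fn=validate_err_and_count,
    dag=dag)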
test
mkdirs
Creates the directory specified by path, creating intermediate directories as necessary. If directory already exists, this is a no-op. :param path: The directory to create :type path: str :param mode: The mode to give to the directory e.g. 0o755, ignores umask :type mode: int
airflow/utils/file.py
original_string / code (the two fields are identical):

def mkdirs(path, mode):
    """
    Creates the directory specified by path, creating intermediate directories
    as necessary. If directory already exists, this is a no-op.

    :param path: The directory to create
    :type path: str
    :param mode: The mode to give to the directory e.g. 0o755, ignores umask
    :type mode: int
    """
    try:
        o_umask = os.umask(0)
        os.makedirs(path, mode)
    except OSError:
        if not os.path.isdir(path):
            raise
    finally:
        os.umask(o_umask)
[ "Creates", "the", "directory", "specified", "by", "path", "creating", "intermediate", "directories", "as", "necessary", ".", "If", "directory", "already", "exists", "this", "is", "a", "no", "-", "op", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/file.py#L42-L59
[ "def", "mkdirs", "(", "path", ",", "mode", ")", ":", "try", ":", "o_umask", "=", "os", ".", "umask", "(", "0", ")", "os", ".", "makedirs", "(", "path", ",", "mode", ")", "except", "OSError", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "raise", "finally", ":", "os", ".", "umask", "(", "o_umask", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
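A quick usage sketch of the helper above; the target directory is a placeholder, and per the docstring a repeated call is a no-op:

from airflow.utils.file import mkdirs

mkdirs('/tmp/airflow_example/a/b', 0o755)  # creates intermediate directories with the given mode
mkdirs('/tmp/airflow_example/a/b', 0o755)  # already exists, so this is a no-op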
test
_convert_to_float_if_possible
A small helper function to convert a string to a numeric value if appropriate :param s: the string to be converted :type s: str
airflow/operators/check_operator.py
original_string / code (the two fields are identical):

def _convert_to_float_if_possible(s):
    """
    A small helper function to convert a string to a numeric value
    if appropriate

    :param s: the string to be converted
    :type s: str
    """
    try:
        ret = float(s)
    except (ValueError, TypeError):
        ret = s
    return ret
[ "A", "small", "helper", "function", "to", "convert", "a", "string", "to", "a", "numeric", "value", "if", "appropriate" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/operators/check_operator.py#L98-L110
[ "def", "_convert_to_float_if_possible", "(", "s", ")", ":", "try", ":", "ret", "=", "float", "(", "s", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "ret", "=", "s", "return", "ret" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
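A quick sketch of the conversion behaviour described above, importing from the module named in the record's path:

from airflow.operators.check_operator import _convert_to_float_if_possible

print(_convert_to_float_if_possible("1.5"))  # -> 1.5
print(_convert_to_float_if_possible("100"))  # -> 100.0
print(_convert_to_float_if_possible("n/a"))  # -> 'n/a' (left untouched on ValueError)
print(_convert_to_float_if_possible(None))   # -> None  (TypeError is also swallowed)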
test
utcnow
Get the current date and time in UTC :return:
airflow/utils/timezone.py
original_string / code (the two fields are identical):

def utcnow():
    """
    Get the current date and time in UTC

    :return:
    """

    # pendulum utcnow() is not used as that sets a TimezoneInfo object
    # instead of a Timezone. This is not pickable and also creates issues
    # when using replace()
    d = dt.datetime.utcnow()
    d = d.replace(tzinfo=utc)

    return d
[ "Get", "the", "current", "date", "and", "time", "in", "UTC", ":", "return", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/timezone.py#L52-L64
[ "def", "utcnow", "(", ")", ":", "# pendulum utcnow() is not used as that sets a TimezoneInfo object", "# instead of a Timezone. This is not pickable and also creates issues", "# when using replace()", "d", "=", "dt", ".", "datetime", ".", "utcnow", "(", ")", "d", "=", "d", ".", "replace", "(", "tzinfo", "=", "utc", ")", "return", "d" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
utc_epoch
Gets the epoch in the users timezone :return:
airflow/utils/timezone.py
original_string / code (the two fields are identical):

def utc_epoch():
    """
    Gets the epoch in the users timezone

    :return:
    """

    # pendulum utcnow() is not used as that sets a TimezoneInfo object
    # instead of a Timezone. This is not pickable and also creates issues
    # when using replace()
    d = dt.datetime(1970, 1, 1)
    d = d.replace(tzinfo=utc)

    return d
[ "Gets", "the", "epoch", "in", "the", "users", "timezone", ":", "return", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/timezone.py#L67-L79
[ "def", "utc_epoch", "(", ")", ":", "# pendulum utcnow() is not used as that sets a TimezoneInfo object", "# instead of a Timezone. This is not pickable and also creates issues", "# when using replace()", "d", "=", "dt", ".", "datetime", "(", "1970", ",", "1", ",", "1", ")", "d", "=", "d", ".", "replace", "(", "tzinfo", "=", "utc", ")", "return", "d" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
convert_to_utc
Returns the datetime with the default timezone added if timezone information was not associated :param value: datetime :return: datetime with tzinfo
airflow/utils/timezone.py
original_string / code (the two fields are identical):

def convert_to_utc(value):
    """
    Returns the datetime with the default timezone added if timezone
    information was not associated

    :param value: datetime
    :return: datetime with tzinfo
    """
    if not value:
        return value

    if not is_localized(value):
        value = pendulum.instance(value, TIMEZONE)

    return value.astimezone(utc)
[ "Returns", "the", "datetime", "with", "the", "default", "timezone", "added", "if", "timezone", "information", "was", "not", "associated", ":", "param", "value", ":", "datetime", ":", "return", ":", "datetime", "with", "tzinfo" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/timezone.py#L82-L95
[ "def", "convert_to_utc", "(", "value", ")", ":", "if", "not", "value", ":", "return", "value", "if", "not", "is_localized", "(", "value", ")", ":", "value", "=", "pendulum", ".", "instance", "(", "value", ",", "TIMEZONE", ")", "return", "value", ".", "astimezone", "(", "utc", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
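A sketch of the naive-versus-aware handling above; TIMEZONE is the configured default timezone, so the exact UTC result for the naive value depends on the Airflow configuration:

import datetime as dt

from airflow.utils import timezone

naive = dt.datetime(2019, 1, 1, 12, 0)
aware = dt.datetime(2019, 1, 1, 12, 0, tzinfo=dt.timezone.utc)

# Naive values are first localized to the configured default TIMEZONE,
# so the resulting UTC time depends on that setting.
print(timezone.convert_to_utc(naive))

# Aware values are simply converted to UTC.
print(timezone.convert_to_utc(aware))  # 2019-01-01 12:00:00+00:00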
test
make_aware
Make a naive datetime.datetime in a given time zone aware. :param value: datetime :param timezone: timezone :return: localized datetime in settings.TIMEZONE or timezone
airflow/utils/timezone.py
def make_aware(value, timezone=None): """ Make a naive datetime.datetime in a given time zone aware. :param value: datetime :param timezone: timezone :return: localized datetime in settings.TIMEZONE or timezone """ if timezone is None: timezone = TIMEZONE # Check that we won't overwrite the timezone of an aware datetime. if is_localized(value): raise ValueError( "make_aware expects a naive datetime, got %s" % value) if hasattr(value, 'fold'): # In case of python 3.6 we want to do the same that pendulum does for python3.5 # i.e in case we move clock back we want to schedule the run at the time of the second # instance of the same clock time rather than the first one. # Fold parameter has no impact in other cases so we can safely set it to 1 here value = value.replace(fold=1) if hasattr(timezone, 'localize'): # This method is available for pytz time zones. return timezone.localize(value) elif hasattr(timezone, 'convert'): # For pendulum return timezone.convert(value) else: # This may be wrong around DST changes! return value.replace(tzinfo=timezone)
def make_aware(value, timezone=None): """ Make a naive datetime.datetime in a given time zone aware. :param value: datetime :param timezone: timezone :return: localized datetime in settings.TIMEZONE or timezone """ if timezone is None: timezone = TIMEZONE # Check that we won't overwrite the timezone of an aware datetime. if is_localized(value): raise ValueError( "make_aware expects a naive datetime, got %s" % value) if hasattr(value, 'fold'): # In case of python 3.6 we want to do the same that pendulum does for python3.5 # i.e in case we move clock back we want to schedule the run at the time of the second # instance of the same clock time rather than the first one. # Fold parameter has no impact in other cases so we can safely set it to 1 here value = value.replace(fold=1) if hasattr(timezone, 'localize'): # This method is available for pytz time zones. return timezone.localize(value) elif hasattr(timezone, 'convert'): # For pendulum return timezone.convert(value) else: # This may be wrong around DST changes! return value.replace(tzinfo=timezone)
[ "Make", "a", "naive", "datetime", ".", "datetime", "in", "a", "given", "time", "zone", "aware", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/timezone.py#L98-L128
[ "def", "make_aware", "(", "value", ",", "timezone", "=", "None", ")", ":", "if", "timezone", "is", "None", ":", "timezone", "=", "TIMEZONE", "# Check that we won't overwrite the timezone of an aware datetime.", "if", "is_localized", "(", "value", ")", ":", "raise", "ValueError", "(", "\"make_aware expects a naive datetime, got %s\"", "%", "value", ")", "if", "hasattr", "(", "value", ",", "'fold'", ")", ":", "# In case of python 3.6 we want to do the same that pendulum does for python3.5", "# i.e in case we move clock back we want to schedule the run at the time of the second", "# instance of the same clock time rather than the first one.", "# Fold parameter has no impact in other cases so we can safely set it to 1 here", "value", "=", "value", ".", "replace", "(", "fold", "=", "1", ")", "if", "hasattr", "(", "timezone", ",", "'localize'", ")", ":", "# This method is available for pytz time zones.", "return", "timezone", ".", "localize", "(", "value", ")", "elif", "hasattr", "(", "timezone", ",", "'convert'", ")", ":", "# For pendulum", "return", "timezone", ".", "convert", "(", "value", ")", "else", ":", "# This may be wrong around DST changes!", "return", "value", ".", "replace", "(", "tzinfo", "=", "timezone", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
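A self-contained sketch of the fold behaviour referenced in the make_aware() comments, using the stdlib zoneinfo module (Python 3.9+, an assumption) rather than pytz or pendulum: fold=1 selects the second occurrence of an ambiguous wall-clock time when clocks go back.

    import datetime as dt
    from zoneinfo import ZoneInfo   # stdlib on Python 3.9+

    tz = ZoneInfo("Europe/Amsterdam")
    # 02:30 occurs twice on 2019-10-27 because clocks go back from 03:00 to 02:00.
    ambiguous = dt.datetime(2019, 10, 27, 2, 30, fold=1)
    aware = ambiguous.replace(tzinfo=tz)

    print(aware.utcoffset())   # 1:00:00 -> the post-transition (second) occurrence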
test
make_naive
Make an aware datetime.datetime naive in a given time zone. :param value: datetime :param timezone: timezone :return: naive datetime
airflow/utils/timezone.py
def make_naive(value, timezone=None): """ Make an aware datetime.datetime naive in a given time zone. :param value: datetime :param timezone: timezone :return: naive datetime """ if timezone is None: timezone = TIMEZONE # Emulate the behavior of astimezone() on Python < 3.6. if is_naive(value): raise ValueError("make_naive() cannot be applied to a naive datetime") o = value.astimezone(timezone) # cross library compatibility naive = dt.datetime(o.year, o.month, o.day, o.hour, o.minute, o.second, o.microsecond) return naive
def make_naive(value, timezone=None): """ Make an aware datetime.datetime naive in a given time zone. :param value: datetime :param timezone: timezone :return: naive datetime """ if timezone is None: timezone = TIMEZONE # Emulate the behavior of astimezone() on Python < 3.6. if is_naive(value): raise ValueError("make_naive() cannot be applied to a naive datetime") o = value.astimezone(timezone) # cross library compatibility naive = dt.datetime(o.year, o.month, o.day, o.hour, o.minute, o.second, o.microsecond) return naive
[ "Make", "an", "aware", "datetime", ".", "datetime", "naive", "in", "a", "given", "time", "zone", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/timezone.py#L131-L157
[ "def", "make_naive", "(", "value", ",", "timezone", "=", "None", ")", ":", "if", "timezone", "is", "None", ":", "timezone", "=", "TIMEZONE", "# Emulate the behavior of astimezone() on Python < 3.6.", "if", "is_naive", "(", "value", ")", ":", "raise", "ValueError", "(", "\"make_naive() cannot be applied to a naive datetime\"", ")", "o", "=", "value", ".", "astimezone", "(", "timezone", ")", "# cross library compatibility", "naive", "=", "dt", ".", "datetime", "(", "o", ".", "year", ",", "o", ".", "month", ",", "o", ".", "day", ",", "o", ".", "hour", ",", "o", ".", "minute", ",", "o", ".", "second", ",", "o", ".", "microsecond", ")", "return", "naive" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
datetime
Wrapper around datetime.datetime that adds settings.TIMEZONE if tzinfo not specified :return: datetime.datetime
airflow/utils/timezone.py
def datetime(*args, **kwargs): """ Wrapper around datetime.datetime that adds settings.TIMEZONE if tzinfo not specified :return: datetime.datetime """ if 'tzinfo' not in kwargs: kwargs['tzinfo'] = TIMEZONE return dt.datetime(*args, **kwargs)
def datetime(*args, **kwargs): """ Wrapper around datetime.datetime that adds settings.TIMEZONE if tzinfo not specified :return: datetime.datetime """ if 'tzinfo' not in kwargs: kwargs['tzinfo'] = TIMEZONE return dt.datetime(*args, **kwargs)
[ "Wrapper", "around", "datetime", ".", "datetime", "that", "adds", "settings", ".", "TIMEZONE", "if", "tzinfo", "not", "specified" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/timezone.py#L160-L169
[ "def", "datetime", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'tzinfo'", "not", "in", "kwargs", ":", "kwargs", "[", "'tzinfo'", "]", "=", "TIMEZONE", "return", "dt", ".", "datetime", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
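The same default-injection idea as the datetime() wrapper, in a dependency-free sketch; dt.timezone.utc stands in for settings.TIMEZONE (an assumption, since the real default comes from Airflow's configuration).

    import datetime as dt

    DEFAULT_TZ = dt.timezone.utc   # stand-in for settings.TIMEZONE

    def tz_datetime(*args, **kwargs):
        # Only inject the default when the caller did not pass tzinfo themselves.
        kwargs.setdefault("tzinfo", DEFAULT_TZ)
        return dt.datetime(*args, **kwargs)

    print(tz_datetime(2019, 1, 1))                                              # aware, +00:00
    print(tz_datetime(2019, 1, 1, tzinfo=dt.timezone(dt.timedelta(hours=2))))   # caller wins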
test
GKEPodOperator._set_env_from_extras
Sets the environment variable `GOOGLE_APPLICATION_CREDENTIALS` with either: - The path to the keyfile from the specified connection id - A generated file's path if the user specified JSON in the connection id. The file is assumed to be deleted after the process dies due to how mkstemp() works. The environment variable is used inside the gcloud command to determine correct service account to use.
airflow/contrib/operators/gcp_container_operator.py
def _set_env_from_extras(self, extras): """ Sets the environment variable `GOOGLE_APPLICATION_CREDENTIALS` with either: - The path to the keyfile from the specified connection id - A generated file's path if the user specified JSON in the connection id. The file is assumed to be deleted after the process dies due to how mkstemp() works. The environment variable is used inside the gcloud command to determine correct service account to use. """ key_path = self._get_field(extras, 'key_path', False) keyfile_json_str = self._get_field(extras, 'keyfile_dict', False) if not key_path and not keyfile_json_str: self.log.info('Using gcloud with application default credentials.') elif key_path: os.environ[G_APP_CRED] = key_path else: # Write service account JSON to secure file for gcloud to reference service_key = tempfile.NamedTemporaryFile(delete=False) service_key.write(keyfile_json_str) os.environ[G_APP_CRED] = service_key.name # Return file object to have a pointer to close after use, # thus deleting from file system. return service_key
def _set_env_from_extras(self, extras): """ Sets the environment variable `GOOGLE_APPLICATION_CREDENTIALS` with either: - The path to the keyfile from the specified connection id - A generated file's path if the user specified JSON in the connection id. The file is assumed to be deleted after the process dies due to how mkstemp() works. The environment variable is used inside the gcloud command to determine correct service account to use. """ key_path = self._get_field(extras, 'key_path', False) keyfile_json_str = self._get_field(extras, 'keyfile_dict', False) if not key_path and not keyfile_json_str: self.log.info('Using gcloud with application default credentials.') elif key_path: os.environ[G_APP_CRED] = key_path else: # Write service account JSON to secure file for gcloud to reference service_key = tempfile.NamedTemporaryFile(delete=False) service_key.write(keyfile_json_str) os.environ[G_APP_CRED] = service_key.name # Return file object to have a pointer to close after use, # thus deleting from file system. return service_key
[ "Sets", "the", "environment", "variable", "GOOGLE_APPLICATION_CREDENTIALS", "with", "either", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/gcp_container_operator.py#L282-L308
[ "def", "_set_env_from_extras", "(", "self", ",", "extras", ")", ":", "key_path", "=", "self", ".", "_get_field", "(", "extras", ",", "'key_path'", ",", "False", ")", "keyfile_json_str", "=", "self", ".", "_get_field", "(", "extras", ",", "'keyfile_dict'", ",", "False", ")", "if", "not", "key_path", "and", "not", "keyfile_json_str", ":", "self", ".", "log", ".", "info", "(", "'Using gcloud with application default credentials.'", ")", "elif", "key_path", ":", "os", ".", "environ", "[", "G_APP_CRED", "]", "=", "key_path", "else", ":", "# Write service account JSON to secure file for gcloud to reference", "service_key", "=", "tempfile", ".", "NamedTemporaryFile", "(", "delete", "=", "False", ")", "service_key", ".", "write", "(", "keyfile_json_str", ")", "os", ".", "environ", "[", "G_APP_CRED", "]", "=", "service_key", ".", "name", "# Return file object to have a pointer to close after use,", "# thus deleting from file system.", "return", "service_key" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
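A hedged sketch of the keyfile-to-temp-file branch of _set_env_from_extras() in isolation. The JSON string is a dummy value, and the file is opened in text mode so a str payload can be written; the method above uses the default binary mode, where a bytes payload would be needed instead.

    import os
    import tempfile

    G_APP_CRED = "GOOGLE_APPLICATION_CREDENTIALS"
    keyfile_json_str = '{"type": "service_account"}'   # dummy keyfile contents

    service_key = tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False)
    service_key.write(keyfile_json_str)
    service_key.flush()

    # gcloud and the Google client libraries read this variable to pick credentials.
    os.environ[G_APP_CRED] = service_key.name
    print(os.environ[G_APP_CRED])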
test
GKEPodOperator._get_field
Fetches a field from extras, and returns it. This is some Airflow magic. The google_cloud_platform hook type adds custom UI elements to the hook page, which allow admins to specify service_account, key_path, etc. They get formatted as shown below.
airflow/contrib/operators/gcp_container_operator.py
def _get_field(self, extras, field, default=None): """ Fetches a field from extras, and returns it. This is some Airflow magic. The google_cloud_platform hook type adds custom UI elements to the hook page, which allow admins to specify service_account, key_path, etc. They get formatted as shown below. """ long_f = 'extra__google_cloud_platform__{}'.format(field) if long_f in extras: return extras[long_f] else: self.log.info('Field %s not found in extras.', field) return default
def _get_field(self, extras, field, default=None): """ Fetches a field from extras, and returns it. This is some Airflow magic. The google_cloud_platform hook type adds custom UI elements to the hook page, which allow admins to specify service_account, key_path, etc. They get formatted as shown below. """ long_f = 'extra__google_cloud_platform__{}'.format(field) if long_f in extras: return extras[long_f] else: self.log.info('Field %s not found in extras.', field) return default
[ "Fetches", "a", "field", "from", "extras", "and", "returns", "it", ".", "This", "is", "some", "Airflow", "magic", ".", "The", "google_cloud_platform", "hook", "type", "adds", "custom", "UI", "elements", "to", "the", "hook", "page", "which", "allow", "admins", "to", "specify", "service_account", "key_path", "etc", ".", "They", "get", "formatted", "as", "shown", "below", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/gcp_container_operator.py#L310-L322
[ "def", "_get_field", "(", "self", ",", "extras", ",", "field", ",", "default", "=", "None", ")", ":", "long_f", "=", "'extra__google_cloud_platform__{}'", ".", "format", "(", "field", ")", "if", "long_f", "in", "extras", ":", "return", "extras", "[", "long_f", "]", "else", ":", "self", ".", "log", ".", "info", "(", "'Field %s not found in extras.'", ",", "field", ")", "return", "default" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DruidDbApiHook.get_conn
Establish a connection to druid broker.
airflow/hooks/druid_hook.py
def get_conn(self): """ Establish a connection to druid broker. """ conn = self.get_connection(self.druid_broker_conn_id) druid_broker_conn = connect( host=conn.host, port=conn.port, path=conn.extra_dejson.get('endpoint', '/druid/v2/sql'), scheme=conn.extra_dejson.get('schema', 'http') ) self.log.info('Get the connection to druid broker on %s', conn.host) return druid_broker_conn
def get_conn(self): """ Establish a connection to druid broker. """ conn = self.get_connection(self.druid_broker_conn_id) druid_broker_conn = connect( host=conn.host, port=conn.port, path=conn.extra_dejson.get('endpoint', '/druid/v2/sql'), scheme=conn.extra_dejson.get('schema', 'http') ) self.log.info('Get the connection to druid broker on %s', conn.host) return druid_broker_conn
[ "Establish", "a", "connection", "to", "druid", "broker", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/druid_hook.py#L127-L139
[ "def", "get_conn", "(", "self", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "druid_broker_conn_id", ")", "druid_broker_conn", "=", "connect", "(", "host", "=", "conn", ".", "host", ",", "port", "=", "conn", ".", "port", ",", "path", "=", "conn", ".", "extra_dejson", ".", "get", "(", "'endpoint'", ",", "'/druid/v2/sql'", ")", ",", "scheme", "=", "conn", ".", "extra_dejson", ".", "get", "(", "'schema'", ",", "'http'", ")", ")", "self", ".", "log", ".", "info", "(", "'Get the connection to druid broker on %s'", ",", "conn", ".", "host", ")", "return", "druid_broker_conn" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
HttpHook.get_conn
Returns http session for use with requests :param headers: additional headers to be passed through as a dictionary :type headers: dict
airflow/hooks/http_hook.py
def get_conn(self, headers=None): """ Returns http session for use with requests :param headers: additional headers to be passed through as a dictionary :type headers: dict """ session = requests.Session() if self.http_conn_id: conn = self.get_connection(self.http_conn_id) if "://" in conn.host: self.base_url = conn.host else: # schema defaults to HTTP schema = conn.schema if conn.schema else "http" self.base_url = schema + "://" + conn.host if conn.port: self.base_url = self.base_url + ":" + str(conn.port) if conn.login: session.auth = (conn.login, conn.password) if conn.extra: try: session.headers.update(conn.extra_dejson) except TypeError: self.log.warn('Connection to %s has invalid extra field.', conn.host) if headers: session.headers.update(headers) return session
def get_conn(self, headers=None): """ Returns http session for use with requests :param headers: additional headers to be passed through as a dictionary :type headers: dict """ session = requests.Session() if self.http_conn_id: conn = self.get_connection(self.http_conn_id) if "://" in conn.host: self.base_url = conn.host else: # schema defaults to HTTP schema = conn.schema if conn.schema else "http" self.base_url = schema + "://" + conn.host if conn.port: self.base_url = self.base_url + ":" + str(conn.port) if conn.login: session.auth = (conn.login, conn.password) if conn.extra: try: session.headers.update(conn.extra_dejson) except TypeError: self.log.warn('Connection to %s has invalid extra field.', conn.host) if headers: session.headers.update(headers) return session
[ "Returns", "http", "session", "for", "use", "with", "requests" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/http_hook.py#L53-L83
[ "def", "get_conn", "(", "self", ",", "headers", "=", "None", ")", ":", "session", "=", "requests", ".", "Session", "(", ")", "if", "self", ".", "http_conn_id", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "http_conn_id", ")", "if", "\"://\"", "in", "conn", ".", "host", ":", "self", ".", "base_url", "=", "conn", ".", "host", "else", ":", "# schema defaults to HTTP", "schema", "=", "conn", ".", "schema", "if", "conn", ".", "schema", "else", "\"http\"", "self", ".", "base_url", "=", "schema", "+", "\"://\"", "+", "conn", ".", "host", "if", "conn", ".", "port", ":", "self", ".", "base_url", "=", "self", ".", "base_url", "+", "\":\"", "+", "str", "(", "conn", ".", "port", ")", "if", "conn", ".", "login", ":", "session", ".", "auth", "=", "(", "conn", ".", "login", ",", "conn", ".", "password", ")", "if", "conn", ".", "extra", ":", "try", ":", "session", ".", "headers", ".", "update", "(", "conn", ".", "extra_dejson", ")", "except", "TypeError", ":", "self", ".", "log", ".", "warn", "(", "'Connection to %s has invalid extra field.'", ",", "conn", ".", "host", ")", "if", "headers", ":", "session", ".", "headers", ".", "update", "(", "headers", ")", "return", "session" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
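A standalone sketch of the base-URL and session assembly done in HttpHook.get_conn(), with hard-coded stand-ins for the connection fields (host, schema, port, login and the extra headers are all hypothetical values here):

    import requests

    host, schema, port = "api.example.com", "https", 443   # hypothetical connection fields
    login, password = "user", "secret"

    session = requests.Session()
    base_url = host if "://" in host else schema + "://" + host
    if port:
        base_url = base_url + ":" + str(port)
    session.auth = (login, password)
    session.headers.update({"Accept": "application/json"})   # would come from `extra`
    print(base_url)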
test
HttpHook.run
Performs the request :param endpoint: the endpoint to be called i.e. resource/v1/query? :type endpoint: str :param data: payload to be uploaded or request parameters :type data: dict :param headers: additional headers to be passed through as a dictionary :type headers: dict :param extra_options: additional options to be used when executing the request i.e. {'check_response': False} to avoid checking raising exceptions on non 2XX or 3XX status codes :type extra_options: dict
airflow/hooks/http_hook.py
def run(self, endpoint, data=None, headers=None, extra_options=None): """ Performs the request :param endpoint: the endpoint to be called i.e. resource/v1/query? :type endpoint: str :param data: payload to be uploaded or request parameters :type data: dict :param headers: additional headers to be passed through as a dictionary :type headers: dict :param extra_options: additional options to be used when executing the request i.e. {'check_response': False} to avoid checking raising exceptions on non 2XX or 3XX status codes :type extra_options: dict """ extra_options = extra_options or {} session = self.get_conn(headers) if self.base_url and not self.base_url.endswith('/') and \ endpoint and not endpoint.startswith('/'): url = self.base_url + '/' + endpoint else: url = (self.base_url or '') + (endpoint or '') req = None if self.method == 'GET': # GET uses params req = requests.Request(self.method, url, params=data, headers=headers) elif self.method == 'HEAD': # HEAD doesn't use params req = requests.Request(self.method, url, headers=headers) else: # Others use data req = requests.Request(self.method, url, data=data, headers=headers) prepped_request = session.prepare_request(req) self.log.info("Sending '%s' to url: %s", self.method, url) return self.run_and_check(session, prepped_request, extra_options)
def run(self, endpoint, data=None, headers=None, extra_options=None): """ Performs the request :param endpoint: the endpoint to be called i.e. resource/v1/query? :type endpoint: str :param data: payload to be uploaded or request parameters :type data: dict :param headers: additional headers to be passed through as a dictionary :type headers: dict :param extra_options: additional options to be used when executing the request i.e. {'check_response': False} to avoid checking raising exceptions on non 2XX or 3XX status codes :type extra_options: dict """ extra_options = extra_options or {} session = self.get_conn(headers) if self.base_url and not self.base_url.endswith('/') and \ endpoint and not endpoint.startswith('/'): url = self.base_url + '/' + endpoint else: url = (self.base_url or '') + (endpoint or '') req = None if self.method == 'GET': # GET uses params req = requests.Request(self.method, url, params=data, headers=headers) elif self.method == 'HEAD': # HEAD doesn't use params req = requests.Request(self.method, url, headers=headers) else: # Others use data req = requests.Request(self.method, url, data=data, headers=headers) prepped_request = session.prepare_request(req) self.log.info("Sending '%s' to url: %s", self.method, url) return self.run_and_check(session, prepped_request, extra_options)
[ "Performs", "the", "request" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/http_hook.py#L85-L131
[ "def", "run", "(", "self", ",", "endpoint", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "extra_options", "=", "None", ")", ":", "extra_options", "=", "extra_options", "or", "{", "}", "session", "=", "self", ".", "get_conn", "(", "headers", ")", "if", "self", ".", "base_url", "and", "not", "self", ".", "base_url", ".", "endswith", "(", "'/'", ")", "and", "endpoint", "and", "not", "endpoint", ".", "startswith", "(", "'/'", ")", ":", "url", "=", "self", ".", "base_url", "+", "'/'", "+", "endpoint", "else", ":", "url", "=", "(", "self", ".", "base_url", "or", "''", ")", "+", "(", "endpoint", "or", "''", ")", "req", "=", "None", "if", "self", ".", "method", "==", "'GET'", ":", "# GET uses params", "req", "=", "requests", ".", "Request", "(", "self", ".", "method", ",", "url", ",", "params", "=", "data", ",", "headers", "=", "headers", ")", "elif", "self", ".", "method", "==", "'HEAD'", ":", "# HEAD doesn't use params", "req", "=", "requests", ".", "Request", "(", "self", ".", "method", ",", "url", ",", "headers", "=", "headers", ")", "else", ":", "# Others use data", "req", "=", "requests", ".", "Request", "(", "self", ".", "method", ",", "url", ",", "data", "=", "data", ",", "headers", "=", "headers", ")", "prepped_request", "=", "session", ".", "prepare_request", "(", "req", ")", "self", ".", "log", ".", "info", "(", "\"Sending '%s' to url: %s\"", ",", "self", ".", "method", ",", "url", ")", "return", "self", ".", "run_and_check", "(", "session", ",", "prepped_request", ",", "extra_options", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
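A minimal requests-only sketch of the prepare-then-send flow used in HttpHook.run(); the URL is a placeholder and the network call itself is left commented out so the snippet runs offline.

    import requests

    session = requests.Session()
    req = requests.Request("GET", "https://example.com/resource/v1/query",
                           params={"q": "airflow"})          # GET carries params
    prepped = session.prepare_request(req)                   # merges session headers/auth

    print(prepped.method, prepped.url)
    # response = session.send(prepped, timeout=10)           # real call, omitted here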
test
HttpHook.check_response
Checks the status code and raise an AirflowException exception on non 2XX or 3XX status codes :param response: A requests response object :type response: requests.response
airflow/hooks/http_hook.py
def check_response(self, response): """ Checks the status code and raise an AirflowException exception on non 2XX or 3XX status codes :param response: A requests response object :type response: requests.response """ try: response.raise_for_status() except requests.exceptions.HTTPError: self.log.error("HTTP error: %s", response.reason) if self.method not in ['GET', 'HEAD']: self.log.error(response.text) raise AirflowException(str(response.status_code) + ":" + response.reason)
def check_response(self, response): """ Checks the status code and raise an AirflowException exception on non 2XX or 3XX status codes :param response: A requests response object :type response: requests.response """ try: response.raise_for_status() except requests.exceptions.HTTPError: self.log.error("HTTP error: %s", response.reason) if self.method not in ['GET', 'HEAD']: self.log.error(response.text) raise AirflowException(str(response.status_code) + ":" + response.reason)
[ "Checks", "the", "status", "code", "and", "raise", "an", "AirflowException", "exception", "on", "non", "2XX", "or", "3XX", "status", "codes" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/http_hook.py#L133-L147
[ "def", "check_response", "(", "self", ",", "response", ")", ":", "try", ":", "response", ".", "raise_for_status", "(", ")", "except", "requests", ".", "exceptions", ".", "HTTPError", ":", "self", ".", "log", ".", "error", "(", "\"HTTP error: %s\"", ",", "response", ".", "reason", ")", "if", "self", ".", "method", "not", "in", "[", "'GET'", ",", "'HEAD'", "]", ":", "self", ".", "log", ".", "error", "(", "response", ".", "text", ")", "raise", "AirflowException", "(", "str", "(", "response", ".", "status_code", ")", "+", "\":\"", "+", "response", ".", "reason", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
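raise_for_status(), which check_response() relies on, can be exercised without any network traffic by building a bare Response object, which keeps this sketch self-contained:

    import requests

    response = requests.Response()
    response.status_code = 404
    response.reason = "Not Found"

    try:
        response.raise_for_status()          # raises for any 4XX/5XX status
    except requests.exceptions.HTTPError as err:
        print("HTTP error:", err)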
test
HttpHook.run_and_check
Grabs extra options like timeout and actually runs the request, checking for the result :param session: the session to be used to execute the request :type session: requests.Session :param prepped_request: the prepared request generated in run() :type prepped_request: session.prepare_request :param extra_options: additional options to be used when executing the request i.e. {'check_response': False} to avoid checking raising exceptions on non 2XX or 3XX status codes :type extra_options: dict
airflow/hooks/http_hook.py
def run_and_check(self, session, prepped_request, extra_options): """ Grabs extra options like timeout and actually runs the request, checking for the result :param session: the session to be used to execute the request :type session: requests.Session :param prepped_request: the prepared request generated in run() :type prepped_request: session.prepare_request :param extra_options: additional options to be used when executing the request i.e. {'check_response': False} to avoid checking raising exceptions on non 2XX or 3XX status codes :type extra_options: dict """ extra_options = extra_options or {} try: response = session.send( prepped_request, stream=extra_options.get("stream", False), verify=extra_options.get("verify", True), proxies=extra_options.get("proxies", {}), cert=extra_options.get("cert"), timeout=extra_options.get("timeout"), allow_redirects=extra_options.get("allow_redirects", True)) if extra_options.get('check_response', True): self.check_response(response) return response except requests.exceptions.ConnectionError as ex: self.log.warn(str(ex) + ' Tenacity will retry to execute the operation') raise ex
def run_and_check(self, session, prepped_request, extra_options): """ Grabs extra options like timeout and actually runs the request, checking for the result :param session: the session to be used to execute the request :type session: requests.Session :param prepped_request: the prepared request generated in run() :type prepped_request: session.prepare_request :param extra_options: additional options to be used when executing the request i.e. {'check_response': False} to avoid checking raising exceptions on non 2XX or 3XX status codes :type extra_options: dict """ extra_options = extra_options or {} try: response = session.send( prepped_request, stream=extra_options.get("stream", False), verify=extra_options.get("verify", True), proxies=extra_options.get("proxies", {}), cert=extra_options.get("cert"), timeout=extra_options.get("timeout"), allow_redirects=extra_options.get("allow_redirects", True)) if extra_options.get('check_response', True): self.check_response(response) return response except requests.exceptions.ConnectionError as ex: self.log.warn(str(ex) + ' Tenacity will retry to execute the operation') raise ex
[ "Grabs", "extra", "options", "like", "timeout", "and", "actually", "runs", "the", "request", "checking", "for", "the", "result" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/http_hook.py#L149-L181
[ "def", "run_and_check", "(", "self", ",", "session", ",", "prepped_request", ",", "extra_options", ")", ":", "extra_options", "=", "extra_options", "or", "{", "}", "try", ":", "response", "=", "session", ".", "send", "(", "prepped_request", ",", "stream", "=", "extra_options", ".", "get", "(", "\"stream\"", ",", "False", ")", ",", "verify", "=", "extra_options", ".", "get", "(", "\"verify\"", ",", "True", ")", ",", "proxies", "=", "extra_options", ".", "get", "(", "\"proxies\"", ",", "{", "}", ")", ",", "cert", "=", "extra_options", ".", "get", "(", "\"cert\"", ")", ",", "timeout", "=", "extra_options", ".", "get", "(", "\"timeout\"", ")", ",", "allow_redirects", "=", "extra_options", ".", "get", "(", "\"allow_redirects\"", ",", "True", ")", ")", "if", "extra_options", ".", "get", "(", "'check_response'", ",", "True", ")", ":", "self", ".", "check_response", "(", "response", ")", "return", "response", "except", "requests", ".", "exceptions", ".", "ConnectionError", "as", "ex", ":", "self", ".", "log", ".", "warn", "(", "str", "(", "ex", ")", "+", "' Tenacity will retry to execute the operation'", ")", "raise", "ex" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
HttpHook.run_with_advanced_retry
Runs Hook.run() with a Tenacity decorator attached to it. This is useful for connectors which might be disturbed by intermittent issues and should not instantly fail. :param _retry_args: Arguments which define the retry behaviour. See Tenacity documentation at https://github.com/jd/tenacity :type _retry_args: dict :Example:: hook = HttpHook(http_conn_id='my_conn',method='GET') retry_args = dict( wait=tenacity.wait_exponential(), stop=tenacity.stop_after_attempt(10), retry=requests.exceptions.ConnectionError ) hook.run_with_advanced_retry( endpoint='v1/test', _retry_args=retry_args )
airflow/hooks/http_hook.py
def run_with_advanced_retry(self, _retry_args, *args, **kwargs): """ Runs Hook.run() with a Tenacity decorator attached to it. This is useful for connectors which might be disturbed by intermittent issues and should not instantly fail. :param _retry_args: Arguments which define the retry behaviour. See Tenacity documentation at https://github.com/jd/tenacity :type _retry_args: dict :Example:: hook = HttpHook(http_conn_id='my_conn',method='GET') retry_args = dict( wait=tenacity.wait_exponential(), stop=tenacity.stop_after_attempt(10), retry=requests.exceptions.ConnectionError ) hook.run_with_advanced_retry( endpoint='v1/test', _retry_args=retry_args ) """ self._retry_obj = tenacity.Retrying( **_retry_args ) self._retry_obj(self.run, *args, **kwargs)
def run_with_advanced_retry(self, _retry_args, *args, **kwargs): """ Runs Hook.run() with a Tenacity decorator attached to it. This is useful for connectors which might be disturbed by intermittent issues and should not instantly fail. :param _retry_args: Arguments which define the retry behaviour. See Tenacity documentation at https://github.com/jd/tenacity :type _retry_args: dict :Example:: hook = HttpHook(http_conn_id='my_conn',method='GET') retry_args = dict( wait=tenacity.wait_exponential(), stop=tenacity.stop_after_attempt(10), retry=requests.exceptions.ConnectionError ) hook.run_with_advanced_retry( endpoint='v1/test', _retry_args=retry_args ) """ self._retry_obj = tenacity.Retrying( **_retry_args ) self._retry_obj(self.run, *args, **kwargs)
[ "Runs", "Hook", ".", "run", "()", "with", "a", "Tenacity", "decorator", "attached", "to", "it", ".", "This", "is", "useful", "for", "connectors", "which", "might", "be", "disturbed", "by", "intermittent", "issues", "and", "should", "not", "instantly", "fail", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/http_hook.py#L183-L211
[ "def", "run_with_advanced_retry", "(", "self", ",", "_retry_args", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_retry_obj", "=", "tenacity", ".", "Retrying", "(", "*", "*", "_retry_args", ")", "self", ".", "_retry_obj", "(", "self", ".", "run", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
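A self-contained tenacity sketch in the spirit of the run_with_advanced_retry() docstring, but using retry_if_exception_type() (the form tenacity documents) and a deliberately failing function so the retry loop is visible without a network:

    import requests
    import tenacity

    retry_args = dict(
        wait=tenacity.wait_fixed(0.1),
        stop=tenacity.stop_after_attempt(3),
        retry=tenacity.retry_if_exception_type(requests.exceptions.ConnectionError),
    )

    attempts = []

    def flaky_call():
        attempts.append(1)
        raise requests.exceptions.ConnectionError("simulated outage")

    try:
        tenacity.Retrying(**retry_args)(flaky_call)   # same call shape as the hook above
    except tenacity.RetryError:
        print("gave up after", len(attempts), "attempts")   # 3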
test
create_session
Contextmanager that will create and teardown a session.
airflow/utils/db.py
def create_session(): """ Contextmanager that will create and teardown a session. """ session = settings.Session() try: yield session session.commit() except Exception: session.rollback() raise finally: session.close()
def create_session(): """ Contextmanager that will create and teardown a session. """ session = settings.Session() try: yield session session.commit() except Exception: session.rollback() raise finally: session.close()
[ "Contextmanager", "that", "will", "create", "and", "teardown", "a", "session", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/db.py#L32-L44
[ "def", "create_session", "(", ")", ":", "session", "=", "settings", ".", "Session", "(", ")", "try", ":", "yield", "session", "session", ".", "commit", "(", ")", "except", "Exception", ":", "session", ".", "rollback", "(", ")", "raise", "finally", ":", "session", ".", "close", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
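The commit/rollback/close control flow of create_session(), reproduced with a dummy session class so it can run without a database; the class is purely illustrative.

    from contextlib import contextmanager

    class DummySession:
        """Stand-in for a SQLAlchemy session; only logs what would happen."""
        def commit(self):
            print("commit")
        def rollback(self):
            print("rollback")
        def close(self):
            print("close")

    @contextmanager
    def demo_session():
        session = DummySession()
        try:
            yield session
            session.commit()        # only reached if the block raised nothing
        except Exception:
            session.rollback()      # undo on any error, then re-raise
            raise
        finally:
            session.close()         # always runs

    with demo_session() as session:
        pass                        # prints "commit" then "close"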
test
provide_session
Function decorator that provides a session if it isn't provided. If you want to reuse a session or run the function as part of a database transaction, you pass it to the function, if not this wrapper will create one and close it for you.
airflow/utils/db.py
def provide_session(func): """ Function decorator that provides a session if it isn't provided. If you want to reuse a session or run the function as part of a database transaction, you pass it to the function, if not this wrapper will create one and close it for you. """ @wraps(func) def wrapper(*args, **kwargs): arg_session = 'session' func_params = func.__code__.co_varnames session_in_args = arg_session in func_params and \ func_params.index(arg_session) < len(args) session_in_kwargs = arg_session in kwargs if session_in_kwargs or session_in_args: return func(*args, **kwargs) else: with create_session() as session: kwargs[arg_session] = session return func(*args, **kwargs) return wrapper
def provide_session(func): """ Function decorator that provides a session if it isn't provided. If you want to reuse a session or run the function as part of a database transaction, you pass it to the function, if not this wrapper will create one and close it for you. """ @wraps(func) def wrapper(*args, **kwargs): arg_session = 'session' func_params = func.__code__.co_varnames session_in_args = arg_session in func_params and \ func_params.index(arg_session) < len(args) session_in_kwargs = arg_session in kwargs if session_in_kwargs or session_in_args: return func(*args, **kwargs) else: with create_session() as session: kwargs[arg_session] = session return func(*args, **kwargs) return wrapper
[ "Function", "decorator", "that", "provides", "a", "session", "if", "it", "isn", "t", "provided", ".", "If", "you", "want", "to", "reuse", "a", "session", "or", "run", "the", "function", "as", "part", "of", "a", "database", "transaction", "you", "pass", "it", "to", "the", "function", "if", "not", "this", "wrapper", "will", "create", "one", "and", "close", "it", "for", "you", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/db.py#L47-L70
[ "def", "provide_session", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "arg_session", "=", "'session'", "func_params", "=", "func", ".", "__code__", ".", "co_varnames", "session_in_args", "=", "arg_session", "in", "func_params", "and", "func_params", ".", "index", "(", "arg_session", ")", "<", "len", "(", "args", ")", "session_in_kwargs", "=", "arg_session", "in", "kwargs", "if", "session_in_kwargs", "or", "session_in_args", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "with", "create_session", "(", ")", "as", "session", ":", "kwargs", "[", "arg_session", "]", "=", "session", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
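The varname-inspection trick used by provide_session(), reduced to a dependency-free sketch that injects a dummy value instead of a database session:

    from functools import wraps

    def provide_value(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            arg_name = "session"
            func_params = func.__code__.co_varnames
            in_args = (arg_name in func_params and
                       func_params.index(arg_name) < len(args))
            if arg_name in kwargs or in_args:
                return func(*args, **kwargs)   # caller already supplied one
            kwargs[arg_name] = "default-session"
            return func(*args, **kwargs)
        return wrapper

    @provide_value
    def show(session=None):
        return session

    print(show())                        # "default-session", injected by the wrapper
    print(show(session="explicit"))      # caller-supplied value wins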
test
resetdb
Clear out the database
airflow/utils/db.py
def resetdb(): """ Clear out the database """ from airflow import models # alembic adds significant import time, so we import it lazily from alembic.migration import MigrationContext log.info("Dropping tables that exist") models.base.Base.metadata.drop_all(settings.engine) mc = MigrationContext.configure(settings.engine) if mc._version.exists(settings.engine): mc._version.drop(settings.engine) from flask_appbuilder.models.sqla import Base Base.metadata.drop_all(settings.engine) initdb()
def resetdb(): """ Clear out the database """ from airflow import models # alembic adds significant import time, so we import it lazily from alembic.migration import MigrationContext log.info("Dropping tables that exist") models.base.Base.metadata.drop_all(settings.engine) mc = MigrationContext.configure(settings.engine) if mc._version.exists(settings.engine): mc._version.drop(settings.engine) from flask_appbuilder.models.sqla import Base Base.metadata.drop_all(settings.engine) initdb()
[ "Clear", "out", "the", "database" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/db.py#L312-L331
[ "def", "resetdb", "(", ")", ":", "from", "airflow", "import", "models", "# alembic adds significant import time, so we import it lazily", "from", "alembic", ".", "migration", "import", "MigrationContext", "log", ".", "info", "(", "\"Dropping tables that exist\"", ")", "models", ".", "base", ".", "Base", ".", "metadata", ".", "drop_all", "(", "settings", ".", "engine", ")", "mc", "=", "MigrationContext", ".", "configure", "(", "settings", ".", "engine", ")", "if", "mc", ".", "_version", ".", "exists", "(", "settings", ".", "engine", ")", ":", "mc", ".", "_version", ".", "drop", "(", "settings", ".", "engine", ")", "from", "flask_appbuilder", ".", "models", ".", "sqla", "import", "Base", "Base", ".", "metadata", ".", "drop_all", "(", "settings", ".", "engine", ")", "initdb", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
FileToWasbOperator.execute
Upload a file to Azure Blob Storage.
airflow/contrib/operators/file_to_wasb.py
def execute(self, context): """Upload a file to Azure Blob Storage.""" hook = WasbHook(wasb_conn_id=self.wasb_conn_id) self.log.info( 'Uploading %s to wasb://%s ' 'as %s'.format(self.file_path, self.container_name, self.blob_name) ) hook.load_file(self.file_path, self.container_name, self.blob_name, **self.load_options)
def execute(self, context): """Upload a file to Azure Blob Storage.""" hook = WasbHook(wasb_conn_id=self.wasb_conn_id) self.log.info( 'Uploading %s to wasb://%s ' 'as %s'.format(self.file_path, self.container_name, self.blob_name) ) hook.load_file(self.file_path, self.container_name, self.blob_name, **self.load_options)
[ "Upload", "a", "file", "to", "Azure", "Blob", "Storage", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/file_to_wasb.py#L56-L64
[ "def", "execute", "(", "self", ",", "context", ")", ":", "hook", "=", "WasbHook", "(", "wasb_conn_id", "=", "self", ".", "wasb_conn_id", ")", "self", ".", "log", ".", "info", "(", "'Uploading %s to wasb://%s '", "'as %s'", ".", "format", "(", "self", ".", "file_path", ",", "self", ".", "container_name", ",", "self", ".", "blob_name", ")", ")", "hook", ".", "load_file", "(", "self", ".", "file_path", ",", "self", ".", "container_name", ",", "self", ".", "blob_name", ",", "*", "*", "self", ".", "load_options", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
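The log message in FileToWasbOperator.execute() mixes %s placeholders with str.format(), so the values are never interpolated into the text; a small sketch of the lazy %-style call the message appears to intend (the paths and names are placeholders):

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)

    file_path, container_name, blob_name = "/tmp/data.csv", "mycontainer", "data.csv"

    # Placeholders and arguments line up, and formatting is deferred to the handler.
    log.info("Uploading %s to wasb://%s as %s", file_path, container_name, blob_name)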
test
PrestoHook.get_conn
Returns a connection object
airflow/hooks/presto_hook.py
def get_conn(self): """Returns a connection object""" db = self.get_connection(self.presto_conn_id) reqkwargs = None if db.password is not None: reqkwargs = {'auth': HTTPBasicAuth(db.login, db.password)} return presto.connect( host=db.host, port=db.port, username=db.login, source=db.extra_dejson.get('source', 'airflow'), protocol=db.extra_dejson.get('protocol', 'http'), catalog=db.extra_dejson.get('catalog', 'hive'), requests_kwargs=reqkwargs, schema=db.schema)
def get_conn(self): """Returns a connection object""" db = self.get_connection(self.presto_conn_id) reqkwargs = None if db.password is not None: reqkwargs = {'auth': HTTPBasicAuth(db.login, db.password)} return presto.connect( host=db.host, port=db.port, username=db.login, source=db.extra_dejson.get('source', 'airflow'), protocol=db.extra_dejson.get('protocol', 'http'), catalog=db.extra_dejson.get('catalog', 'hive'), requests_kwargs=reqkwargs, schema=db.schema)
[ "Returns", "a", "connection", "object" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/presto_hook.py#L46-L60
[ "def", "get_conn", "(", "self", ")", ":", "db", "=", "self", ".", "get_connection", "(", "self", ".", "presto_conn_id", ")", "reqkwargs", "=", "None", "if", "db", ".", "password", "is", "not", "None", ":", "reqkwargs", "=", "{", "'auth'", ":", "HTTPBasicAuth", "(", "db", ".", "login", ",", "db", ".", "password", ")", "}", "return", "presto", ".", "connect", "(", "host", "=", "db", ".", "host", ",", "port", "=", "db", ".", "port", ",", "username", "=", "db", ".", "login", ",", "source", "=", "db", ".", "extra_dejson", ".", "get", "(", "'source'", ",", "'airflow'", ")", ",", "protocol", "=", "db", ".", "extra_dejson", ".", "get", "(", "'protocol'", ",", "'http'", ")", ",", "catalog", "=", "db", ".", "extra_dejson", ".", "get", "(", "'catalog'", ",", "'hive'", ")", ",", "requests_kwargs", "=", "reqkwargs", ",", "schema", "=", "db", ".", "schema", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PrestoHook._get_pretty_exception_message
Parses some DatabaseError to provide a better error message
airflow/hooks/presto_hook.py
def _get_pretty_exception_message(e): """ Parses some DatabaseError to provide a better error message """ if (hasattr(e, 'message') and 'errorName' in e.message and 'message' in e.message): return ('{name}: {message}'.format( name=e.message['errorName'], message=e.message['message'])) else: return str(e)
def _get_pretty_exception_message(e): """ Parses some DatabaseError to provide a better error message """ if (hasattr(e, 'message') and 'errorName' in e.message and 'message' in e.message): return ('{name}: {message}'.format( name=e.message['errorName'], message=e.message['message'])) else: return str(e)
[ "Parses", "some", "DatabaseError", "to", "provide", "a", "better", "error", "message" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/presto_hook.py#L67-L78
[ "def", "_get_pretty_exception_message", "(", "e", ")", ":", "if", "(", "hasattr", "(", "e", ",", "'message'", ")", "and", "'errorName'", "in", "e", ".", "message", "and", "'message'", "in", "e", ".", "message", ")", ":", "return", "(", "'{name}: {message}'", ".", "format", "(", "name", "=", "e", ".", "message", "[", "'errorName'", "]", ",", "message", "=", "e", ".", "message", "[", "'message'", "]", ")", ")", "else", ":", "return", "str", "(", "e", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PrestoHook.get_records
Get a set of records from Presto
airflow/hooks/presto_hook.py
def get_records(self, hql, parameters=None): """ Get a set of records from Presto """ try: return super().get_records( self._strip_sql(hql), parameters) except DatabaseError as e: raise PrestoException(self._get_pretty_exception_message(e))
def get_records(self, hql, parameters=None): """ Get a set of records from Presto """ try: return super().get_records( self._strip_sql(hql), parameters) except DatabaseError as e: raise PrestoException(self._get_pretty_exception_message(e))
[ "Get", "a", "set", "of", "records", "from", "Presto" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/presto_hook.py#L80-L88
[ "def", "get_records", "(", "self", ",", "hql", ",", "parameters", "=", "None", ")", ":", "try", ":", "return", "super", "(", ")", ".", "get_records", "(", "self", ".", "_strip_sql", "(", "hql", ")", ",", "parameters", ")", "except", "DatabaseError", "as", "e", ":", "raise", "PrestoException", "(", "self", ".", "_get_pretty_exception_message", "(", "e", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PrestoHook.get_pandas_df
Get a pandas dataframe from a sql query.
airflow/hooks/presto_hook.py
def get_pandas_df(self, hql, parameters=None): """ Get a pandas dataframe from a sql query. """ import pandas cursor = self.get_cursor() try: cursor.execute(self._strip_sql(hql), parameters) data = cursor.fetchall() except DatabaseError as e: raise PrestoException(self._get_pretty_exception_message(e)) column_descriptions = cursor.description if data: df = pandas.DataFrame(data) df.columns = [c[0] for c in column_descriptions] else: df = pandas.DataFrame() return df
def get_pandas_df(self, hql, parameters=None): """ Get a pandas dataframe from a sql query. """ import pandas cursor = self.get_cursor() try: cursor.execute(self._strip_sql(hql), parameters) data = cursor.fetchall() except DatabaseError as e: raise PrestoException(self._get_pretty_exception_message(e)) column_descriptions = cursor.description if data: df = pandas.DataFrame(data) df.columns = [c[0] for c in column_descriptions] else: df = pandas.DataFrame() return df
[ "Get", "a", "pandas", "dataframe", "from", "a", "sql", "query", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/presto_hook.py#L101-L118
[ "def", "get_pandas_df", "(", "self", ",", "hql", ",", "parameters", "=", "None", ")", ":", "import", "pandas", "cursor", "=", "self", ".", "get_cursor", "(", ")", "try", ":", "cursor", ".", "execute", "(", "self", ".", "_strip_sql", "(", "hql", ")", ",", "parameters", ")", "data", "=", "cursor", ".", "fetchall", "(", ")", "except", "DatabaseError", "as", "e", ":", "raise", "PrestoException", "(", "self", ".", "_get_pretty_exception_message", "(", "e", ")", ")", "column_descriptions", "=", "cursor", ".", "description", "if", "data", ":", "df", "=", "pandas", ".", "DataFrame", "(", "data", ")", "df", ".", "columns", "=", "[", "c", "[", "0", "]", "for", "c", "in", "column_descriptions", "]", "else", ":", "df", "=", "pandas", ".", "DataFrame", "(", ")", "return", "df" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
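The cursor.description-to-column-names trick from get_pandas_df(), demonstrated against an in-memory sqlite3 database so it runs anywhere pandas is installed (pandas itself is assumed to be available):

    import sqlite3
    import pandas

    conn = sqlite3.connect(":memory:")
    cursor = conn.cursor()
    cursor.execute("SELECT 1 AS id, 'airflow' AS name")
    data = cursor.fetchall()

    df = pandas.DataFrame(data)
    df.columns = [c[0] for c in cursor.description]   # first element of each entry is the name
    print(df)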
test
PrestoHook.run
Execute the statement against Presto. Can be used to create views.
airflow/hooks/presto_hook.py
def run(self, hql, parameters=None): """ Execute the statement against Presto. Can be used to create views. """ return super().run(self._strip_sql(hql), parameters)
def run(self, hql, parameters=None): """ Execute the statement against Presto. Can be used to create views. """ return super().run(self._strip_sql(hql), parameters)
[ "Execute", "the", "statement", "against", "Presto", ".", "Can", "be", "used", "to", "create", "views", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/presto_hook.py#L120-L124
[ "def", "run", "(", "self", ",", "hql", ",", "parameters", "=", "None", ")", ":", "return", "super", "(", ")", ".", "run", "(", "self", ".", "_strip_sql", "(", "hql", ")", ",", "parameters", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PrestoHook.insert_rows
A generic way to insert a set of tuples into a table. :param table: Name of the target table :type table: str :param rows: The rows to insert into the table :type rows: iterable of tuples :param target_fields: The names of the columns to fill in the table :type target_fields: iterable of strings
airflow/hooks/presto_hook.py
def insert_rows(self, table, rows, target_fields=None): """ A generic way to insert a set of tuples into a table. :param table: Name of the target table :type table: str :param rows: The rows to insert into the table :type rows: iterable of tuples :param target_fields: The names of the columns to fill in the table :type target_fields: iterable of strings """ super().insert_rows(table, rows, target_fields, 0)
def insert_rows(self, table, rows, target_fields=None): """ A generic way to insert a set of tuples into a table. :param table: Name of the target table :type table: str :param rows: The rows to insert into the table :type rows: iterable of tuples :param target_fields: The names of the columns to fill in the table :type target_fields: iterable of strings """ super().insert_rows(table, rows, target_fields, 0)
[ "A", "generic", "way", "to", "insert", "a", "set", "of", "tuples", "into", "a", "table", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/presto_hook.py#L129-L140
[ "def", "insert_rows", "(", "self", ",", "table", ",", "rows", ",", "target_fields", "=", "None", ")", ":", "super", "(", ")", ".", "insert_rows", "(", "table", ",", "rows", ",", "target_fields", ",", "0", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureCosmosDBHook.get_conn
Return a cosmos db client.
airflow/contrib/hooks/azure_cosmos_hook.py
def get_conn(self): """ Return a cosmos db client. """ if self.cosmos_client is not None: return self.cosmos_client # Initialize the Python Azure Cosmos DB client self.cosmos_client = cosmos_client.CosmosClient(self.endpoint_uri, {'masterKey': self.master_key}) return self.cosmos_client
def get_conn(self): """ Return a cosmos db client. """ if self.cosmos_client is not None: return self.cosmos_client # Initialize the Python Azure Cosmos DB client self.cosmos_client = cosmos_client.CosmosClient(self.endpoint_uri, {'masterKey': self.master_key}) return self.cosmos_client
[ "Return", "a", "cosmos", "db", "client", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L50-L60
[ "def", "get_conn", "(", "self", ")", ":", "if", "self", ".", "cosmos_client", "is", "not", "None", ":", "return", "self", ".", "cosmos_client", "# Initialize the Python Azure Cosmos DB client", "self", ".", "cosmos_client", "=", "cosmos_client", ".", "CosmosClient", "(", "self", ".", "endpoint_uri", ",", "{", "'masterKey'", ":", "self", ".", "master_key", "}", ")", "return", "self", ".", "cosmos_client" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureCosmosDBHook.does_collection_exist
Checks if a collection exists in CosmosDB.
airflow/contrib/hooks/azure_cosmos_hook.py
def does_collection_exist(self, collection_name, database_name=None): """ Checks if a collection exists in CosmosDB. """ if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") existing_container = list(self.get_conn().QueryContainers( get_database_link(self.__get_database_name(database_name)), { "query": "SELECT * FROM r WHERE r.id=@id", "parameters": [ {"name": "@id", "value": collection_name} ] })) if len(existing_container) == 0: return False return True
def does_collection_exist(self, collection_name, database_name=None): """ Checks if a collection exists in CosmosDB. """ if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") existing_container = list(self.get_conn().QueryContainers( get_database_link(self.__get_database_name(database_name)), { "query": "SELECT * FROM r WHERE r.id=@id", "parameters": [ {"name": "@id", "value": collection_name} ] })) if len(existing_container) == 0: return False return True
[ "Checks", "if", "a", "collection", "exists", "in", "CosmosDB", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L82-L99
[ "def", "does_collection_exist", "(", "self", ",", "collection_name", ",", "database_name", "=", "None", ")", ":", "if", "collection_name", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"Collection name cannot be None.\"", ")", "existing_container", "=", "list", "(", "self", ".", "get_conn", "(", ")", ".", "QueryContainers", "(", "get_database_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ")", ",", "{", "\"query\"", ":", "\"SELECT * FROM r WHERE r.id=@id\"", ",", "\"parameters\"", ":", "[", "{", "\"name\"", ":", "\"@id\"", ",", "\"value\"", ":", "collection_name", "}", "]", "}", ")", ")", "if", "len", "(", "existing_container", ")", "==", "0", ":", "return", "False", "return", "True" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureCosmosDBHook.create_collection
Creates a new collection in the CosmosDB database.
airflow/contrib/hooks/azure_cosmos_hook.py
def create_collection(self, collection_name, database_name=None): """ Creates a new collection in the CosmosDB database. """ if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") # We need to check to see if this container already exists so we don't try # to create it twice existing_container = list(self.get_conn().QueryContainers( get_database_link(self.__get_database_name(database_name)), { "query": "SELECT * FROM r WHERE r.id=@id", "parameters": [ {"name": "@id", "value": collection_name} ] })) # Only create if we did not find it already existing if len(existing_container) == 0: self.get_conn().CreateContainer( get_database_link(self.__get_database_name(database_name)), {"id": collection_name})
def create_collection(self, collection_name, database_name=None): """ Creates a new collection in the CosmosDB database. """ if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") # We need to check to see if this container already exists so we don't try # to create it twice existing_container = list(self.get_conn().QueryContainers( get_database_link(self.__get_database_name(database_name)), { "query": "SELECT * FROM r WHERE r.id=@id", "parameters": [ {"name": "@id", "value": collection_name} ] })) # Only create if we did not find it already existing if len(existing_container) == 0: self.get_conn().CreateContainer( get_database_link(self.__get_database_name(database_name)), {"id": collection_name})
[ "Creates", "a", "new", "collection", "in", "the", "CosmosDB", "database", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L101-L122
[ "def", "create_collection", "(", "self", ",", "collection_name", ",", "database_name", "=", "None", ")", ":", "if", "collection_name", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"Collection name cannot be None.\"", ")", "# We need to check to see if this container already exists so we don't try", "# to create it twice", "existing_container", "=", "list", "(", "self", ".", "get_conn", "(", ")", ".", "QueryContainers", "(", "get_database_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ")", ",", "{", "\"query\"", ":", "\"SELECT * FROM r WHERE r.id=@id\"", ",", "\"parameters\"", ":", "[", "{", "\"name\"", ":", "\"@id\"", ",", "\"value\"", ":", "collection_name", "}", "]", "}", ")", ")", "# Only create if we did not find it already existing", "if", "len", "(", "existing_container", ")", "==", "0", ":", "self", ".", "get_conn", "(", ")", ".", "CreateContainer", "(", "get_database_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ")", ",", "{", "\"id\"", ":", "collection_name", "}", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureCosmosDBHook.does_database_exist
Checks if a database exists in CosmosDB.
airflow/contrib/hooks/azure_cosmos_hook.py
def does_database_exist(self, database_name): """ Checks if a database exists in CosmosDB. """ if database_name is None: raise AirflowBadRequest("Database name cannot be None.") existing_database = list(self.get_conn().QueryDatabases({ "query": "SELECT * FROM r WHERE r.id=@id", "parameters": [ {"name": "@id", "value": database_name} ] })) if len(existing_database) == 0: return False return True
def does_database_exist(self, database_name): """ Checks if a database exists in CosmosDB. """ if database_name is None: raise AirflowBadRequest("Database name cannot be None.") existing_database = list(self.get_conn().QueryDatabases({ "query": "SELECT * FROM r WHERE r.id=@id", "parameters": [ {"name": "@id", "value": database_name} ] })) if len(existing_database) == 0: return False return True
[ "Checks", "if", "a", "database", "exists", "in", "CosmosDB", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L124-L140
[ "def", "does_database_exist", "(", "self", ",", "database_name", ")", ":", "if", "database_name", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"Database name cannot be None.\"", ")", "existing_database", "=", "list", "(", "self", ".", "get_conn", "(", ")", ".", "QueryDatabases", "(", "{", "\"query\"", ":", "\"SELECT * FROM r WHERE r.id=@id\"", ",", "\"parameters\"", ":", "[", "{", "\"name\"", ":", "\"@id\"", ",", "\"value\"", ":", "database_name", "}", "]", "}", ")", ")", "if", "len", "(", "existing_database", ")", "==", "0", ":", "return", "False", "return", "True" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureCosmosDBHook.create_database
Creates a new database in CosmosDB.
airflow/contrib/hooks/azure_cosmos_hook.py
def create_database(self, database_name): """ Creates a new database in CosmosDB. """ if database_name is None: raise AirflowBadRequest("Database name cannot be None.") # We need to check to see if this database already exists so we don't try # to create it twice existing_database = list(self.get_conn().QueryDatabases({ "query": "SELECT * FROM r WHERE r.id=@id", "parameters": [ {"name": "@id", "value": database_name} ] })) # Only create if we did not find it already existing if len(existing_database) == 0: self.get_conn().CreateDatabase({"id": database_name})
def create_database(self, database_name): """ Creates a new database in CosmosDB. """ if database_name is None: raise AirflowBadRequest("Database name cannot be None.") # We need to check to see if this database already exists so we don't try # to create it twice existing_database = list(self.get_conn().QueryDatabases({ "query": "SELECT * FROM r WHERE r.id=@id", "parameters": [ {"name": "@id", "value": database_name} ] })) # Only create if we did not find it already existing if len(existing_database) == 0: self.get_conn().CreateDatabase({"id": database_name})
[ "Creates", "a", "new", "database", "in", "CosmosDB", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L142-L160
[ "def", "create_database", "(", "self", ",", "database_name", ")", ":", "if", "database_name", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"Database name cannot be None.\"", ")", "# We need to check to see if this database already exists so we don't try", "# to create it twice", "existing_database", "=", "list", "(", "self", ".", "get_conn", "(", ")", ".", "QueryDatabases", "(", "{", "\"query\"", ":", "\"SELECT * FROM r WHERE r.id=@id\"", ",", "\"parameters\"", ":", "[", "{", "\"name\"", ":", "\"@id\"", ",", "\"value\"", ":", "database_name", "}", "]", "}", ")", ")", "# Only create if we did not find it already existing", "if", "len", "(", "existing_database", ")", "==", "0", ":", "self", ".", "get_conn", "(", ")", ".", "CreateDatabase", "(", "{", "\"id\"", ":", "database_name", "}", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
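A hedged sketch combining does_database_exist and create_database from the two records above; the connection id argument and the database name are assumptions.
from airflow.contrib.hooks.azure_cosmos_hook import AzureCosmosDBHook

cosmos = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_default')  # assumed constructor kwarg
if not cosmos.does_database_exist('airflow-db'):
    # create_database re-checks existence itself, so this call is safe to repeat.
    cosmos.create_database('airflow-db')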
test
AzureCosmosDBHook.delete_database
Deletes an existing database in CosmosDB.
airflow/contrib/hooks/azure_cosmos_hook.py
def delete_database(self, database_name): """ Deletes an existing database in CosmosDB. """ if database_name is None: raise AirflowBadRequest("Database name cannot be None.") self.get_conn().DeleteDatabase(get_database_link(database_name))
def delete_database(self, database_name): """ Deletes an existing database in CosmosDB. """ if database_name is None: raise AirflowBadRequest("Database name cannot be None.") self.get_conn().DeleteDatabase(get_database_link(database_name))
[ "Deletes", "an", "existing", "database", "in", "CosmosDB", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L162-L169
[ "def", "delete_database", "(", "self", ",", "database_name", ")", ":", "if", "database_name", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"Database name cannot be None.\"", ")", "self", ".", "get_conn", "(", ")", ".", "DeleteDatabase", "(", "get_database_link", "(", "database_name", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureCosmosDBHook.delete_collection
Deletes an existing collection in the CosmosDB database.
airflow/contrib/hooks/azure_cosmos_hook.py
def delete_collection(self, collection_name, database_name=None): """ Deletes an existing collection in the CosmosDB database. """ if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") self.get_conn().DeleteContainer( get_collection_link(self.__get_database_name(database_name), collection_name))
def delete_collection(self, collection_name, database_name=None): """ Deletes an existing collection in the CosmosDB database. """ if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") self.get_conn().DeleteContainer( get_collection_link(self.__get_database_name(database_name), collection_name))
[ "Deletes", "an", "existing", "collection", "in", "the", "CosmosDB", "database", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L171-L179
[ "def", "delete_collection", "(", "self", ",", "collection_name", ",", "database_name", "=", "None", ")", ":", "if", "collection_name", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"Collection name cannot be None.\"", ")", "self", ".", "get_conn", "(", ")", ".", "DeleteContainer", "(", "get_collection_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ",", "collection_name", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureCosmosDBHook.upsert_document
Inserts a new document (or updates an existing one) into an existing collection in the CosmosDB database.
airflow/contrib/hooks/azure_cosmos_hook.py
def upsert_document(self, document, database_name=None, collection_name=None, document_id=None): """ Inserts a new document (or updates an existing one) into an existing collection in the CosmosDB database. """ # Assign unique ID if one isn't provided if document_id is None: document_id = str(uuid.uuid4()) if document is None: raise AirflowBadRequest("You cannot insert a None document") # Add document id if it isn't found if 'id' in document: if document['id'] is None: document['id'] = document_id else: document['id'] = document_id created_document = self.get_conn().CreateItem( get_collection_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name)), document) return created_document
def upsert_document(self, document, database_name=None, collection_name=None, document_id=None): """ Inserts a new document (or updates an existing one) into an existing collection in the CosmosDB database. """ # Assign unique ID if one isn't provided if document_id is None: document_id = str(uuid.uuid4()) if document is None: raise AirflowBadRequest("You cannot insert a None document") # Add document id if it isn't found if 'id' in document: if document['id'] is None: document['id'] = document_id else: document['id'] = document_id created_document = self.get_conn().CreateItem( get_collection_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name)), document) return created_document
[ "Inserts", "a", "new", "document", "(", "or", "updates", "an", "existing", "one", ")", "into", "an", "existing", "collection", "in", "the", "CosmosDB", "database", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L181-L206
[ "def", "upsert_document", "(", "self", ",", "document", ",", "database_name", "=", "None", ",", "collection_name", "=", "None", ",", "document_id", "=", "None", ")", ":", "# Assign unique ID if one isn't provided", "if", "document_id", "is", "None", ":", "document_id", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "if", "document", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"You cannot insert a None document\"", ")", "# Add document id if isn't found", "if", "'id'", "in", "document", ":", "if", "document", "[", "'id'", "]", "is", "None", ":", "document", "[", "'id'", "]", "=", "document_id", "else", ":", "document", "[", "'id'", "]", "=", "document_id", "created_document", "=", "self", ".", "get_conn", "(", ")", ".", "CreateItem", "(", "get_collection_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ",", "self", ".", "__get_collection_name", "(", "collection_name", ")", ")", ",", "document", ")", "return", "created_document" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
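Possible call pattern for upsert_document; as the code above shows, a uuid4 id is generated when neither the document nor document_id supplies one. The hook construction and all names are assumed, as before.
from airflow.contrib.hooks.azure_cosmos_hook import AzureCosmosDBHook

cosmos = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_default')  # assumed constructor kwarg
created = cosmos.upsert_document(
    {'state': 'success', 'try_number': 1},        # example payload
    database_name='airflow-db',
    collection_name='task-results',
    document_id='run-2019-01-01')                 # omit to get a generated uuid4 id
print(created['id'])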
test
AzureCosmosDBHook.insert_documents
Insert a list of new documents into an existing collection in the CosmosDB database.
airflow/contrib/hooks/azure_cosmos_hook.py
def insert_documents(self, documents, database_name=None, collection_name=None): """ Insert a list of new documents into an existing collection in the CosmosDB database. """ if documents is None: raise AirflowBadRequest("You cannot insert empty documents") created_documents = [] for single_document in documents: created_documents.append( self.get_conn().CreateItem( get_collection_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name)), single_document)) return created_documents
def insert_documents(self, documents, database_name=None, collection_name=None): """ Insert a list of new documents into an existing collection in the CosmosDB database. """ if documents is None: raise AirflowBadRequest("You cannot insert empty documents") created_documents = [] for single_document in documents: created_documents.append( self.get_conn().CreateItem( get_collection_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name)), single_document)) return created_documents
[ "Insert", "a", "list", "of", "new", "documents", "into", "an", "existing", "collection", "in", "the", "CosmosDB", "database", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L208-L224
[ "def", "insert_documents", "(", "self", ",", "documents", ",", "database_name", "=", "None", ",", "collection_name", "=", "None", ")", ":", "if", "documents", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"You cannot insert empty documents\"", ")", "created_documents", "=", "[", "]", "for", "single_document", "in", "documents", ":", "created_documents", ".", "append", "(", "self", ".", "get_conn", "(", ")", ".", "CreateItem", "(", "get_collection_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ",", "self", ".", "__get_collection_name", "(", "collection_name", ")", ")", ",", "single_document", ")", ")", "return", "created_documents" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
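insert_documents simply loops CreateItem over the list, so one created document comes back per input; a sketch under the same assumed constructor and names:
from airflow.contrib.hooks.azure_cosmos_hook import AzureCosmosDBHook

cosmos = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_default')  # assumed constructor kwarg
created = cosmos.insert_documents(
    [{'id': 'a', 'value': 1}, {'id': 'b', 'value': 2}],
    database_name='airflow-db',
    collection_name='task-results')
assert len(created) == 2  # one result per inserted document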
test
AzureCosmosDBHook.delete_document
Delete an existing document out of a collection in the CosmosDB database.
airflow/contrib/hooks/azure_cosmos_hook.py
def delete_document(self, document_id, database_name=None, collection_name=None): """ Delete an existing document out of a collection in the CosmosDB database. """ if document_id is None: raise AirflowBadRequest("Cannot delete a document without an id") self.get_conn().DeleteItem( get_document_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name), document_id))
def delete_document(self, document_id, database_name=None, collection_name=None): """ Delete an existing document out of a collection in the CosmosDB database. """ if document_id is None: raise AirflowBadRequest("Cannot delete a document without an id") self.get_conn().DeleteItem( get_document_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name), document_id))
[ "Delete", "an", "existing", "document", "out", "of", "a", "collection", "in", "the", "CosmosDB", "database", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L226-L237
[ "def", "delete_document", "(", "self", ",", "document_id", ",", "database_name", "=", "None", ",", "collection_name", "=", "None", ")", ":", "if", "document_id", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"Cannot delete a document without an id\"", ")", "self", ".", "get_conn", "(", ")", ".", "DeleteItem", "(", "get_document_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ",", "self", ".", "__get_collection_name", "(", "collection_name", ")", ",", "document_id", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureCosmosDBHook.get_document
Get a document from an existing collection in the CosmosDB database.
airflow/contrib/hooks/azure_cosmos_hook.py
def get_document(self, document_id, database_name=None, collection_name=None): """ Get a document from an existing collection in the CosmosDB database. """ if document_id is None: raise AirflowBadRequest("Cannot get a document without an id") try: return self.get_conn().ReadItem( get_document_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name), document_id)) except HTTPFailure: return None
def get_document(self, document_id, database_name=None, collection_name=None): """ Get a document from an existing collection in the CosmosDB database. """ if document_id is None: raise AirflowBadRequest("Cannot get a document without an id") try: return self.get_conn().ReadItem( get_document_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name), document_id)) except HTTPFailure: return None
[ "Get", "a", "document", "from", "an", "existing", "collection", "in", "the", "CosmosDB", "database", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L239-L253
[ "def", "get_document", "(", "self", ",", "document_id", ",", "database_name", "=", "None", ",", "collection_name", "=", "None", ")", ":", "if", "document_id", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"Cannot get a document without an id\"", ")", "try", ":", "return", "self", ".", "get_conn", "(", ")", ".", "ReadItem", "(", "get_document_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ",", "self", ".", "__get_collection_name", "(", "collection_name", ")", ",", "document_id", ")", ")", "except", "HTTPFailure", ":", "return", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
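Because get_document swallows HTTPFailure and returns None, callers should treat None as "missing or unreachable"; a sketch with the same assumed constructor and placeholder names:
from airflow.contrib.hooks.azure_cosmos_hook import AzureCosmosDBHook

cosmos = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_default')  # assumed constructor kwarg
doc = cosmos.get_document('run-2019-01-01',
                          database_name='airflow-db',
                          collection_name='task-results')
if doc is None:
    print('document not found (or the read failed)')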
test
AzureCosmosDBHook.get_documents
Get a list of documents from an existing collection in the CosmosDB database via SQL query.
airflow/contrib/hooks/azure_cosmos_hook.py
def get_documents(self, sql_string, database_name=None, collection_name=None, partition_key=None): """ Get a list of documents from an existing collection in the CosmosDB database via SQL query. """ if sql_string is None: raise AirflowBadRequest("SQL query string cannot be None") # Query them in SQL query = {'query': sql_string} try: result_iterable = self.get_conn().QueryItems( get_collection_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name)), query, partition_key) return list(result_iterable) except HTTPFailure: return None
def get_documents(self, sql_string, database_name=None, collection_name=None, partition_key=None): """ Get a list of documents from an existing collection in the CosmosDB database via SQL query. """ if sql_string is None: raise AirflowBadRequest("SQL query string cannot be None") # Query them in SQL query = {'query': sql_string} try: result_iterable = self.get_conn().QueryItems( get_collection_link( self.__get_database_name(database_name), self.__get_collection_name(collection_name)), query, partition_key) return list(result_iterable) except HTTPFailure: return None
[ "Get", "a", "list", "of", "documents", "from", "an", "existing", "collection", "in", "the", "CosmosDB", "database", "via", "SQL", "query", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_cosmos_hook.py#L255-L275
[ "def", "get_documents", "(", "self", ",", "sql_string", ",", "database_name", "=", "None", ",", "collection_name", "=", "None", ",", "partition_key", "=", "None", ")", ":", "if", "sql_string", "is", "None", ":", "raise", "AirflowBadRequest", "(", "\"SQL query string cannot be None\"", ")", "# Query them in SQL", "query", "=", "{", "'query'", ":", "sql_string", "}", "try", ":", "result_iterable", "=", "self", ".", "get_conn", "(", ")", ".", "QueryItems", "(", "get_collection_link", "(", "self", ".", "__get_database_name", "(", "database_name", ")", ",", "self", ".", "__get_collection_name", "(", "collection_name", ")", ")", ",", "query", ",", "partition_key", ")", "return", "list", "(", "result_iterable", ")", "except", "HTTPFailure", ":", "return", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
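get_documents wraps the SQL string in a query dict and returns None on HTTPFailure, so the result needs a None guard; the query and names below are illustrative only.
from airflow.contrib.hooks.azure_cosmos_hook import AzureCosmosDBHook

cosmos = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_default')  # assumed constructor kwarg
rows = cosmos.get_documents(
    "SELECT * FROM c WHERE c.state = 'failed'",   # example SQL
    database_name='airflow-db',
    collection_name='task-results')
for row in rows or []:                            # rows is None when the query failed
    print(row['id'])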
test
get_code
Return python code of a given dag_id.
airflow/api/common/experimental/get_code.py
def get_code(dag_id): """Return python code of a given dag_id.""" session = settings.Session() DM = models.DagModel dag = session.query(DM).filter(DM.dag_id == dag_id).first() session.close() # Check DAG exists. if dag is None: error_message = "Dag id {} not found".format(dag_id) raise DagNotFound(error_message) try: with wwwutils.open_maybe_zipped(dag.fileloc, 'r') as f: code = f.read() return code except IOError as e: error_message = "Error {} while reading Dag id {} Code".format(str(e), dag_id) raise AirflowException(error_message)
def get_code(dag_id): """Return python code of a given dag_id.""" session = settings.Session() DM = models.DagModel dag = session.query(DM).filter(DM.dag_id == dag_id).first() session.close() # Check DAG exists. if dag is None: error_message = "Dag id {} not found".format(dag_id) raise DagNotFound(error_message) try: with wwwutils.open_maybe_zipped(dag.fileloc, 'r') as f: code = f.read() return code except IOError as e: error_message = "Error {} while reading Dag id {} Code".format(str(e), dag_id) raise AirflowException(error_message)
[ "Return", "python", "code", "of", "a", "given", "dag_id", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/get_code.py#L25-L42
[ "def", "get_code", "(", "dag_id", ")", ":", "session", "=", "settings", ".", "Session", "(", ")", "DM", "=", "models", ".", "DagModel", "dag", "=", "session", ".", "query", "(", "DM", ")", ".", "filter", "(", "DM", ".", "dag_id", "==", "dag_id", ")", ".", "first", "(", ")", "session", ".", "close", "(", ")", "# Check DAG exists.", "if", "dag", "is", "None", ":", "error_message", "=", "\"Dag id {} not found\"", ".", "format", "(", "dag_id", ")", "raise", "DagNotFound", "(", "error_message", ")", "try", ":", "with", "wwwutils", ".", "open_maybe_zipped", "(", "dag", ".", "fileloc", ",", "'r'", ")", "as", "f", ":", "code", "=", "f", ".", "read", "(", ")", "return", "code", "except", "IOError", "as", "e", ":", "error_message", "=", "\"Error {} while reading Dag id {} Code\"", ".", "format", "(", "str", "(", "e", ")", ",", "dag_id", ")", "raise", "AirflowException", "(", "error_message", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
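A short sketch of calling get_code directly; it needs a configured Airflow metadata database, and the dag_id below is only an example.
from airflow.api.common.experimental.get_code import get_code

# Raises DagNotFound for an unknown dag_id and AirflowException if the DAG file cannot be read.
print(get_code('example_bash_operator'))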
test
GcfHook.get_function
Returns the Cloud Function with the given name. :param name: Name of the function. :type name: str :return: A Cloud Functions object representing the function. :rtype: dict
airflow/contrib/hooks/gcp_function_hook.py
def get_function(self, name): """ Returns the Cloud Function with the given name. :param name: Name of the function. :type name: str :return: A Cloud Functions object representing the function. :rtype: dict """ return self.get_conn().projects().locations().functions().get( name=name).execute(num_retries=self.num_retries)
def get_function(self, name): """ Returns the Cloud Function with the given name. :param name: Name of the function. :type name: str :return: A Cloud Functions object representing the function. :rtype: dict """ return self.get_conn().projects().locations().functions().get( name=name).execute(num_retries=self.num_retries)
[ "Returns", "the", "Cloud", "Function", "with", "the", "given", "name", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_function_hook.py#L76-L86
[ "def", "get_function", "(", "self", ",", "name", ")", ":", "return", "self", ".", "get_conn", "(", ")", ".", "projects", "(", ")", ".", "locations", "(", ")", ".", "functions", "(", ")", ".", "get", "(", "name", "=", "name", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
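A hedged sketch for get_function; the GcfHook constructor arguments and the response field printed below are assumptions rather than facts taken from this record, and the resource name is a placeholder.
from airflow.contrib.hooks.gcp_function_hook import GcfHook

gcf = GcfHook(api_version='v1', gcp_conn_id='google_cloud_default')  # assumed constructor args
name = 'projects/my-project/locations/europe-west1/functions/hello-gcf'  # example resource name
function = gcf.get_function(name)
print(function.get('status'))  # assumed field of the returned CloudFunction dict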
test
GcfHook.create_new_function
Creates a new function in Cloud Function in the location specified in the body. :param location: The location of the function. :type location: str :param body: The body required by the Cloud Functions insert API. :type body: dict :param project_id: Optional, Google Cloud Project project_id where the function belongs. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: None
airflow/contrib/hooks/gcp_function_hook.py
def create_new_function(self, location, body, project_id=None): """ Creates a new function in Cloud Function in the location specified in the body. :param location: The location of the function. :type location: str :param body: The body required by the Cloud Functions insert API. :type body: dict :param project_id: Optional, Google Cloud Project project_id where the function belongs. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: None """ response = self.get_conn().projects().locations().functions().create( location=self._full_location(project_id, location), body=body ).execute(num_retries=self.num_retries) operation_name = response["name"] self._wait_for_operation_to_complete(operation_name=operation_name)
def create_new_function(self, location, body, project_id=None): """ Creates a new function in Cloud Function in the location specified in the body. :param location: The location of the function. :type location: str :param body: The body required by the Cloud Functions insert API. :type body: dict :param project_id: Optional, Google Cloud Project project_id where the function belongs. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: None """ response = self.get_conn().projects().locations().functions().create( location=self._full_location(project_id, location), body=body ).execute(num_retries=self.num_retries) operation_name = response["name"] self._wait_for_operation_to_complete(operation_name=operation_name)
[ "Creates", "a", "new", "function", "in", "Cloud", "Function", "in", "the", "location", "specified", "in", "the", "body", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_function_hook.py#L89-L107
[ "def", "create_new_function", "(", "self", ",", "location", ",", "body", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "projects", "(", ")", ".", "locations", "(", ")", ".", "functions", "(", ")", ".", "create", "(", "location", "=", "self", ".", "_full_location", "(", "project_id", ",", "location", ")", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "operation_name", "=", "response", "[", "\"name\"", "]", "self", ".", "_wait_for_operation_to_complete", "(", "operation_name", "=", "operation_name", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
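create_new_function blocks until the returned operation completes; the body below is an illustrative subset of the Cloud Functions API schema, and the project, region and upload URL are placeholders.
from airflow.contrib.hooks.gcp_function_hook import GcfHook

gcf = GcfHook(api_version='v1', gcp_conn_id='google_cloud_default')  # assumed constructor args
body = {
    'name': 'projects/my-project/locations/europe-west1/functions/hello-gcf',
    'entryPoint': 'hello_gcf',
    'runtime': 'python37',
    'httpsTrigger': {},
    'sourceUploadUrl': 'https://storage.googleapis.com/placeholder-signed-upload-url',  # placeholder
}
gcf.create_new_function('europe-west1', body, project_id='my-project')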
test
GcfHook.update_function
Updates Cloud Functions according to the specified update mask. :param name: The name of the function. :type name: str :param body: The body required by the cloud function patch API. :type body: dict :param update_mask: The update mask - array of fields that should be patched. :type update_mask: [str] :return: None
airflow/contrib/hooks/gcp_function_hook.py
def update_function(self, name, body, update_mask): """ Updates Cloud Functions according to the specified update mask. :param name: The name of the function. :type name: str :param body: The body required by the cloud function patch API. :type body: dict :param update_mask: The update mask - array of fields that should be patched. :type update_mask: [str] :return: None """ response = self.get_conn().projects().locations().functions().patch( updateMask=",".join(update_mask), name=name, body=body ).execute(num_retries=self.num_retries) operation_name = response["name"] self._wait_for_operation_to_complete(operation_name=operation_name)
def update_function(self, name, body, update_mask): """ Updates Cloud Functions according to the specified update mask. :param name: The name of the function. :type name: str :param body: The body required by the cloud function patch API. :type body: dict :param update_mask: The update mask - array of fields that should be patched. :type update_mask: [str] :return: None """ response = self.get_conn().projects().locations().functions().patch( updateMask=",".join(update_mask), name=name, body=body ).execute(num_retries=self.num_retries) operation_name = response["name"] self._wait_for_operation_to_complete(operation_name=operation_name)
[ "Updates", "Cloud", "Functions", "according", "to", "the", "specified", "update", "mask", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_function_hook.py#L109-L127
[ "def", "update_function", "(", "self", ",", "name", ",", "body", ",", "update_mask", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "projects", "(", ")", ".", "locations", "(", ")", ".", "functions", "(", ")", ".", "patch", "(", "updateMask", "=", "\",\"", ".", "join", "(", "update_mask", ")", ",", "name", "=", "name", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "operation_name", "=", "response", "[", "\"name\"", "]", "self", ".", "_wait_for_operation_to_complete", "(", "operation_name", "=", "operation_name", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
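Only the fields named in the update mask are patched; a small sketch with the same assumed constructor and an example resource name:
from airflow.contrib.hooks.gcp_function_hook import GcfHook

gcf = GcfHook(api_version='v1', gcp_conn_id='google_cloud_default')  # assumed constructor args
name = 'projects/my-project/locations/europe-west1/functions/hello-gcf'  # example resource name
# Patch a single field; the call waits for the underlying operation to finish.
gcf.update_function(name, {'availableMemoryMb': 512}, ['availableMemoryMb'])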
test
GcfHook.upload_function_zip
Uploads zip file with sources. :param location: The location where the function is created. :type location: str :param zip_path: The path of the valid .zip file to upload. :type zip_path: str :param project_id: Optional, Google Cloud Project project_id where the function belongs. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: The upload URL that was returned by generateUploadUrl method.
airflow/contrib/hooks/gcp_function_hook.py
def upload_function_zip(self, location, zip_path, project_id=None): """ Uploads zip file with sources. :param location: The location where the function is created. :type location: str :param zip_path: The path of the valid .zip file to upload. :type zip_path: str :param project_id: Optional, Google Cloud Project project_id where the function belongs. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: The upload URL that was returned by generateUploadUrl method. """ response = self.get_conn().projects().locations().functions().generateUploadUrl( parent=self._full_location(project_id, location) ).execute(num_retries=self.num_retries) upload_url = response.get('uploadUrl') with open(zip_path, 'rb') as fp: requests.put( url=upload_url, data=fp, # Those two headers need to be specified according to: # https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl # nopep8 headers={ 'Content-type': 'application/zip', 'x-goog-content-length-range': '0,104857600', } ) return upload_url
def upload_function_zip(self, location, zip_path, project_id=None): """ Uploads zip file with sources. :param location: The location where the function is created. :type location: str :param zip_path: The path of the valid .zip file to upload. :type zip_path: str :param project_id: Optional, Google Cloud Project project_id where the function belongs. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: The upload URL that was returned by generateUploadUrl method. """ response = self.get_conn().projects().locations().functions().generateUploadUrl( parent=self._full_location(project_id, location) ).execute(num_retries=self.num_retries) upload_url = response.get('uploadUrl') with open(zip_path, 'rb') as fp: requests.put( url=upload_url, data=fp, # Those two headers need to be specified according to: # https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl # nopep8 headers={ 'Content-type': 'application/zip', 'x-goog-content-length-range': '0,104857600', } ) return upload_url
[ "Uploads", "zip", "file", "with", "sources", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_function_hook.py#L130-L159
[ "def", "upload_function_zip", "(", "self", ",", "location", ",", "zip_path", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "projects", "(", ")", ".", "locations", "(", ")", ".", "functions", "(", ")", ".", "generateUploadUrl", "(", "parent", "=", "self", ".", "_full_location", "(", "project_id", ",", "location", ")", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "upload_url", "=", "response", ".", "get", "(", "'uploadUrl'", ")", "with", "open", "(", "zip_path", ",", "'rb'", ")", "as", "fp", ":", "requests", ".", "put", "(", "url", "=", "upload_url", ",", "data", "=", "fp", ",", "# Those two headers needs to be specified according to:", "# https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl", "# nopep8", "headers", "=", "{", "'Content-type'", ":", "'application/zip'", ",", "'x-goog-content-length-range'", ":", "'0,104857600'", ",", "}", ")", "return", "upload_url" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
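upload_function_zip asks the API for a signed upload URL, PUTs the archive to it and returns that URL; the path, region and project below are placeholders, and the constructor arguments are assumed as above.
from airflow.contrib.hooks.gcp_function_hook import GcfHook

gcf = GcfHook(api_version='v1', gcp_conn_id='google_cloud_default')  # assumed constructor args
upload_url = gcf.upload_function_zip('europe-west1', '/tmp/function_source.zip',
                                     project_id='my-project')
print(upload_url)  # pass this as 'sourceUploadUrl' when creating or updating the function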
test
GcfHook.delete_function
Deletes the specified Cloud Function. :param name: The name of the function. :type name: str :return: None
airflow/contrib/hooks/gcp_function_hook.py
def delete_function(self, name): """ Deletes the specified Cloud Function. :param name: The name of the function. :type name: str :return: None """ response = self.get_conn().projects().locations().functions().delete( name=name).execute(num_retries=self.num_retries) operation_name = response["name"] self._wait_for_operation_to_complete(operation_name=operation_name)
def delete_function(self, name): """ Deletes the specified Cloud Function. :param name: The name of the function. :type name: str :return: None """ response = self.get_conn().projects().locations().functions().delete( name=name).execute(num_retries=self.num_retries) operation_name = response["name"] self._wait_for_operation_to_complete(operation_name=operation_name)
[ "Deletes", "the", "specified", "Cloud", "Function", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_function_hook.py#L161-L172
[ "def", "delete_function", "(", "self", ",", "name", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "projects", "(", ")", ".", "locations", "(", ")", ".", "functions", "(", ")", ".", "delete", "(", "name", "=", "name", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "operation_name", "=", "response", "[", "\"name\"", "]", "self", ".", "_wait_for_operation_to_complete", "(", "operation_name", "=", "operation_name", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GcfHook._wait_for_operation_to_complete
Waits for the named operation to complete - checks status of the asynchronous call. :param operation_name: The name of the operation. :type operation_name: str :return: The response returned by the operation. :rtype: dict :exception: AirflowException in case error is returned.
airflow/contrib/hooks/gcp_function_hook.py
def _wait_for_operation_to_complete(self, operation_name): """ Waits for the named operation to complete - checks status of the asynchronous call. :param operation_name: The name of the operation. :type operation_name: str :return: The response returned by the operation. :rtype: dict :exception: AirflowException in case error is returned. """ service = self.get_conn() while True: operation_response = service.operations().get( name=operation_name, ).execute(num_retries=self.num_retries) if operation_response.get("done"): response = operation_response.get("response") error = operation_response.get("error") # Note, according to documentation always either response or error is # set when "done" == True if error: raise AirflowException(str(error)) return response time.sleep(TIME_TO_SLEEP_IN_SECONDS)
def _wait_for_operation_to_complete(self, operation_name): """ Waits for the named operation to complete - checks status of the asynchronous call. :param operation_name: The name of the operation. :type operation_name: str :return: The response returned by the operation. :rtype: dict :exception: AirflowException in case error is returned. """ service = self.get_conn() while True: operation_response = service.operations().get( name=operation_name, ).execute(num_retries=self.num_retries) if operation_response.get("done"): response = operation_response.get("response") error = operation_response.get("error") # Note, according to documentation always either response or error is # set when "done" == True if error: raise AirflowException(str(error)) return response time.sleep(TIME_TO_SLEEP_IN_SECONDS)
[ "Waits", "for", "the", "named", "operation", "to", "complete", "-", "checks", "status", "of", "the", "asynchronous", "call", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_function_hook.py#L174-L198
[ "def", "_wait_for_operation_to_complete", "(", "self", ",", "operation_name", ")", ":", "service", "=", "self", ".", "get_conn", "(", ")", "while", "True", ":", "operation_response", "=", "service", ".", "operations", "(", ")", ".", "get", "(", "name", "=", "operation_name", ",", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "if", "operation_response", ".", "get", "(", "\"done\"", ")", ":", "response", "=", "operation_response", ".", "get", "(", "\"response\"", ")", "error", "=", "operation_response", ".", "get", "(", "\"error\"", ")", "# Note, according to documentation always either response or error is", "# set when \"done\" == True", "if", "error", ":", "raise", "AirflowException", "(", "str", "(", "error", ")", ")", "return", "response", "time", ".", "sleep", "(", "TIME_TO_SLEEP_IN_SECONDS", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PubSubHook.publish
Publishes messages to a Pub/Sub topic. :param project: the GCP project ID in which to publish :type project: str :param topic: the Pub/Sub topic to which to publish; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param messages: messages to publish; if the data field in a message is set, it should already be base64 encoded. :type messages: list of PubSub messages; see http://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage
airflow/contrib/hooks/gcp_pubsub_hook.py
def publish(self, project, topic, messages): """Publishes messages to a Pub/Sub topic. :param project: the GCP project ID in which to publish :type project: str :param topic: the Pub/Sub topic to which to publish; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param messages: messages to publish; if the data field in a message is set, it should already be base64 encoded. :type messages: list of PubSub messages; see http://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage """ body = {'messages': messages} full_topic = _format_topic(project, topic) request = self.get_conn().projects().topics().publish( topic=full_topic, body=body) try: request.execute(num_retries=self.num_retries) except HttpError as e: raise PubSubException( 'Error publishing to topic {}'.format(full_topic), e)
def publish(self, project, topic, messages): """Publishes messages to a Pub/Sub topic. :param project: the GCP project ID in which to publish :type project: str :param topic: the Pub/Sub topic to which to publish; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param messages: messages to publish; if the data field in a message is set, it should already be base64 encoded. :type messages: list of PubSub messages; see http://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage """ body = {'messages': messages} full_topic = _format_topic(project, topic) request = self.get_conn().projects().topics().publish( topic=full_topic, body=body) try: request.execute(num_retries=self.num_retries) except HttpError as e: raise PubSubException( 'Error publishing to topic {}'.format(full_topic), e)
[ "Publishes", "messages", "to", "a", "Pub", "/", "Sub", "topic", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_pubsub_hook.py#L60-L81
[ "def", "publish", "(", "self", ",", "project", ",", "topic", ",", "messages", ")", ":", "body", "=", "{", "'messages'", ":", "messages", "}", "full_topic", "=", "_format_topic", "(", "project", ",", "topic", ")", "request", "=", "self", ".", "get_conn", "(", ")", ".", "projects", "(", ")", ".", "topics", "(", ")", ".", "publish", "(", "topic", "=", "full_topic", ",", "body", "=", "body", ")", "try", ":", "request", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "except", "HttpError", "as", "e", ":", "raise", "PubSubException", "(", "'Error publishing to topic {}'", ".", "format", "(", "full_topic", ")", ",", "e", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
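As the publish docstring notes, the data field must already be base64 encoded; a sketch with an assumed PubSubHook constructor argument and placeholder project and topic names:
from base64 import b64encode

from airflow.contrib.hooks.gcp_pubsub_hook import PubSubHook

pubsub = PubSubHook(gcp_conn_id='google_cloud_default')  # assumed constructor kwarg
message = {'data': b64encode(b'hello').decode('ascii'),
           'attributes': {'source': 'airflow'}}          # example attributes
pubsub.publish('my-project', 'my-topic', [message])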
test
PubSubHook.create_topic
Creates a Pub/Sub topic, if it does not already exist. :param project: the GCP project ID in which to create the topic :type project: str :param topic: the Pub/Sub topic name to create; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param fail_if_exists: if set, raise an exception if the topic already exists :type fail_if_exists: bool
airflow/contrib/hooks/gcp_pubsub_hook.py
def create_topic(self, project, topic, fail_if_exists=False): """Creates a Pub/Sub topic, if it does not already exist. :param project: the GCP project ID in which to create the topic :type project: str :param topic: the Pub/Sub topic name to create; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param fail_if_exists: if set, raise an exception if the topic already exists :type fail_if_exists: bool """ service = self.get_conn() full_topic = _format_topic(project, topic) try: service.projects().topics().create( name=full_topic, body={}).execute(num_retries=self.num_retries) except HttpError as e: # Status code 409 indicates that the topic already exists. if str(e.resp['status']) == '409': message = 'Topic already exists: {}'.format(full_topic) self.log.warning(message) if fail_if_exists: raise PubSubException(message) else: raise PubSubException( 'Error creating topic {}'.format(full_topic), e)
def create_topic(self, project, topic, fail_if_exists=False): """Creates a Pub/Sub topic, if it does not already exist. :param project: the GCP project ID in which to create the topic :type project: str :param topic: the Pub/Sub topic name to create; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param fail_if_exists: if set, raise an exception if the topic already exists :type fail_if_exists: bool """ service = self.get_conn() full_topic = _format_topic(project, topic) try: service.projects().topics().create( name=full_topic, body={}).execute(num_retries=self.num_retries) except HttpError as e: # Status code 409 indicates that the topic already exists. if str(e.resp['status']) == '409': message = 'Topic already exists: {}'.format(full_topic) self.log.warning(message) if fail_if_exists: raise PubSubException(message) else: raise PubSubException( 'Error creating topic {}'.format(full_topic), e)
[ "Creates", "a", "Pub", "/", "Sub", "topic", "if", "it", "does", "not", "already", "exist", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_pubsub_hook.py#L83-L110
[ "def", "create_topic", "(", "self", ",", "project", ",", "topic", ",", "fail_if_exists", "=", "False", ")", ":", "service", "=", "self", ".", "get_conn", "(", ")", "full_topic", "=", "_format_topic", "(", "project", ",", "topic", ")", "try", ":", "service", ".", "projects", "(", ")", ".", "topics", "(", ")", ".", "create", "(", "name", "=", "full_topic", ",", "body", "=", "{", "}", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "except", "HttpError", "as", "e", ":", "# Status code 409 indicates that the topic already exists.", "if", "str", "(", "e", ".", "resp", "[", "'status'", "]", ")", "==", "'409'", ":", "message", "=", "'Topic already exists: {}'", ".", "format", "(", "full_topic", ")", "self", ".", "log", ".", "warning", "(", "message", ")", "if", "fail_if_exists", ":", "raise", "PubSubException", "(", "message", ")", "else", ":", "raise", "PubSubException", "(", "'Error creating topic {}'", ".", "format", "(", "full_topic", ")", ",", "e", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
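create_topic treats HTTP 409 as "already there" and only warns unless fail_if_exists is set; the project and topic names below are examples.
from airflow.contrib.hooks.gcp_pubsub_hook import PubSubHook

pubsub = PubSubHook(gcp_conn_id='google_cloud_default')  # assumed constructor kwarg
pubsub.create_topic('my-project', 'my-topic')  # logs a warning and returns if the topic exists
# Pass fail_if_exists=True to get a PubSubException instead of the warning.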
test
PubSubHook.delete_topic
Deletes a Pub/Sub topic if it exists. :param project: the GCP project ID in which to delete the topic :type project: str :param topic: the Pub/Sub topic name to delete; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param fail_if_not_exists: if set, raise an exception if the topic does not exist :type fail_if_not_exists: bool
airflow/contrib/hooks/gcp_pubsub_hook.py
def delete_topic(self, project, topic, fail_if_not_exists=False): """Deletes a Pub/Sub topic if it exists. :param project: the GCP project ID in which to delete the topic :type project: str :param topic: the Pub/Sub topic name to delete; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param fail_if_not_exists: if set, raise an exception if the topic does not exist :type fail_if_not_exists: bool """ service = self.get_conn() full_topic = _format_topic(project, topic) try: service.projects().topics().delete(topic=full_topic).execute(num_retries=self.num_retries) except HttpError as e: # Status code 404 indicates that the topic was not found if str(e.resp['status']) == '404': message = 'Topic does not exist: {}'.format(full_topic) self.log.warning(message) if fail_if_not_exists: raise PubSubException(message) else: raise PubSubException( 'Error deleting topic {}'.format(full_topic), e)
def delete_topic(self, project, topic, fail_if_not_exists=False): """Deletes a Pub/Sub topic if it exists. :param project: the GCP project ID in which to delete the topic :type project: str :param topic: the Pub/Sub topic name to delete; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param fail_if_not_exists: if set, raise an exception if the topic does not exist :type fail_if_not_exists: bool """ service = self.get_conn() full_topic = _format_topic(project, topic) try: service.projects().topics().delete(topic=full_topic).execute(num_retries=self.num_retries) except HttpError as e: # Status code 404 indicates that the topic was not found if str(e.resp['status']) == '404': message = 'Topic does not exist: {}'.format(full_topic) self.log.warning(message) if fail_if_not_exists: raise PubSubException(message) else: raise PubSubException( 'Error deleting topic {}'.format(full_topic), e)
[ "Deletes", "a", "Pub", "/", "Sub", "topic", "if", "it", "exists", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_pubsub_hook.py#L112-L137
[ "def", "delete_topic", "(", "self", ",", "project", ",", "topic", ",", "fail_if_not_exists", "=", "False", ")", ":", "service", "=", "self", ".", "get_conn", "(", ")", "full_topic", "=", "_format_topic", "(", "project", ",", "topic", ")", "try", ":", "service", ".", "projects", "(", ")", ".", "topics", "(", ")", ".", "delete", "(", "topic", "=", "full_topic", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "except", "HttpError", "as", "e", ":", "# Status code 409 indicates that the topic was not found", "if", "str", "(", "e", ".", "resp", "[", "'status'", "]", ")", "==", "'404'", ":", "message", "=", "'Topic does not exist: {}'", ".", "format", "(", "full_topic", ")", "self", ".", "log", ".", "warning", "(", "message", ")", "if", "fail_if_not_exists", ":", "raise", "PubSubException", "(", "message", ")", "else", ":", "raise", "PubSubException", "(", "'Error deleting topic {}'", ".", "format", "(", "full_topic", ")", ",", "e", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PubSubHook.create_subscription
Creates a Pub/Sub subscription, if it does not already exist. :param topic_project: the GCP project ID of the topic that the subscription will be bound to. :type topic_project: str :param topic: the Pub/Sub topic name that the subscription will be bound to; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param subscription: the Pub/Sub subscription name. If empty, a random name will be generated using the uuid module :type subscription: str :param subscription_project: the GCP project ID where the subscription will be created. If unspecified, ``topic_project`` will be used. :type subscription_project: str :param ack_deadline_secs: Number of seconds that a subscriber has to acknowledge each message pulled from the subscription :type ack_deadline_secs: int :param fail_if_exists: if set, raise an exception if the subscription already exists :type fail_if_exists: bool :return: subscription name which will be the system-generated value if the ``subscription`` parameter is not supplied :rtype: str
airflow/contrib/hooks/gcp_pubsub_hook.py
def create_subscription(self, topic_project, topic, subscription=None, subscription_project=None, ack_deadline_secs=10, fail_if_exists=False): """Creates a Pub/Sub subscription, if it does not already exist. :param topic_project: the GCP project ID of the topic that the subscription will be bound to. :type topic_project: str :param topic: the Pub/Sub topic name that the subscription will be bound to; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param subscription: the Pub/Sub subscription name. If empty, a random name will be generated using the uuid module :type subscription: str :param subscription_project: the GCP project ID where the subscription will be created. If unspecified, ``topic_project`` will be used. :type subscription_project: str :param ack_deadline_secs: Number of seconds that a subscriber has to acknowledge each message pulled from the subscription :type ack_deadline_secs: int :param fail_if_exists: if set, raise an exception if the subscription already exists :type fail_if_exists: bool :return: subscription name which will be the system-generated value if the ``subscription`` parameter is not supplied :rtype: str """ service = self.get_conn() full_topic = _format_topic(topic_project, topic) if not subscription: subscription = 'sub-{}'.format(uuid4()) if not subscription_project: subscription_project = topic_project full_subscription = _format_subscription(subscription_project, subscription) body = { 'topic': full_topic, 'ackDeadlineSeconds': ack_deadline_secs } try: service.projects().subscriptions().create( name=full_subscription, body=body).execute(num_retries=self.num_retries) except HttpError as e: # Status code 409 indicates that the subscription already exists. if str(e.resp['status']) == '409': message = 'Subscription already exists: {}'.format( full_subscription) self.log.warning(message) if fail_if_exists: raise PubSubException(message) else: raise PubSubException( 'Error creating subscription {}'.format(full_subscription), e) return subscription
def create_subscription(self, topic_project, topic, subscription=None, subscription_project=None, ack_deadline_secs=10, fail_if_exists=False): """Creates a Pub/Sub subscription, if it does not already exist. :param topic_project: the GCP project ID of the topic that the subscription will be bound to. :type topic_project: str :param topic: the Pub/Sub topic name that the subscription will be bound to; do not include the ``projects/{project}/topics/`` prefix. :type topic: str :param subscription: the Pub/Sub subscription name. If empty, a random name will be generated using the uuid module :type subscription: str :param subscription_project: the GCP project ID where the subscription will be created. If unspecified, ``topic_project`` will be used. :type subscription_project: str :param ack_deadline_secs: Number of seconds that a subscriber has to acknowledge each message pulled from the subscription :type ack_deadline_secs: int :param fail_if_exists: if set, raise an exception if the subscription already exists :type fail_if_exists: bool :return: subscription name which will be the system-generated value if the ``subscription`` parameter is not supplied :rtype: str """ service = self.get_conn() full_topic = _format_topic(topic_project, topic) if not subscription: subscription = 'sub-{}'.format(uuid4()) if not subscription_project: subscription_project = topic_project full_subscription = _format_subscription(subscription_project, subscription) body = { 'topic': full_topic, 'ackDeadlineSeconds': ack_deadline_secs } try: service.projects().subscriptions().create( name=full_subscription, body=body).execute(num_retries=self.num_retries) except HttpError as e: # Status code 409 indicates that the subscription already exists. if str(e.resp['status']) == '409': message = 'Subscription already exists: {}'.format( full_subscription) self.log.warning(message) if fail_if_exists: raise PubSubException(message) else: raise PubSubException( 'Error creating subscription {}'.format(full_subscription), e) return subscription
[ "Creates", "a", "Pub", "/", "Sub", "subscription", "if", "it", "does", "not", "already", "exist", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_pubsub_hook.py#L139-L194
[ "def", "create_subscription", "(", "self", ",", "topic_project", ",", "topic", ",", "subscription", "=", "None", ",", "subscription_project", "=", "None", ",", "ack_deadline_secs", "=", "10", ",", "fail_if_exists", "=", "False", ")", ":", "service", "=", "self", ".", "get_conn", "(", ")", "full_topic", "=", "_format_topic", "(", "topic_project", ",", "topic", ")", "if", "not", "subscription", ":", "subscription", "=", "'sub-{}'", ".", "format", "(", "uuid4", "(", ")", ")", "if", "not", "subscription_project", ":", "subscription_project", "=", "topic_project", "full_subscription", "=", "_format_subscription", "(", "subscription_project", ",", "subscription", ")", "body", "=", "{", "'topic'", ":", "full_topic", ",", "'ackDeadlineSeconds'", ":", "ack_deadline_secs", "}", "try", ":", "service", ".", "projects", "(", ")", ".", "subscriptions", "(", ")", ".", "create", "(", "name", "=", "full_subscription", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "except", "HttpError", "as", "e", ":", "# Status code 409 indicates that the subscription already exists.", "if", "str", "(", "e", ".", "resp", "[", "'status'", "]", ")", "==", "'409'", ":", "message", "=", "'Subscription already exists: {}'", ".", "format", "(", "full_subscription", ")", "self", ".", "log", ".", "warning", "(", "message", ")", "if", "fail_if_exists", ":", "raise", "PubSubException", "(", "message", ")", "else", ":", "raise", "PubSubException", "(", "'Error creating subscription {}'", ".", "format", "(", "full_subscription", ")", ",", "e", ")", "return", "subscription" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
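When no subscription name is given, create_subscription returns a generated 'sub-<uuid4>' name, which is the value worth capturing; the constructor argument and resource names below are placeholders.
from airflow.contrib.hooks.gcp_pubsub_hook import PubSubHook

pubsub = PubSubHook(gcp_conn_id='google_cloud_default')  # assumed constructor kwarg
sub_name = pubsub.create_subscription('my-project', 'my-topic',
                                      ack_deadline_secs=30)
print(sub_name)  # the generated name when no explicit subscription was passed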
test
PubSubHook.delete_subscription
Deletes a Pub/Sub subscription, if it exists. :param project: the GCP project ID where the subscription exists :type project: str :param subscription: the Pub/Sub subscription name to delete; do not include the ``projects/{project}/subscriptions/`` prefix. :type subscription: str :param fail_if_not_exists: if set, raise an exception if the subscription does not exist :type fail_if_not_exists: bool
airflow/contrib/hooks/gcp_pubsub_hook.py
def delete_subscription(self, project, subscription, fail_if_not_exists=False): """Deletes a Pub/Sub subscription, if it exists. :param project: the GCP project ID where the subscription exists :type project: str :param subscription: the Pub/Sub subscription name to delete; do not include the ``projects/{project}/subscriptions/`` prefix. :type subscription: str :param fail_if_not_exists: if set, raise an exception if the subscription does not exist :type fail_if_not_exists: bool """ service = self.get_conn() full_subscription = _format_subscription(project, subscription) try: service.projects().subscriptions().delete( subscription=full_subscription).execute(num_retries=self.num_retries) except HttpError as e: # Status code 404 indicates that the subscription was not found if str(e.resp['status']) == '404': message = 'Subscription does not exist: {}'.format( full_subscription) self.log.warning(message) if fail_if_not_exists: raise PubSubException(message) else: raise PubSubException( 'Error deleting subscription {}'.format(full_subscription), e)
def delete_subscription(self, project, subscription, fail_if_not_exists=False): """Deletes a Pub/Sub subscription, if it exists. :param project: the GCP project ID where the subscription exists :type project: str :param subscription: the Pub/Sub subscription name to delete; do not include the ``projects/{project}/subscriptions/`` prefix. :type subscription: str :param fail_if_not_exists: if set, raise an exception if the subscription does not exist :type fail_if_not_exists: bool """ service = self.get_conn() full_subscription = _format_subscription(project, subscription) try: service.projects().subscriptions().delete( subscription=full_subscription).execute(num_retries=self.num_retries) except HttpError as e: # Status code 404 indicates that the subscription was not found if str(e.resp['status']) == '404': message = 'Subscription does not exist: {}'.format( full_subscription) self.log.warning(message) if fail_if_not_exists: raise PubSubException(message) else: raise PubSubException( 'Error deleting subscription {}'.format(full_subscription), e)
[ "Deletes", "a", "Pub", "/", "Sub", "subscription", "if", "it", "exists", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_pubsub_hook.py#L196-L225
[ "def", "delete_subscription", "(", "self", ",", "project", ",", "subscription", ",", "fail_if_not_exists", "=", "False", ")", ":", "service", "=", "self", ".", "get_conn", "(", ")", "full_subscription", "=", "_format_subscription", "(", "project", ",", "subscription", ")", "try", ":", "service", ".", "projects", "(", ")", ".", "subscriptions", "(", ")", ".", "delete", "(", "subscription", "=", "full_subscription", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "except", "HttpError", "as", "e", ":", "# Status code 404 indicates that the subscription was not found", "if", "str", "(", "e", ".", "resp", "[", "'status'", "]", ")", "==", "'404'", ":", "message", "=", "'Subscription does not exist: {}'", ".", "format", "(", "full_subscription", ")", "self", ".", "log", ".", "warning", "(", "message", ")", "if", "fail_if_not_exists", ":", "raise", "PubSubException", "(", "message", ")", "else", ":", "raise", "PubSubException", "(", "'Error deleting subscription {}'", ".", "format", "(", "full_subscription", ")", ",", "e", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
PubSubHook.pull
Pulls up to ``max_messages`` messages from Pub/Sub subscription. :param project: the GCP project ID where the subscription exists :type project: str :param subscription: the Pub/Sub subscription name to pull from; do not include the 'projects/{project}/subscriptions/' prefix. :type subscription: str :param max_messages: The maximum number of messages to return from the Pub/Sub API. :type max_messages: int :param return_immediately: If set, the Pub/Sub API will immediately return if no messages are available. Otherwise, the request will block for an undisclosed, but bounded period of time :type return_immediately: bool :return: A list of Pub/Sub ReceivedMessage objects each containing an ``ackId`` property and a ``message`` property, which includes the base64-encoded message content. See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull#ReceivedMessage
airflow/contrib/hooks/gcp_pubsub_hook.py
def pull(self, project, subscription, max_messages, return_immediately=False): """Pulls up to ``max_messages`` messages from Pub/Sub subscription. :param project: the GCP project ID where the subscription exists :type project: str :param subscription: the Pub/Sub subscription name to pull from; do not include the 'projects/{project}/topics/' prefix. :type subscription: str :param max_messages: The maximum number of messages to return from the Pub/Sub API. :type max_messages: int :param return_immediately: If set, the Pub/Sub API will immediately return if no messages are available. Otherwise, the request will block for an undisclosed, but bounded period of time :type return_immediately: bool :return: A list of Pub/Sub ReceivedMessage objects each containing an ``ackId`` property and a ``message`` property, which includes the base64-encoded message content. See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull#ReceivedMessage """ service = self.get_conn() full_subscription = _format_subscription(project, subscription) body = { 'maxMessages': max_messages, 'returnImmediately': return_immediately } try: response = service.projects().subscriptions().pull( subscription=full_subscription, body=body).execute(num_retries=self.num_retries) return response.get('receivedMessages', []) except HttpError as e: raise PubSubException( 'Error pulling messages from subscription {}'.format( full_subscription), e)
def pull(self, project, subscription, max_messages, return_immediately=False): """Pulls up to ``max_messages`` messages from Pub/Sub subscription. :param project: the GCP project ID where the subscription exists :type project: str :param subscription: the Pub/Sub subscription name to pull from; do not include the 'projects/{project}/topics/' prefix. :type subscription: str :param max_messages: The maximum number of messages to return from the Pub/Sub API. :type max_messages: int :param return_immediately: If set, the Pub/Sub API will immediately return if no messages are available. Otherwise, the request will block for an undisclosed, but bounded period of time :type return_immediately: bool :return: A list of Pub/Sub ReceivedMessage objects each containing an ``ackId`` property and a ``message`` property, which includes the base64-encoded message content. See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull#ReceivedMessage """ service = self.get_conn() full_subscription = _format_subscription(project, subscription) body = { 'maxMessages': max_messages, 'returnImmediately': return_immediately } try: response = service.projects().subscriptions().pull( subscription=full_subscription, body=body).execute(num_retries=self.num_retries) return response.get('receivedMessages', []) except HttpError as e: raise PubSubException( 'Error pulling messages from subscription {}'.format( full_subscription), e)
[ "Pulls", "up", "to", "max_messages", "messages", "from", "Pub", "/", "Sub", "subscription", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_pubsub_hook.py#L227-L261
[ "def", "pull", "(", "self", ",", "project", ",", "subscription", ",", "max_messages", ",", "return_immediately", "=", "False", ")", ":", "service", "=", "self", ".", "get_conn", "(", ")", "full_subscription", "=", "_format_subscription", "(", "project", ",", "subscription", ")", "body", "=", "{", "'maxMessages'", ":", "max_messages", ",", "'returnImmediately'", ":", "return_immediately", "}", "try", ":", "response", "=", "service", ".", "projects", "(", ")", ".", "subscriptions", "(", ")", ".", "pull", "(", "subscription", "=", "full_subscription", ",", "body", "=", "body", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "return", "response", ".", "get", "(", "'receivedMessages'", ",", "[", "]", ")", "except", "HttpError", "as", "e", ":", "raise", "PubSubException", "(", "'Error pulling messages from subscription {}'", ".", "format", "(", "full_subscription", ")", ",", "e", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
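A short, hedged example of PubSubHook.pull from the record above; project and subscription names are hypothetical and the call needs working GCP credentials.

    from airflow.contrib.hooks.gcp_pubsub_hook import PubSubHook

    hook = PubSubHook()  # defaults to the 'google_cloud_default' connection

    # Pull at most 5 messages; return_immediately=True avoids blocking when
    # the subscription is currently empty.
    received = hook.pull(project='my-gcp-project',
                         subscription='my-subscription',
                         max_messages=5,
                         return_immediately=True)
    for msg in received:
        # Each entry carries an ackId plus a message dict with base64 data.
        print(msg['ackId'], msg['message'].get('data'))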
test
PubSubHook.acknowledge
Acknowledges the messages associated with the ``ack_ids`` from the
specified Pub/Sub subscription, so they will not be delivered again.

:param project: the GCP project ID in which the subscription exists
:type project: str
:param subscription: the Pub/Sub subscription name to acknowledge
    messages on; do not include the 'projects/{project}/subscriptions/'
    prefix.
:type subscription: str
:param ack_ids: List of ReceivedMessage ackIds from a previous pull
    response
:type ack_ids: list
airflow/contrib/hooks/gcp_pubsub_hook.py
def acknowledge(self, project, subscription, ack_ids): """Pulls up to ``max_messages`` messages from Pub/Sub subscription. :param project: the GCP project name or ID in which to create the topic :type project: str :param subscription: the Pub/Sub subscription name to delete; do not include the 'projects/{project}/topics/' prefix. :type subscription: str :param ack_ids: List of ReceivedMessage ackIds from a previous pull response :type ack_ids: list """ service = self.get_conn() full_subscription = _format_subscription(project, subscription) try: service.projects().subscriptions().acknowledge( subscription=full_subscription, body={'ackIds': ack_ids} ).execute(num_retries=self.num_retries) except HttpError as e: raise PubSubException( 'Error acknowledging {} messages pulled from subscription {}' .format(len(ack_ids), full_subscription), e)
def acknowledge(self, project, subscription, ack_ids): """Pulls up to ``max_messages`` messages from Pub/Sub subscription. :param project: the GCP project name or ID in which to create the topic :type project: str :param subscription: the Pub/Sub subscription name to delete; do not include the 'projects/{project}/topics/' prefix. :type subscription: str :param ack_ids: List of ReceivedMessage ackIds from a previous pull response :type ack_ids: list """ service = self.get_conn() full_subscription = _format_subscription(project, subscription) try: service.projects().subscriptions().acknowledge( subscription=full_subscription, body={'ackIds': ack_ids} ).execute(num_retries=self.num_retries) except HttpError as e: raise PubSubException( 'Error acknowledging {} messages pulled from subscription {}' .format(len(ack_ids), full_subscription), e)
[ "Pulls", "up", "to", "max_messages", "messages", "from", "Pub", "/", "Sub", "subscription", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_pubsub_hook.py#L263-L285
[ "def", "acknowledge", "(", "self", ",", "project", ",", "subscription", ",", "ack_ids", ")", ":", "service", "=", "self", ".", "get_conn", "(", ")", "full_subscription", "=", "_format_subscription", "(", "project", ",", "subscription", ")", "try", ":", "service", ".", "projects", "(", ")", ".", "subscriptions", "(", ")", ".", "acknowledge", "(", "subscription", "=", "full_subscription", ",", "body", "=", "{", "'ackIds'", ":", "ack_ids", "}", ")", ".", "execute", "(", "num_retries", "=", "self", ".", "num_retries", ")", "except", "HttpError", "as", "e", ":", "raise", "PubSubException", "(", "'Error acknowledging {} messages pulled from subscription {}'", ".", "format", "(", "len", "(", "ack_ids", ")", ",", "full_subscription", ")", ",", "e", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
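The acknowledge record above is normally paired with pull; a sketch of that pairing follows, with hypothetical project and subscription names.

    from airflow.contrib.hooks.gcp_pubsub_hook import PubSubHook

    hook = PubSubHook()
    received = hook.pull('my-gcp-project', 'my-subscription', max_messages=10)
    ack_ids = [m['ackId'] for m in received]
    if ack_ids:
        # Acknowledge what was just pulled so Pub/Sub does not redeliver it.
        hook.acknowledge('my-gcp-project', 'my-subscription', ack_ids)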
test
BaseTIDep.get_dep_statuses
Wrapper around the private _get_dep_statuses method that contains some
global checks for all dependencies.

:param ti: the task instance to get the dependency status for
:type ti: airflow.models.TaskInstance
:param session: database session
:type session: sqlalchemy.orm.session.Session
:param dep_context: the context in which this dependency should be
    evaluated
:type dep_context: DepContext
airflow/ti_deps/deps/base_ti_dep.py
def get_dep_statuses(self, ti, session, dep_context=None): """ Wrapper around the private _get_dep_statuses method that contains some global checks for all dependencies. :param ti: the task instance to get the dependency status for :type ti: airflow.models.TaskInstance :param session: database session :type session: sqlalchemy.orm.session.Session :param dep_context: the context for which this dependency should be evaluated for :type dep_context: DepContext """ # this avoids a circular dependency from airflow.ti_deps.dep_context import DepContext if dep_context is None: dep_context = DepContext() if self.IGNOREABLE and dep_context.ignore_all_deps: yield self._passing_status( reason="Context specified all dependencies should be ignored.") return if self.IS_TASK_DEP and dep_context.ignore_task_deps: yield self._passing_status( reason="Context specified all task dependencies should be ignored.") return for dep_status in self._get_dep_statuses(ti, session, dep_context): yield dep_status
def get_dep_statuses(self, ti, session, dep_context=None): """ Wrapper around the private _get_dep_statuses method that contains some global checks for all dependencies. :param ti: the task instance to get the dependency status for :type ti: airflow.models.TaskInstance :param session: database session :type session: sqlalchemy.orm.session.Session :param dep_context: the context for which this dependency should be evaluated for :type dep_context: DepContext """ # this avoids a circular dependency from airflow.ti_deps.dep_context import DepContext if dep_context is None: dep_context = DepContext() if self.IGNOREABLE and dep_context.ignore_all_deps: yield self._passing_status( reason="Context specified all dependencies should be ignored.") return if self.IS_TASK_DEP and dep_context.ignore_task_deps: yield self._passing_status( reason="Context specified all task dependencies should be ignored.") return for dep_status in self._get_dep_statuses(ti, session, dep_context): yield dep_status
[ "Wrapper", "around", "the", "private", "_get_dep_statuses", "method", "that", "contains", "some", "global", "checks", "for", "all", "dependencies", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/ti_deps/deps/base_ti_dep.py#L78-L107
[ "def", "get_dep_statuses", "(", "self", ",", "ti", ",", "session", ",", "dep_context", "=", "None", ")", ":", "# this avoids a circular dependency", "from", "airflow", ".", "ti_deps", ".", "dep_context", "import", "DepContext", "if", "dep_context", "is", "None", ":", "dep_context", "=", "DepContext", "(", ")", "if", "self", ".", "IGNOREABLE", "and", "dep_context", ".", "ignore_all_deps", ":", "yield", "self", ".", "_passing_status", "(", "reason", "=", "\"Context specified all dependencies should be ignored.\"", ")", "return", "if", "self", ".", "IS_TASK_DEP", "and", "dep_context", ".", "ignore_task_deps", ":", "yield", "self", ".", "_passing_status", "(", "reason", "=", "\"Context specified all task dependencies should be ignored.\"", ")", "return", "for", "dep_status", "in", "self", ".", "_get_dep_statuses", "(", "ti", ",", "session", ",", "dep_context", ")", ":", "yield", "dep_status" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
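To illustrate how get_dep_statuses delegates to the private _get_dep_statuses, here is a toy dependency subclass; the class name and the weekday rule are invented for the example.

    from airflow.ti_deps.deps.base_ti_dep import BaseTIDep

    class NotOnWeekendDep(BaseTIDep):
        """Toy dependency: only passes when the execution date is a weekday."""
        NAME = "Execution Date Not On Weekend"
        IGNOREABLE = True  # honours dep_context.ignore_all_deps in the wrapper

        def _get_dep_statuses(self, ti, session, dep_context):
            if ti.execution_date.weekday() < 5:
                yield self._passing_status(reason="Execution date is a weekday.")
            else:
                yield self._failing_status(reason="Execution date is on a weekend.")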
test
BaseTIDep.is_met
Returns whether or not this dependency is met for a given task instance. A dependency is considered met if all of the dependency statuses it reports are passing. :param ti: the task instance to see if this dependency is met for :type ti: airflow.models.TaskInstance :param session: database session :type session: sqlalchemy.orm.session.Session :param dep_context: The context this dependency is being checked under that stores state that can be used by this dependency. :type dep_context: BaseDepContext
airflow/ti_deps/deps/base_ti_dep.py
def is_met(self, ti, session, dep_context=None): """ Returns whether or not this dependency is met for a given task instance. A dependency is considered met if all of the dependency statuses it reports are passing. :param ti: the task instance to see if this dependency is met for :type ti: airflow.models.TaskInstance :param session: database session :type session: sqlalchemy.orm.session.Session :param dep_context: The context this dependency is being checked under that stores state that can be used by this dependency. :type dep_context: BaseDepContext """ return all(status.passed for status in self.get_dep_statuses(ti, session, dep_context))
def is_met(self, ti, session, dep_context=None): """ Returns whether or not this dependency is met for a given task instance. A dependency is considered met if all of the dependency statuses it reports are passing. :param ti: the task instance to see if this dependency is met for :type ti: airflow.models.TaskInstance :param session: database session :type session: sqlalchemy.orm.session.Session :param dep_context: The context this dependency is being checked under that stores state that can be used by this dependency. :type dep_context: BaseDepContext """ return all(status.passed for status in self.get_dep_statuses(ti, session, dep_context))
[ "Returns", "whether", "or", "not", "this", "dependency", "is", "met", "for", "a", "given", "task", "instance", ".", "A", "dependency", "is", "considered", "met", "if", "all", "of", "the", "dependency", "statuses", "it", "reports", "are", "passing", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/ti_deps/deps/base_ti_dep.py#L110-L125
[ "def", "is_met", "(", "self", ",", "ti", ",", "session", ",", "dep_context", "=", "None", ")", ":", "return", "all", "(", "status", ".", "passed", "for", "status", "in", "self", ".", "get_dep_statuses", "(", "ti", ",", "session", ",", "dep_context", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
BaseTIDep.get_failure_reasons
Returns an iterable of strings that explain why this dependency wasn't met. :param ti: the task instance to see if this dependency is met for :type ti: airflow.models.TaskInstance :param session: database session :type session: sqlalchemy.orm.session.Session :param dep_context: The context this dependency is being checked under that stores state that can be used by this dependency. :type dep_context: BaseDepContext
airflow/ti_deps/deps/base_ti_dep.py
def get_failure_reasons(self, ti, session, dep_context=None): """ Returns an iterable of strings that explain why this dependency wasn't met. :param ti: the task instance to see if this dependency is met for :type ti: airflow.models.TaskInstance :param session: database session :type session: sqlalchemy.orm.session.Session :param dep_context: The context this dependency is being checked under that stores state that can be used by this dependency. :type dep_context: BaseDepContext """ for dep_status in self.get_dep_statuses(ti, session, dep_context): if not dep_status.passed: yield dep_status.reason
def get_failure_reasons(self, ti, session, dep_context=None): """ Returns an iterable of strings that explain why this dependency wasn't met. :param ti: the task instance to see if this dependency is met for :type ti: airflow.models.TaskInstance :param session: database session :type session: sqlalchemy.orm.session.Session :param dep_context: The context this dependency is being checked under that stores state that can be used by this dependency. :type dep_context: BaseDepContext """ for dep_status in self.get_dep_statuses(ti, session, dep_context): if not dep_status.passed: yield dep_status.reason
[ "Returns", "an", "iterable", "of", "strings", "that", "explain", "why", "this", "dependency", "wasn", "t", "met", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/ti_deps/deps/base_ti_dep.py#L128-L142
[ "def", "get_failure_reasons", "(", "self", ",", "ti", ",", "session", ",", "dep_context", "=", "None", ")", ":", "for", "dep_status", "in", "self", ".", "get_dep_statuses", "(", "ti", ",", "session", ",", "dep_context", ")", ":", "if", "not", "dep_status", ".", "passed", ":", "yield", "dep_status", ".", "reason" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
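A small sketch of how is_met and get_failure_reasons from the two records above are typically consumed; the helper name explain_dep is invented, and an existing TaskInstance plus a reachable metadata database are assumed.

    from airflow.utils.db import provide_session

    @provide_session
    def explain_dep(ti, dep, session=None):
        # is_met() is True only when every status from get_dep_statuses() passed;
        # otherwise report the reasons attached to the failing statuses.
        if dep.is_met(ti, session):
            print("dependency met")
        else:
            for reason in dep.get_failure_reasons(ti, session):
                print("blocked:", reason)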
test
_parse_s3_config
Parses a config file for s3 credentials. Can currently parse boto, s3cmd.conf and AWS SDK config formats :param config_file_name: path to the config file :type config_file_name: str :param config_format: config type. One of "boto", "s3cmd" or "aws". Defaults to "boto" :type config_format: str :param profile: profile name in AWS type config file :type profile: str
airflow/contrib/hooks/aws_hook.py
def _parse_s3_config(config_file_name, config_format='boto', profile=None): """ Parses a config file for s3 credentials. Can currently parse boto, s3cmd.conf and AWS SDK config formats :param config_file_name: path to the config file :type config_file_name: str :param config_format: config type. One of "boto", "s3cmd" or "aws". Defaults to "boto" :type config_format: str :param profile: profile name in AWS type config file :type profile: str """ config = configparser.ConfigParser() if config.read(config_file_name): # pragma: no cover sections = config.sections() else: raise AirflowException("Couldn't read {0}".format(config_file_name)) # Setting option names depending on file format if config_format is None: config_format = 'boto' conf_format = config_format.lower() if conf_format == 'boto': # pragma: no cover if profile is not None and 'profile ' + profile in sections: cred_section = 'profile ' + profile else: cred_section = 'Credentials' elif conf_format == 'aws' and profile is not None: cred_section = profile else: cred_section = 'default' # Option names if conf_format in ('boto', 'aws'): # pragma: no cover key_id_option = 'aws_access_key_id' secret_key_option = 'aws_secret_access_key' # security_token_option = 'aws_security_token' else: key_id_option = 'access_key' secret_key_option = 'secret_key' # Actual Parsing if cred_section not in sections: raise AirflowException("This config file format is not recognized") else: try: access_key = config.get(cred_section, key_id_option) secret_key = config.get(cred_section, secret_key_option) except Exception: logging.warning("Option Error in parsing s3 config file") raise return access_key, secret_key
def _parse_s3_config(config_file_name, config_format='boto', profile=None): """ Parses a config file for s3 credentials. Can currently parse boto, s3cmd.conf and AWS SDK config formats :param config_file_name: path to the config file :type config_file_name: str :param config_format: config type. One of "boto", "s3cmd" or "aws". Defaults to "boto" :type config_format: str :param profile: profile name in AWS type config file :type profile: str """ config = configparser.ConfigParser() if config.read(config_file_name): # pragma: no cover sections = config.sections() else: raise AirflowException("Couldn't read {0}".format(config_file_name)) # Setting option names depending on file format if config_format is None: config_format = 'boto' conf_format = config_format.lower() if conf_format == 'boto': # pragma: no cover if profile is not None and 'profile ' + profile in sections: cred_section = 'profile ' + profile else: cred_section = 'Credentials' elif conf_format == 'aws' and profile is not None: cred_section = profile else: cred_section = 'default' # Option names if conf_format in ('boto', 'aws'): # pragma: no cover key_id_option = 'aws_access_key_id' secret_key_option = 'aws_secret_access_key' # security_token_option = 'aws_security_token' else: key_id_option = 'access_key' secret_key_option = 'secret_key' # Actual Parsing if cred_section not in sections: raise AirflowException("This config file format is not recognized") else: try: access_key = config.get(cred_section, key_id_option) secret_key = config.get(cred_section, secret_key_option) except Exception: logging.warning("Option Error in parsing s3 config file") raise return access_key, secret_key
[ "Parses", "a", "config", "file", "for", "s3", "credentials", ".", "Can", "currently", "parse", "boto", "s3cmd", ".", "conf", "and", "AWS", "SDK", "config", "formats" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_hook.py#L28-L77
[ "def", "_parse_s3_config", "(", "config_file_name", ",", "config_format", "=", "'boto'", ",", "profile", "=", "None", ")", ":", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "if", "config", ".", "read", "(", "config_file_name", ")", ":", "# pragma: no cover", "sections", "=", "config", ".", "sections", "(", ")", "else", ":", "raise", "AirflowException", "(", "\"Couldn't read {0}\"", ".", "format", "(", "config_file_name", ")", ")", "# Setting option names depending on file format", "if", "config_format", "is", "None", ":", "config_format", "=", "'boto'", "conf_format", "=", "config_format", ".", "lower", "(", ")", "if", "conf_format", "==", "'boto'", ":", "# pragma: no cover", "if", "profile", "is", "not", "None", "and", "'profile '", "+", "profile", "in", "sections", ":", "cred_section", "=", "'profile '", "+", "profile", "else", ":", "cred_section", "=", "'Credentials'", "elif", "conf_format", "==", "'aws'", "and", "profile", "is", "not", "None", ":", "cred_section", "=", "profile", "else", ":", "cred_section", "=", "'default'", "# Option names", "if", "conf_format", "in", "(", "'boto'", ",", "'aws'", ")", ":", "# pragma: no cover", "key_id_option", "=", "'aws_access_key_id'", "secret_key_option", "=", "'aws_secret_access_key'", "# security_token_option = 'aws_security_token'", "else", ":", "key_id_option", "=", "'access_key'", "secret_key_option", "=", "'secret_key'", "# Actual Parsing", "if", "cred_section", "not", "in", "sections", ":", "raise", "AirflowException", "(", "\"This config file format is not recognized\"", ")", "else", ":", "try", ":", "access_key", "=", "config", ".", "get", "(", "cred_section", ",", "key_id_option", ")", "secret_key", "=", "config", ".", "get", "(", "cred_section", ",", "secret_key_option", ")", "except", "Exception", ":", "logging", ".", "warning", "(", "\"Option Error in parsing s3 config file\"", ")", "raise", "return", "access_key", ",", "secret_key" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
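A self-contained sketch of _parse_s3_config using a temporary s3cmd-style file; the key values are obviously fake.

    import tempfile
    from airflow.contrib.hooks.aws_hook import _parse_s3_config

    # The 's3cmd' format expects access_key/secret_key under a [default] section.
    config_text = "[default]\naccess_key = AKIA_EXAMPLE\nsecret_key = not-a-real-secret\n"
    with tempfile.NamedTemporaryFile('w', suffix='.cfg', delete=False) as f:
        f.write(config_text)
        path = f.name

    access_key, secret_key = _parse_s3_config(path, config_format='s3cmd')
    print(access_key, secret_key)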
test
AwsHook.get_credentials
Get the underlying `botocore.Credentials` object. This contains the following authentication attributes: access_key, secret_key and token.
airflow/contrib/hooks/aws_hook.py
def get_credentials(self, region_name=None): """Get the underlying `botocore.Credentials` object. This contains the following authentication attributes: access_key, secret_key and token. """ session, _ = self._get_credentials(region_name) # Credentials are refreshable, so accessing your access key and # secret key separately can lead to a race condition. # See https://stackoverflow.com/a/36291428/8283373 return session.get_credentials().get_frozen_credentials()
def get_credentials(self, region_name=None): """Get the underlying `botocore.Credentials` object. This contains the following authentication attributes: access_key, secret_key and token. """ session, _ = self._get_credentials(region_name) # Credentials are refreshable, so accessing your access key and # secret key separately can lead to a race condition. # See https://stackoverflow.com/a/36291428/8283373 return session.get_credentials().get_frozen_credentials()
[ "Get", "the", "underlying", "botocore", ".", "Credentials", "object", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_hook.py#L183-L192
[ "def", "get_credentials", "(", "self", ",", "region_name", "=", "None", ")", ":", "session", ",", "_", "=", "self", ".", "_get_credentials", "(", "region_name", ")", "# Credentials are refreshable, so accessing your access key and", "# secret key separately can lead to a race condition.", "# See https://stackoverflow.com/a/36291428/8283373", "return", "session", ".", "get_credentials", "(", ")", ".", "get_frozen_credentials", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
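Usage sketch for get_credentials; the connection id and region are assumptions, and real AWS credentials must be configured on that connection.

    from airflow.contrib.hooks.aws_hook import AwsHook

    hook = AwsHook(aws_conn_id='aws_default')
    creds = hook.get_credentials(region_name='eu-west-1')

    # The frozen snapshot keeps access key, secret key and token consistent.
    print(creds.access_key, bool(creds.secret_key), bool(creds.token))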
test
AwsHook.expand_role
If the IAM role is a role name, get the Amazon Resource Name (ARN) for the role. If IAM role is already an IAM role ARN, no change is made. :param role: IAM role name or ARN :return: IAM role ARN
airflow/contrib/hooks/aws_hook.py
def expand_role(self, role): """ If the IAM role is a role name, get the Amazon Resource Name (ARN) for the role. If IAM role is already an IAM role ARN, no change is made. :param role: IAM role name or ARN :return: IAM role ARN """ if '/' in role: return role else: return self.get_client_type('iam').get_role(RoleName=role)['Role']['Arn']
def expand_role(self, role): """ If the IAM role is a role name, get the Amazon Resource Name (ARN) for the role. If IAM role is already an IAM role ARN, no change is made. :param role: IAM role name or ARN :return: IAM role ARN """ if '/' in role: return role else: return self.get_client_type('iam').get_role(RoleName=role)['Role']['Arn']
[ "If", "the", "IAM", "role", "is", "a", "role", "name", "get", "the", "Amazon", "Resource", "Name", "(", "ARN", ")", "for", "the", "role", ".", "If", "IAM", "role", "is", "already", "an", "IAM", "role", "ARN", "no", "change", "is", "made", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_hook.py#L194-L205
[ "def", "expand_role", "(", "self", ",", "role", ")", ":", "if", "'/'", "in", "role", ":", "return", "role", "else", ":", "return", "self", ".", "get_client_type", "(", "'iam'", ")", ".", "get_role", "(", "RoleName", "=", "role", ")", "[", "'Role'", "]", "[", "'Arn'", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
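A sketch of expand_role; the role name and account number are hypothetical, and resolving a bare name requires IAM permissions on the configured connection.

    from airflow.contrib.hooks.aws_hook import AwsHook

    hook = AwsHook(aws_conn_id='aws_default')

    # A bare role name is looked up in IAM; a full ARN (it contains '/') is
    # returned unchanged.
    print(hook.expand_role('my-example-role'))
    print(hook.expand_role('arn:aws:iam::123456789012:role/my-example-role'))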
test
VerticaHook.get_conn
Returns a Vertica connection object.
airflow/contrib/hooks/vertica_hook.py
def get_conn(self): """ Returns verticaql connection object """ conn = self.get_connection(self.vertica_conn_id) conn_config = { "user": conn.login, "password": conn.password or '', "database": conn.schema, "host": conn.host or 'localhost' } if not conn.port: conn_config["port"] = 5433 else: conn_config["port"] = int(conn.port) conn = connect(**conn_config) return conn
def get_conn(self): """ Returns verticaql connection object """ conn = self.get_connection(self.vertica_conn_id) conn_config = { "user": conn.login, "password": conn.password or '', "database": conn.schema, "host": conn.host or 'localhost' } if not conn.port: conn_config["port"] = 5433 else: conn_config["port"] = int(conn.port) conn = connect(**conn_config) return conn
[ "Returns", "verticaql", "connection", "object" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/vertica_hook.py#L35-L53
[ "def", "get_conn", "(", "self", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "vertica_conn_id", ")", "conn_config", "=", "{", "\"user\"", ":", "conn", ".", "login", ",", "\"password\"", ":", "conn", ".", "password", "or", "''", ",", "\"database\"", ":", "conn", ".", "schema", ",", "\"host\"", ":", "conn", ".", "host", "or", "'localhost'", "}", "if", "not", "conn", ".", "port", ":", "conn_config", "[", "\"port\"", "]", "=", "5433", "else", ":", "conn_config", "[", "\"port\"", "]", "=", "int", "(", "conn", ".", "port", ")", "conn", "=", "connect", "(", "*", "*", "conn_config", ")", "return", "conn" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
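A hedged sketch for VerticaHook.get_conn; the 'vertica_default' connection is assumed to exist and to point at a reachable Vertica instance.

    from contextlib import closing
    from airflow.contrib.hooks.vertica_hook import VerticaHook

    hook = VerticaHook(vertica_conn_id='vertica_default')
    with closing(hook.get_conn()) as conn:
        with closing(conn.cursor()) as cur:
            cur.execute("SELECT 1")
            print(cur.fetchone())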
test
set_context
Walks the tree of loggers and tries to set the context for each handler :param logger: logger :param value: value to set
airflow/utils/log/logging_mixin.py
def set_context(logger, value): """ Walks the tree of loggers and tries to set the context for each handler :param logger: logger :param value: value to set """ _logger = logger while _logger: for handler in _logger.handlers: try: handler.set_context(value) except AttributeError: # Not all handlers need to have context passed in so we ignore # the error when handlers do not have set_context defined. pass if _logger.propagate is True: _logger = _logger.parent else: _logger = None
def set_context(logger, value): """ Walks the tree of loggers and tries to set the context for each handler :param logger: logger :param value: value to set """ _logger = logger while _logger: for handler in _logger.handlers: try: handler.set_context(value) except AttributeError: # Not all handlers need to have context passed in so we ignore # the error when handlers do not have set_context defined. pass if _logger.propagate is True: _logger = _logger.parent else: _logger = None
[ "Walks", "the", "tree", "of", "loggers", "and", "tries", "to", "set", "the", "context", "for", "each", "handler", ":", "param", "logger", ":", "logger", ":", "param", "value", ":", "value", "to", "set" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/logging_mixin.py#L166-L184
[ "def", "set_context", "(", "logger", ",", "value", ")", ":", "_logger", "=", "logger", "while", "_logger", ":", "for", "handler", "in", "_logger", ".", "handlers", ":", "try", ":", "handler", ".", "set_context", "(", "value", ")", "except", "AttributeError", ":", "# Not all handlers need to have context passed in so we ignore", "# the error when handlers do not have set_context defined.", "pass", "if", "_logger", ".", "propagate", "is", "True", ":", "_logger", "=", "_logger", ".", "parent", "else", ":", "_logger", "=", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
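To show what set_context walks over, here is a sketch with a stand-in task-instance object; the FakeTI class and its attribute values are invented for the example.

    import logging
    from datetime import datetime
    from airflow.utils.log.logging_mixin import set_context

    class FakeTI:
        # Invented stand-in; real callers pass an actual TaskInstance so
        # file-based handlers can choose the right log file.
        dag_id = "example_dag"
        task_id = "example_task"
        execution_date = datetime(2019, 1, 1)
        try_number = 1

    logger = logging.getLogger("airflow.task")
    # Handlers without a set_context() method are skipped via AttributeError.
    set_context(logger, FakeTI())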
test
StreamLogWriter.write
Do whatever it takes to actually log the specified logging record :param message: message to log
airflow/utils/log/logging_mixin.py
def write(self, message): """ Do whatever it takes to actually log the specified logging record :param message: message to log """ if not message.endswith("\n"): self._buffer += message else: self._buffer += message self.logger.log(self.level, self._buffer.rstrip()) self._buffer = str()
def write(self, message): """ Do whatever it takes to actually log the specified logging record :param message: message to log """ if not message.endswith("\n"): self._buffer += message else: self._buffer += message self.logger.log(self.level, self._buffer.rstrip()) self._buffer = str()
[ "Do", "whatever", "it", "takes", "to", "actually", "log", "the", "specified", "logging", "record", ":", "param", "message", ":", "message", "to", "log" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/logging_mixin.py#L92-L102
[ "def", "write", "(", "self", ",", "message", ")", ":", "if", "not", "message", ".", "endswith", "(", "\"\\n\"", ")", ":", "self", ".", "_buffer", "+=", "message", "else", ":", "self", ".", "_buffer", "+=", "message", "self", ".", "logger", ".", "log", "(", "self", ".", "level", ",", "self", ".", "_buffer", ".", "rstrip", "(", ")", ")", "self", ".", "_buffer", "=", "str", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
StreamLogWriter.flush
Ensure all logging output has been flushed
airflow/utils/log/logging_mixin.py
def flush(self): """ Ensure all logging output has been flushed """ if len(self._buffer) > 0: self.logger.log(self.level, self._buffer) self._buffer = str()
def flush(self): """ Ensure all logging output has been flushed """ if len(self._buffer) > 0: self.logger.log(self.level, self._buffer) self._buffer = str()
[ "Ensure", "all", "logging", "output", "has", "been", "flushed" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/logging_mixin.py#L104-L110
[ "def", "flush", "(", "self", ")", ":", "if", "len", "(", "self", ".", "_buffer", ")", ">", "0", ":", "self", ".", "logger", ".", "log", "(", "self", ".", "level", ",", "self", ".", "_buffer", ")", "self", ".", "_buffer", "=", "str", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
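The two StreamLogWriter records above (write and flush) combine as follows; the logger name and messages are arbitrary.

    import logging
    from airflow.utils.log.logging_mixin import StreamLogWriter

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("redirected")

    writer = StreamLogWriter(logger, logging.INFO)
    writer.write("partial ")    # buffered, nothing emitted yet
    writer.write("message\n")   # newline flushes one INFO record: "partial message"
    writer.flush()              # emits any text still sitting in the buffer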
test
correct_maybe_zipped
If the path contains a folder with a .zip suffix, then the folder is treated as a zip archive and the path to the zip archive is returned.
airflow/utils/dag_processing.py
def correct_maybe_zipped(fileloc): """ If the path contains a folder with a .zip suffix, then the folder is treated as a zip archive and path to zip is returned. """ _, archive, filename = re.search( r'((.*\.zip){})?(.*)'.format(re.escape(os.sep)), fileloc).groups() if archive and zipfile.is_zipfile(archive): return archive else: return fileloc
def correct_maybe_zipped(fileloc): """ If the path contains a folder with a .zip suffix, then the folder is treated as a zip archive and path to zip is returned. """ _, archive, filename = re.search( r'((.*\.zip){})?(.*)'.format(re.escape(os.sep)), fileloc).groups() if archive and zipfile.is_zipfile(archive): return archive else: return fileloc
[ "If", "the", "path", "contains", "a", "folder", "with", "a", ".", "zip", "suffix", "then", "the", "folder", "is", "treated", "as", "a", "zip", "archive", "and", "path", "to", "zip", "is", "returned", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L275-L286
[ "def", "correct_maybe_zipped", "(", "fileloc", ")", ":", "_", ",", "archive", ",", "filename", "=", "re", ".", "search", "(", "r'((.*\\.zip){})?(.*)'", ".", "format", "(", "re", ".", "escape", "(", "os", ".", "sep", ")", ")", ",", "fileloc", ")", ".", "groups", "(", ")", "if", "archive", "and", "zipfile", ".", "is_zipfile", "(", "archive", ")", ":", "return", "archive", "else", ":", "return", "fileloc" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
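Sketch for correct_maybe_zipped; the paths are illustrative, and the second call only returns the archive path if that .zip file really exists on disk.

    from airflow.utils.dag_processing import correct_maybe_zipped

    print(correct_maybe_zipped('/usr/local/airflow/dags/my_dag.py'))
    # For a path inside a real packaged-DAG archive, the archive path comes back.
    print(correct_maybe_zipped('/usr/local/airflow/dags/my_dags.zip/my_dag.py'))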
test
list_py_file_paths
Traverse a directory and look for Python files. :param directory: the directory to traverse :type directory: unicode :param safe_mode: whether to use a heuristic to determine whether a file contains Airflow DAG definitions :return: a list of paths to Python files in the specified directory :rtype: list[unicode]
airflow/utils/dag_processing.py
def list_py_file_paths(directory, safe_mode=True, include_examples=None): """ Traverse a directory and look for Python files. :param directory: the directory to traverse :type directory: unicode :param safe_mode: whether to use a heuristic to determine whether a file contains Airflow DAG definitions :return: a list of paths to Python files in the specified directory :rtype: list[unicode] """ if include_examples is None: include_examples = conf.getboolean('core', 'LOAD_EXAMPLES') file_paths = [] if directory is None: return [] elif os.path.isfile(directory): return [directory] elif os.path.isdir(directory): patterns_by_dir = {} for root, dirs, files in os.walk(directory, followlinks=True): patterns = patterns_by_dir.get(root, []) ignore_file = os.path.join(root, '.airflowignore') if os.path.isfile(ignore_file): with open(ignore_file, 'r') as f: # If we have new patterns create a copy so we don't change # the previous list (which would affect other subdirs) patterns += [re.compile(p) for p in f.read().split('\n') if p] # If we can ignore any subdirs entirely we should - fewer paths # to walk is better. We have to modify the ``dirs`` array in # place for this to affect os.walk dirs[:] = [ d for d in dirs if not any(p.search(os.path.join(root, d)) for p in patterns) ] # We want patterns defined in a parent folder's .airflowignore to # apply to subdirs too for d in dirs: patterns_by_dir[os.path.join(root, d)] = patterns for f in files: try: file_path = os.path.join(root, f) if not os.path.isfile(file_path): continue mod_name, file_ext = os.path.splitext( os.path.split(file_path)[-1]) if file_ext != '.py' and not zipfile.is_zipfile(file_path): continue if any([re.findall(p, file_path) for p in patterns]): continue # Heuristic that guesses whether a Python file contains an # Airflow DAG definition. might_contain_dag = True if safe_mode and not zipfile.is_zipfile(file_path): with open(file_path, 'rb') as fp: content = fp.read() might_contain_dag = all( [s in content for s in (b'DAG', b'airflow')]) if not might_contain_dag: continue file_paths.append(file_path) except Exception: log = LoggingMixin().log log.exception("Error while examining %s", f) if include_examples: import airflow.example_dags example_dag_folder = airflow.example_dags.__path__[0] file_paths.extend(list_py_file_paths(example_dag_folder, safe_mode, False)) return file_paths
def list_py_file_paths(directory, safe_mode=True, include_examples=None): """ Traverse a directory and look for Python files. :param directory: the directory to traverse :type directory: unicode :param safe_mode: whether to use a heuristic to determine whether a file contains Airflow DAG definitions :return: a list of paths to Python files in the specified directory :rtype: list[unicode] """ if include_examples is None: include_examples = conf.getboolean('core', 'LOAD_EXAMPLES') file_paths = [] if directory is None: return [] elif os.path.isfile(directory): return [directory] elif os.path.isdir(directory): patterns_by_dir = {} for root, dirs, files in os.walk(directory, followlinks=True): patterns = patterns_by_dir.get(root, []) ignore_file = os.path.join(root, '.airflowignore') if os.path.isfile(ignore_file): with open(ignore_file, 'r') as f: # If we have new patterns create a copy so we don't change # the previous list (which would affect other subdirs) patterns += [re.compile(p) for p in f.read().split('\n') if p] # If we can ignore any subdirs entirely we should - fewer paths # to walk is better. We have to modify the ``dirs`` array in # place for this to affect os.walk dirs[:] = [ d for d in dirs if not any(p.search(os.path.join(root, d)) for p in patterns) ] # We want patterns defined in a parent folder's .airflowignore to # apply to subdirs too for d in dirs: patterns_by_dir[os.path.join(root, d)] = patterns for f in files: try: file_path = os.path.join(root, f) if not os.path.isfile(file_path): continue mod_name, file_ext = os.path.splitext( os.path.split(file_path)[-1]) if file_ext != '.py' and not zipfile.is_zipfile(file_path): continue if any([re.findall(p, file_path) for p in patterns]): continue # Heuristic that guesses whether a Python file contains an # Airflow DAG definition. might_contain_dag = True if safe_mode and not zipfile.is_zipfile(file_path): with open(file_path, 'rb') as fp: content = fp.read() might_contain_dag = all( [s in content for s in (b'DAG', b'airflow')]) if not might_contain_dag: continue file_paths.append(file_path) except Exception: log = LoggingMixin().log log.exception("Error while examining %s", f) if include_examples: import airflow.example_dags example_dag_folder = airflow.example_dags.__path__[0] file_paths.extend(list_py_file_paths(example_dag_folder, safe_mode, False)) return file_paths
[ "Traverse", "a", "directory", "and", "look", "for", "Python", "files", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L289-L365
[ "def", "list_py_file_paths", "(", "directory", ",", "safe_mode", "=", "True", ",", "include_examples", "=", "None", ")", ":", "if", "include_examples", "is", "None", ":", "include_examples", "=", "conf", ".", "getboolean", "(", "'core'", ",", "'LOAD_EXAMPLES'", ")", "file_paths", "=", "[", "]", "if", "directory", "is", "None", ":", "return", "[", "]", "elif", "os", ".", "path", ".", "isfile", "(", "directory", ")", ":", "return", "[", "directory", "]", "elif", "os", ".", "path", ".", "isdir", "(", "directory", ")", ":", "patterns_by_dir", "=", "{", "}", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "directory", ",", "followlinks", "=", "True", ")", ":", "patterns", "=", "patterns_by_dir", ".", "get", "(", "root", ",", "[", "]", ")", "ignore_file", "=", "os", ".", "path", ".", "join", "(", "root", ",", "'.airflowignore'", ")", "if", "os", ".", "path", ".", "isfile", "(", "ignore_file", ")", ":", "with", "open", "(", "ignore_file", ",", "'r'", ")", "as", "f", ":", "# If we have new patterns create a copy so we don't change", "# the previous list (which would affect other subdirs)", "patterns", "+=", "[", "re", ".", "compile", "(", "p", ")", "for", "p", "in", "f", ".", "read", "(", ")", ".", "split", "(", "'\\n'", ")", "if", "p", "]", "# If we can ignore any subdirs entirely we should - fewer paths", "# to walk is better. We have to modify the ``dirs`` array in", "# place for this to affect os.walk", "dirs", "[", ":", "]", "=", "[", "d", "for", "d", "in", "dirs", "if", "not", "any", "(", "p", ".", "search", "(", "os", ".", "path", ".", "join", "(", "root", ",", "d", ")", ")", "for", "p", "in", "patterns", ")", "]", "# We want patterns defined in a parent folder's .airflowignore to", "# apply to subdirs too", "for", "d", "in", "dirs", ":", "patterns_by_dir", "[", "os", ".", "path", ".", "join", "(", "root", ",", "d", ")", "]", "=", "patterns", "for", "f", "in", "files", ":", "try", ":", "file_path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "continue", "mod_name", ",", "file_ext", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "split", "(", "file_path", ")", "[", "-", "1", "]", ")", "if", "file_ext", "!=", "'.py'", "and", "not", "zipfile", ".", "is_zipfile", "(", "file_path", ")", ":", "continue", "if", "any", "(", "[", "re", ".", "findall", "(", "p", ",", "file_path", ")", "for", "p", "in", "patterns", "]", ")", ":", "continue", "# Heuristic that guesses whether a Python file contains an", "# Airflow DAG definition.", "might_contain_dag", "=", "True", "if", "safe_mode", "and", "not", "zipfile", ".", "is_zipfile", "(", "file_path", ")", ":", "with", "open", "(", "file_path", ",", "'rb'", ")", "as", "fp", ":", "content", "=", "fp", ".", "read", "(", ")", "might_contain_dag", "=", "all", "(", "[", "s", "in", "content", "for", "s", "in", "(", "b'DAG'", ",", "b'airflow'", ")", "]", ")", "if", "not", "might_contain_dag", ":", "continue", "file_paths", ".", "append", "(", "file_path", ")", "except", "Exception", ":", "log", "=", "LoggingMixin", "(", ")", ".", "log", "log", ".", "exception", "(", "\"Error while examining %s\"", ",", "f", ")", "if", "include_examples", ":", "import", "airflow", ".", "example_dags", "example_dag_folder", "=", "airflow", ".", "example_dags", ".", "__path__", "[", "0", "]", "file_paths", ".", "extend", "(", "list_py_file_paths", "(", "example_dag_folder", ",", "safe_mode", ",", "False", ")", ")", "return", "file_paths" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
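Sketch for list_py_file_paths; the DAGs folder path is a placeholder.

    from airflow.utils.dag_processing import list_py_file_paths

    # safe_mode keeps only files mentioning both b'DAG' and b'airflow';
    # include_examples=False skips the bundled example DAGs.
    paths = list_py_file_paths('/usr/local/airflow/dags',
                               safe_mode=True,
                               include_examples=False)
    for p in paths:
        print(p)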
test
SimpleTaskInstance.construct_task_instance
Construct a TaskInstance from the database based on the primary key :param session: DB session. :param lock_for_update: if True, indicates that the database should lock the TaskInstance (issuing a FOR UPDATE clause) until the session is committed.
airflow/utils/dag_processing.py
def construct_task_instance(self, session=None, lock_for_update=False): """ Construct a TaskInstance from the database based on the primary key :param session: DB session. :param lock_for_update: if True, indicates that the database should lock the TaskInstance (issuing a FOR UPDATE clause) until the session is committed. """ TI = airflow.models.TaskInstance qry = session.query(TI).filter( TI.dag_id == self._dag_id, TI.task_id == self._task_id, TI.execution_date == self._execution_date) if lock_for_update: ti = qry.with_for_update().first() else: ti = qry.first() return ti
def construct_task_instance(self, session=None, lock_for_update=False): """ Construct a TaskInstance from the database based on the primary key :param session: DB session. :param lock_for_update: if True, indicates that the database should lock the TaskInstance (issuing a FOR UPDATE clause) until the session is committed. """ TI = airflow.models.TaskInstance qry = session.query(TI).filter( TI.dag_id == self._dag_id, TI.task_id == self._task_id, TI.execution_date == self._execution_date) if lock_for_update: ti = qry.with_for_update().first() else: ti = qry.first() return ti
[ "Construct", "a", "TaskInstance", "from", "the", "database", "based", "on", "the", "primary", "key" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L213-L233
[ "def", "construct_task_instance", "(", "self", ",", "session", "=", "None", ",", "lock_for_update", "=", "False", ")", ":", "TI", "=", "airflow", ".", "models", ".", "TaskInstance", "qry", "=", "session", ".", "query", "(", "TI", ")", ".", "filter", "(", "TI", ".", "dag_id", "==", "self", ".", "_dag_id", ",", "TI", ".", "task_id", "==", "self", ".", "_task_id", ",", "TI", ".", "execution_date", "==", "self", ".", "_execution_date", ")", "if", "lock_for_update", ":", "ti", "=", "qry", ".", "with_for_update", "(", ")", ".", "first", "(", ")", "else", ":", "ti", "=", "qry", ".", "first", "(", ")", "return", "ti" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
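A hedged round-trip sketch for construct_task_instance; the helper name reload_ti is invented, and an existing TaskInstance plus a reachable metadata database are assumed.

    from airflow.utils.dag_processing import SimpleTaskInstance
    from airflow.utils.db import provide_session

    @provide_session
    def reload_ti(ti, session=None):
        # Wrap the TaskInstance, then re-fetch it by primary key, locking the
        # row (FOR UPDATE) so concurrent schedulers cannot modify it meanwhile.
        simple_ti = SimpleTaskInstance(ti)
        return simple_ti.construct_task_instance(session=session,
                                                 lock_for_update=True)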
test
SimpleDagBag.get_dag
:param dag_id: DAG ID :type dag_id: unicode :return: if the given DAG ID exists in the bag, return the BaseDag corresponding to that ID. Otherwise, throw an Exception :rtype: airflow.utils.dag_processing.SimpleDag
airflow/utils/dag_processing.py
def get_dag(self, dag_id): """ :param dag_id: DAG ID :type dag_id: unicode :return: if the given DAG ID exists in the bag, return the BaseDag corresponding to that ID. Otherwise, throw an Exception :rtype: airflow.utils.dag_processing.SimpleDag """ if dag_id not in self.dag_id_to_simple_dag: raise AirflowException("Unknown DAG ID {}".format(dag_id)) return self.dag_id_to_simple_dag[dag_id]
def get_dag(self, dag_id): """ :param dag_id: DAG ID :type dag_id: unicode :return: if the given DAG ID exists in the bag, return the BaseDag corresponding to that ID. Otherwise, throw an Exception :rtype: airflow.utils.dag_processing.SimpleDag """ if dag_id not in self.dag_id_to_simple_dag: raise AirflowException("Unknown DAG ID {}".format(dag_id)) return self.dag_id_to_simple_dag[dag_id]
[ ":", "param", "dag_id", ":", "DAG", "ID", ":", "type", "dag_id", ":", "unicode", ":", "return", ":", "if", "the", "given", "DAG", "ID", "exists", "in", "the", "bag", "return", "the", "BaseDag", "corresponding", "to", "that", "ID", ".", "Otherwise", "throw", "an", "Exception", ":", "rtype", ":", "airflow", ".", "utils", ".", "dag_processing", ".", "SimpleDag" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L262-L272
[ "def", "get_dag", "(", "self", ",", "dag_id", ")", ":", "if", "dag_id", "not", "in", "self", ".", "dag_id_to_simple_dag", ":", "raise", "AirflowException", "(", "\"Unknown DAG ID {}\"", ".", "format", "(", "dag_id", ")", ")", "return", "self", ".", "dag_id_to_simple_dag", "[", "dag_id", "]" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
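Sketch for SimpleDagBag.get_dag showing the unknown-ID failure mode; an empty bag is used purely for illustration.

    from airflow.exceptions import AirflowException
    from airflow.utils.dag_processing import SimpleDagBag

    # Normally simple_dags comes from DAG parsing results
    # (e.g. DagFileProcessorAgent.harvest_simple_dags()).
    dag_bag = SimpleDagBag(simple_dags=[])
    try:
        dag_bag.get_dag('missing_dag')
    except AirflowException as err:
        print(err)  # "Unknown DAG ID missing_dag"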
test
DagFileProcessorAgent.start
Launch DagFileProcessorManager processor and start DAG parsing loop in manager.
airflow/utils/dag_processing.py
def start(self): """ Launch DagFileProcessorManager processor and start DAG parsing loop in manager. """ self._process = self._launch_process(self._dag_directory, self._file_paths, self._max_runs, self._processor_factory, self._child_signal_conn, self._stat_queue, self._result_queue, self._async_mode) self.log.info("Launched DagFileProcessorManager with pid: %s", self._process.pid)
def start(self): """ Launch DagFileProcessorManager processor and start DAG parsing loop in manager. """ self._process = self._launch_process(self._dag_directory, self._file_paths, self._max_runs, self._processor_factory, self._child_signal_conn, self._stat_queue, self._result_queue, self._async_mode) self.log.info("Launched DagFileProcessorManager with pid: %s", self._process.pid)
[ "Launch", "DagFileProcessorManager", "processor", "and", "start", "DAG", "parsing", "loop", "in", "manager", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L512-L524
[ "def", "start", "(", "self", ")", ":", "self", ".", "_process", "=", "self", ".", "_launch_process", "(", "self", ".", "_dag_directory", ",", "self", ".", "_file_paths", ",", "self", ".", "_max_runs", ",", "self", ".", "_processor_factory", ",", "self", ".", "_child_signal_conn", ",", "self", ".", "_stat_queue", ",", "self", ".", "_result_queue", ",", "self", ".", "_async_mode", ")", "self", ".", "log", ".", "info", "(", "\"Launched DagFileProcessorManager with pid: %s\"", ",", "self", ".", "_process", ".", "pid", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorAgent.harvest_simple_dags
Harvest DAG parsing results from result queue and sync metadata from stat queue. :return: List of parsing result in SimpleDag format.
airflow/utils/dag_processing.py
def harvest_simple_dags(self): """ Harvest DAG parsing results from result queue and sync metadata from stat queue. :return: List of parsing result in SimpleDag format. """ # Metadata and results to be harvested can be inconsistent, # but it should not be a big problem. self._sync_metadata() # Heartbeating after syncing metadata so we do not restart manager # if it processed all files for max_run times and exit normally. self._heartbeat_manager() simple_dags = [] # multiprocessing.Queue().qsize will not work on MacOS. if sys.platform == "darwin": qsize = self._result_count else: qsize = self._result_queue.qsize() for _ in range(qsize): simple_dags.append(self._result_queue.get()) self._result_count = 0 return simple_dags
def harvest_simple_dags(self): """ Harvest DAG parsing results from result queue and sync metadata from stat queue. :return: List of parsing result in SimpleDag format. """ # Metadata and results to be harvested can be inconsistent, # but it should not be a big problem. self._sync_metadata() # Heartbeating after syncing metadata so we do not restart manager # if it processed all files for max_run times and exit normally. self._heartbeat_manager() simple_dags = [] # multiprocessing.Queue().qsize will not work on MacOS. if sys.platform == "darwin": qsize = self._result_count else: qsize = self._result_queue.qsize() for _ in range(qsize): simple_dags.append(self._result_queue.get()) self._result_count = 0 return simple_dags
[ "Harvest", "DAG", "parsing", "results", "from", "result", "queue", "and", "sync", "metadata", "from", "stat", "queue", ".", ":", "return", ":", "List", "of", "parsing", "result", "in", "SimpleDag", "format", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L580-L602
[ "def", "harvest_simple_dags", "(", "self", ")", ":", "# Metadata and results to be harvested can be inconsistent,", "# but it should not be a big problem.", "self", ".", "_sync_metadata", "(", ")", "# Heartbeating after syncing metadata so we do not restart manager", "# if it processed all files for max_run times and exit normally.", "self", ".", "_heartbeat_manager", "(", ")", "simple_dags", "=", "[", "]", "# multiprocessing.Queue().qsize will not work on MacOS.", "if", "sys", ".", "platform", "==", "\"darwin\"", ":", "qsize", "=", "self", ".", "_result_count", "else", ":", "qsize", "=", "self", ".", "_result_queue", ".", "qsize", "(", ")", "for", "_", "in", "range", "(", "qsize", ")", ":", "simple_dags", ".", "append", "(", "self", ".", "_result_queue", ".", "get", "(", ")", ")", "self", ".", "_result_count", "=", "0", "return", "simple_dags" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorAgent._heartbeat_manager
Heartbeat DAG file processor and start it if it is not alive. :return:
airflow/utils/dag_processing.py
def _heartbeat_manager(self): """ Heartbeat DAG file processor and start it if it is not alive. :return: """ if self._process and not self._process.is_alive() and not self.done: self.start()
def _heartbeat_manager(self): """ Heartbeat DAG file processor and start it if it is not alive. :return: """ if self._process and not self._process.is_alive() and not self.done: self.start()
[ "Heartbeat", "DAG", "file", "processor", "and", "start", "it", "if", "it", "is", "not", "alive", ".", ":", "return", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L604-L610
[ "def", "_heartbeat_manager", "(", "self", ")", ":", "if", "self", ".", "_process", "and", "not", "self", ".", "_process", ".", "is_alive", "(", ")", "and", "not", "self", ".", "done", ":", "self", ".", "start", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorAgent._sync_metadata
Sync metadata from stat queue and only keep the latest stat. :return:
airflow/utils/dag_processing.py
def _sync_metadata(self): """ Sync metadata from stat queue and only keep the latest stat. :return: """ while not self._stat_queue.empty(): stat = self._stat_queue.get() self._file_paths = stat.file_paths self._all_pids = stat.all_pids self._done = stat.done self._all_files_processed = stat.all_files_processed self._result_count += stat.result_count
def _sync_metadata(self): """ Sync metadata from stat queue and only keep the latest stat. :return: """ while not self._stat_queue.empty(): stat = self._stat_queue.get() self._file_paths = stat.file_paths self._all_pids = stat.all_pids self._done = stat.done self._all_files_processed = stat.all_files_processed self._result_count += stat.result_count
[ "Sync", "metadata", "from", "stat", "queue", "and", "only", "keep", "the", "latest", "stat", ".", ":", "return", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L612-L623
[ "def", "_sync_metadata", "(", "self", ")", ":", "while", "not", "self", ".", "_stat_queue", ".", "empty", "(", ")", ":", "stat", "=", "self", ".", "_stat_queue", ".", "get", "(", ")", "self", ".", "_file_paths", "=", "stat", ".", "file_paths", "self", ".", "_all_pids", "=", "stat", ".", "all_pids", "self", ".", "_done", "=", "stat", ".", "done", "self", ".", "_all_files_processed", "=", "stat", ".", "all_files_processed", "self", ".", "_result_count", "+=", "stat", ".", "result_count" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorAgent.terminate
Send termination signal to DAG parsing processor manager and expect it to terminate all DAG file processors.
airflow/utils/dag_processing.py
def terminate(self): """ Send termination signal to DAG parsing processor manager and expect it to terminate all DAG file processors. """ self.log.info("Sending termination message to manager.") self._child_signal_conn.send(DagParsingSignal.TERMINATE_MANAGER)
def terminate(self): """ Send termination signal to DAG parsing processor manager and expect it to terminate all DAG file processors. """ self.log.info("Sending termination message to manager.") self._child_signal_conn.send(DagParsingSignal.TERMINATE_MANAGER)
[ "Send", "termination", "signal", "to", "DAG", "parsing", "processor", "manager", "and", "expect", "it", "to", "terminate", "all", "DAG", "file", "processors", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L637-L643
[ "def", "terminate", "(", "self", ")", ":", "self", ".", "log", ".", "info", "(", "\"Sending termination message to manager.\"", ")", "self", ".", "_child_signal_conn", ".", "send", "(", "DagParsingSignal", ".", "TERMINATE_MANAGER", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorAgent.end
Terminate (and then kill) the manager process launched. :return:
airflow/utils/dag_processing.py
def end(self): """ Terminate (and then kill) the manager process launched. :return: """ if not self._process: self.log.warn('Ending without manager process.') return this_process = psutil.Process(os.getpid()) try: manager_process = psutil.Process(self._process.pid) except psutil.NoSuchProcess: self.log.info("Manager process not running.") return # First try SIGTERM if manager_process.is_running() \ and manager_process.pid in [x.pid for x in this_process.children()]: self.log.info("Terminating manager process: %s", manager_process.pid) manager_process.terminate() # TODO: Remove magic number timeout = 5 self.log.info("Waiting up to %ss for manager process to exit...", timeout) try: psutil.wait_procs({manager_process}, timeout) except psutil.TimeoutExpired: self.log.debug("Ran out of time while waiting for " "processes to exit") # Then SIGKILL if manager_process.is_running() \ and manager_process.pid in [x.pid for x in this_process.children()]: self.log.info("Killing manager process: %s", manager_process.pid) manager_process.kill() manager_process.wait()
def end(self): """ Terminate (and then kill) the manager process launched. :return: """ if not self._process: self.log.warn('Ending without manager process.') return this_process = psutil.Process(os.getpid()) try: manager_process = psutil.Process(self._process.pid) except psutil.NoSuchProcess: self.log.info("Manager process not running.") return # First try SIGTERM if manager_process.is_running() \ and manager_process.pid in [x.pid for x in this_process.children()]: self.log.info("Terminating manager process: %s", manager_process.pid) manager_process.terminate() # TODO: Remove magic number timeout = 5 self.log.info("Waiting up to %ss for manager process to exit...", timeout) try: psutil.wait_procs({manager_process}, timeout) except psutil.TimeoutExpired: self.log.debug("Ran out of time while waiting for " "processes to exit") # Then SIGKILL if manager_process.is_running() \ and manager_process.pid in [x.pid for x in this_process.children()]: self.log.info("Killing manager process: %s", manager_process.pid) manager_process.kill() manager_process.wait()
[ "Terminate", "(", "and", "then", "kill", ")", "the", "manager", "process", "launched", ".", ":", "return", ":" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L645-L679
[ "def", "end", "(", "self", ")", ":", "if", "not", "self", ".", "_process", ":", "self", ".", "log", ".", "warn", "(", "'Ending without manager process.'", ")", "return", "this_process", "=", "psutil", ".", "Process", "(", "os", ".", "getpid", "(", ")", ")", "try", ":", "manager_process", "=", "psutil", ".", "Process", "(", "self", ".", "_process", ".", "pid", ")", "except", "psutil", ".", "NoSuchProcess", ":", "self", ".", "log", ".", "info", "(", "\"Manager process not running.\"", ")", "return", "# First try SIGTERM", "if", "manager_process", ".", "is_running", "(", ")", "and", "manager_process", ".", "pid", "in", "[", "x", ".", "pid", "for", "x", "in", "this_process", ".", "children", "(", ")", "]", ":", "self", ".", "log", ".", "info", "(", "\"Terminating manager process: %s\"", ",", "manager_process", ".", "pid", ")", "manager_process", ".", "terminate", "(", ")", "# TODO: Remove magic number", "timeout", "=", "5", "self", ".", "log", ".", "info", "(", "\"Waiting up to %ss for manager process to exit...\"", ",", "timeout", ")", "try", ":", "psutil", ".", "wait_procs", "(", "{", "manager_process", "}", ",", "timeout", ")", "except", "psutil", ".", "TimeoutExpired", ":", "self", ".", "log", ".", "debug", "(", "\"Ran out of time while waiting for \"", "\"processes to exit\"", ")", "# Then SIGKILL", "if", "manager_process", ".", "is_running", "(", ")", "and", "manager_process", ".", "pid", "in", "[", "x", ".", "pid", "for", "x", "in", "this_process", ".", "children", "(", ")", "]", ":", "self", ".", "log", ".", "info", "(", "\"Killing manager process: %s\"", ",", "manager_process", ".", "pid", ")", "manager_process", ".", "kill", "(", ")", "manager_process", ".", "wait", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
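The record above shows the agent-side shutdown path. As a hedged illustration of the same terminate-then-kill escalation, the sketch below spawns a throwaway child process and applies SIGTERM, a bounded wait, then SIGKILL via psutil; the _sleepy_worker helper and the 5-second timeout are assumptions for the demo, not Airflow API.

import os
import time
import multiprocessing

import psutil  # third-party, the same dependency the manager code uses


def _sleepy_worker():
    # Stand-in child process that just sleeps; a placeholder for a DAG file processor.
    time.sleep(60)


if __name__ == "__main__":
    child = multiprocessing.Process(target=_sleepy_worker)
    child.start()

    proc = psutil.Process(child.pid)
    this_process = psutil.Process(os.getpid())

    # First try SIGTERM, but only if the PID is still one of our own children.
    if proc.is_running() and proc.pid in [c.pid for c in this_process.children()]:
        proc.terminate()

    # Wait a bounded amount of time for a graceful exit.
    _gone, alive = psutil.wait_procs([proc], timeout=5)

    # Escalate to SIGKILL for anything that survived the grace period.
    for survivor in alive:
        survivor.kill()
        survivor.wait()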
test
DagFileProcessorManager._exit_gracefully
Helper method to clean up DAG file processors to avoid leaving orphan processes.
airflow/utils/dag_processing.py
def _exit_gracefully(self, signum, frame): """ Helper method to clean up DAG file processors to avoid leaving orphan processes. """ self.log.info("Exiting gracefully upon receiving signal %s", signum) self.terminate() self.end() self.log.debug("Finished terminating DAG processors.") sys.exit(os.EX_OK)
def _exit_gracefully(self, signum, frame): """ Helper method to clean up DAG file processors to avoid leaving orphan processes. """ self.log.info("Exiting gracefully upon receiving signal %s", signum) self.terminate() self.end() self.log.debug("Finished terminating DAG processors.") sys.exit(os.EX_OK)
[ "Helper", "method", "to", "clean", "up", "DAG", "file", "processors", "to", "avoid", "leaving", "orphan", "processes", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L779-L787
[ "def", "_exit_gracefully", "(", "self", ",", "signum", ",", "frame", ")", ":", "self", ".", "log", ".", "info", "(", "\"Exiting gracefully upon receiving signal %s\"", ",", "signum", ")", "self", ".", "terminate", "(", ")", "self", ".", "end", "(", ")", "self", ".", "log", ".", "debug", "(", "\"Finished terminating DAG processors.\"", ")", "sys", ".", "exit", "(", "os", ".", "EX_OK", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
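As a hedged sketch of the signal-handling pattern behind _exit_gracefully, the snippet below registers SIGTERM/SIGINT handlers that run a cleanup step and exit; cleanup() is a hypothetical stand-in for the manager's terminate()/end() calls, and os.EX_OK is a Unix-only constant, as in the source.

import os
import signal
import sys
import time


def cleanup():
    # Hypothetical stand-in for terminating child processors and joining them.
    print("cleaning up child processors...")


def _exit_gracefully(signum, frame):
    # Same shape as the method above: log, clean up, then exit with a success code.
    print("Exiting gracefully upon receiving signal %s" % signum)
    cleanup()
    sys.exit(os.EX_OK)


if __name__ == "__main__":
    signal.signal(signal.SIGTERM, _exit_gracefully)
    signal.signal(signal.SIGINT, _exit_gracefully)
    while True:
        time.sleep(1)  # idle loop; send SIGTERM or press Ctrl+C to trigger the handler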
test
DagFileProcessorManager.start
Use multiple processes to parse and generate tasks for the DAGs in parallel. By processing them in separate processes, we can get parallelism and isolation from potentially harmful user code.
airflow/utils/dag_processing.py
def start(self): """ Use multiple processes to parse and generate tasks for the DAGs in parallel. By processing them in separate processes, we can get parallelism and isolation from potentially harmful user code. """ self.log.info("Processing files using up to %s processes at a time ", self._parallelism) self.log.info("Process each file at most once every %s seconds", self._file_process_interval) self.log.info( "Checking for new files in %s every %s seconds", self._dag_directory, self.dag_dir_list_interval ) if self._async_mode: self.log.debug("Starting DagFileProcessorManager in async mode") self.start_in_async() else: self.log.debug("Starting DagFileProcessorManager in sync mode") self.start_in_sync()
def start(self): """ Use multiple processes to parse and generate tasks for the DAGs in parallel. By processing them in separate processes, we can get parallelism and isolation from potentially harmful user code. """ self.log.info("Processing files using up to %s processes at a time ", self._parallelism) self.log.info("Process each file at most once every %s seconds", self._file_process_interval) self.log.info( "Checking for new files in %s every %s seconds", self._dag_directory, self.dag_dir_list_interval ) if self._async_mode: self.log.debug("Starting DagFileProcessorManager in async mode") self.start_in_async() else: self.log.debug("Starting DagFileProcessorManager in sync mode") self.start_in_sync()
[ "Use", "multiple", "processes", "to", "parse", "and", "generate", "tasks", "for", "the", "DAGs", "in", "parallel", ".", "By", "processing", "them", "in", "separate", "processes", "we", "can", "get", "parallelism", "and", "isolation", "from", "potentially", "harmful", "user", "code", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L789-L808
[ "def", "start", "(", "self", ")", ":", "self", ".", "log", ".", "info", "(", "\"Processing files using up to %s processes at a time \"", ",", "self", ".", "_parallelism", ")", "self", ".", "log", ".", "info", "(", "\"Process each file at most once every %s seconds\"", ",", "self", ".", "_file_process_interval", ")", "self", ".", "log", ".", "info", "(", "\"Checking for new files in %s every %s seconds\"", ",", "self", ".", "_dag_directory", ",", "self", ".", "dag_dir_list_interval", ")", "if", "self", ".", "_async_mode", ":", "self", ".", "log", ".", "debug", "(", "\"Starting DagFileProcessorManager in async mode\"", ")", "self", ".", "start_in_async", "(", ")", "else", ":", "self", ".", "log", ".", "debug", "(", "\"Starting DagFileProcessorManager in sync mode\"", ")", "self", ".", "start_in_sync", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorManager.start_in_async
Parse DAG files repeatedly in a standalone loop.
airflow/utils/dag_processing.py
def start_in_async(self): """ Parse DAG files repeatedly in a standalone loop. """ while True: loop_start_time = time.time() if self._signal_conn.poll(): agent_signal = self._signal_conn.recv() if agent_signal == DagParsingSignal.TERMINATE_MANAGER: self.terminate() break elif agent_signal == DagParsingSignal.END_MANAGER: self.end() sys.exit(os.EX_OK) self._refresh_dag_dir() simple_dags = self.heartbeat() for simple_dag in simple_dags: self._result_queue.put(simple_dag) self._print_stat() all_files_processed = all(self.get_last_finish_time(x) is not None for x in self.file_paths) max_runs_reached = self.max_runs_reached() dag_parsing_stat = DagParsingStat(self._file_paths, self.get_all_pids(), max_runs_reached, all_files_processed, len(simple_dags)) self._stat_queue.put(dag_parsing_stat) if max_runs_reached: self.log.info("Exiting dag parsing loop as all files " "have been processed %s times", self._max_runs) break loop_duration = time.time() - loop_start_time if loop_duration < 1: sleep_length = 1 - loop_duration self.log.debug("Sleeping for %.2f seconds to prevent excessive logging", sleep_length) time.sleep(sleep_length)
def start_in_async(self): """ Parse DAG files repeatedly in a standalone loop. """ while True: loop_start_time = time.time() if self._signal_conn.poll(): agent_signal = self._signal_conn.recv() if agent_signal == DagParsingSignal.TERMINATE_MANAGER: self.terminate() break elif agent_signal == DagParsingSignal.END_MANAGER: self.end() sys.exit(os.EX_OK) self._refresh_dag_dir() simple_dags = self.heartbeat() for simple_dag in simple_dags: self._result_queue.put(simple_dag) self._print_stat() all_files_processed = all(self.get_last_finish_time(x) is not None for x in self.file_paths) max_runs_reached = self.max_runs_reached() dag_parsing_stat = DagParsingStat(self._file_paths, self.get_all_pids(), max_runs_reached, all_files_processed, len(simple_dags)) self._stat_queue.put(dag_parsing_stat) if max_runs_reached: self.log.info("Exiting dag parsing loop as all files " "have been processed %s times", self._max_runs) break loop_duration = time.time() - loop_start_time if loop_duration < 1: sleep_length = 1 - loop_duration self.log.debug("Sleeping for %.2f seconds to prevent excessive logging", sleep_length) time.sleep(sleep_length)
[ "Parse", "DAG", "files", "repeatedly", "in", "a", "standalone", "loop", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L810-L854
[ "def", "start_in_async", "(", "self", ")", ":", "while", "True", ":", "loop_start_time", "=", "time", ".", "time", "(", ")", "if", "self", ".", "_signal_conn", ".", "poll", "(", ")", ":", "agent_signal", "=", "self", ".", "_signal_conn", ".", "recv", "(", ")", "if", "agent_signal", "==", "DagParsingSignal", ".", "TERMINATE_MANAGER", ":", "self", ".", "terminate", "(", ")", "break", "elif", "agent_signal", "==", "DagParsingSignal", ".", "END_MANAGER", ":", "self", ".", "end", "(", ")", "sys", ".", "exit", "(", "os", ".", "EX_OK", ")", "self", ".", "_refresh_dag_dir", "(", ")", "simple_dags", "=", "self", ".", "heartbeat", "(", ")", "for", "simple_dag", "in", "simple_dags", ":", "self", ".", "_result_queue", ".", "put", "(", "simple_dag", ")", "self", ".", "_print_stat", "(", ")", "all_files_processed", "=", "all", "(", "self", ".", "get_last_finish_time", "(", "x", ")", "is", "not", "None", "for", "x", "in", "self", ".", "file_paths", ")", "max_runs_reached", "=", "self", ".", "max_runs_reached", "(", ")", "dag_parsing_stat", "=", "DagParsingStat", "(", "self", ".", "_file_paths", ",", "self", ".", "get_all_pids", "(", ")", ",", "max_runs_reached", ",", "all_files_processed", ",", "len", "(", "simple_dags", ")", ")", "self", ".", "_stat_queue", ".", "put", "(", "dag_parsing_stat", ")", "if", "max_runs_reached", ":", "self", ".", "log", ".", "info", "(", "\"Exiting dag parsing loop as all files \"", "\"have been processed %s times\"", ",", "self", ".", "_max_runs", ")", "break", "loop_duration", "=", "time", ".", "time", "(", ")", "-", "loop_start_time", "if", "loop_duration", "<", "1", ":", "sleep_length", "=", "1", "-", "loop_duration", "self", ".", "log", ".", "debug", "(", "\"Sleeping for %.2f seconds to prevent excessive logging\"", ",", "sleep_length", ")", "time", ".", "sleep", "(", "sleep_length", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
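The async loop above throttles itself so that one pass takes at least a second. A minimal sketch of that throttle, assuming a hypothetical do_one_parsing_pass() placeholder for the refresh/heartbeat/stats work:

import time


def do_one_parsing_pass(i):
    # Hypothetical placeholder for one refresh + heartbeat + stats pass.
    return i * i


if __name__ == "__main__":
    for i in range(3):
        loop_start_time = time.time()
        do_one_parsing_pass(i)

        # If a pass took less than a second, sleep the remainder so the loop
        # does not spin and spam the logs, mirroring the code above.
        loop_duration = time.time() - loop_start_time
        if loop_duration < 1:
            time.sleep(1 - loop_duration)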
test
DagFileProcessorManager.start_in_sync
Parse DAG files in a loop controlled by DagParsingSignal. Actual DAG parsing loop will run once upon receiving one agent heartbeat message and will report done when finished the loop.
airflow/utils/dag_processing.py
def start_in_sync(self): """ Parse DAG files in a loop controlled by DagParsingSignal. Actual DAG parsing loop will run once upon receiving one agent heartbeat message and will report done when finished the loop. """ while True: agent_signal = self._signal_conn.recv() if agent_signal == DagParsingSignal.TERMINATE_MANAGER: self.terminate() break elif agent_signal == DagParsingSignal.END_MANAGER: self.end() sys.exit(os.EX_OK) elif agent_signal == DagParsingSignal.AGENT_HEARTBEAT: self._refresh_dag_dir() simple_dags = self.heartbeat() for simple_dag in simple_dags: self._result_queue.put(simple_dag) self._print_stat() all_files_processed = all(self.get_last_finish_time(x) is not None for x in self.file_paths) max_runs_reached = self.max_runs_reached() dag_parsing_stat = DagParsingStat(self._file_paths, self.get_all_pids(), self.max_runs_reached(), all_files_processed, len(simple_dags)) self._stat_queue.put(dag_parsing_stat) self.wait_until_finished() self._signal_conn.send(DagParsingSignal.MANAGER_DONE) if max_runs_reached: self.log.info("Exiting dag parsing loop as all files " "have been processed %s times", self._max_runs) self._signal_conn.send(DagParsingSignal.MANAGER_DONE) break
def start_in_sync(self): """ Parse DAG files in a loop controlled by DagParsingSignal. Actual DAG parsing loop will run once upon receiving one agent heartbeat message and will report done when finished the loop. """ while True: agent_signal = self._signal_conn.recv() if agent_signal == DagParsingSignal.TERMINATE_MANAGER: self.terminate() break elif agent_signal == DagParsingSignal.END_MANAGER: self.end() sys.exit(os.EX_OK) elif agent_signal == DagParsingSignal.AGENT_HEARTBEAT: self._refresh_dag_dir() simple_dags = self.heartbeat() for simple_dag in simple_dags: self._result_queue.put(simple_dag) self._print_stat() all_files_processed = all(self.get_last_finish_time(x) is not None for x in self.file_paths) max_runs_reached = self.max_runs_reached() dag_parsing_stat = DagParsingStat(self._file_paths, self.get_all_pids(), self.max_runs_reached(), all_files_processed, len(simple_dags)) self._stat_queue.put(dag_parsing_stat) self.wait_until_finished() self._signal_conn.send(DagParsingSignal.MANAGER_DONE) if max_runs_reached: self.log.info("Exiting dag parsing loop as all files " "have been processed %s times", self._max_runs) self._signal_conn.send(DagParsingSignal.MANAGER_DONE) break
[ "Parse", "DAG", "files", "in", "a", "loop", "controlled", "by", "DagParsingSignal", ".", "Actual", "DAG", "parsing", "loop", "will", "run", "once", "upon", "receiving", "one", "agent", "heartbeat", "message", "and", "will", "report", "done", "when", "finished", "the", "loop", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L856-L898
[ "def", "start_in_sync", "(", "self", ")", ":", "while", "True", ":", "agent_signal", "=", "self", ".", "_signal_conn", ".", "recv", "(", ")", "if", "agent_signal", "==", "DagParsingSignal", ".", "TERMINATE_MANAGER", ":", "self", ".", "terminate", "(", ")", "break", "elif", "agent_signal", "==", "DagParsingSignal", ".", "END_MANAGER", ":", "self", ".", "end", "(", ")", "sys", ".", "exit", "(", "os", ".", "EX_OK", ")", "elif", "agent_signal", "==", "DagParsingSignal", ".", "AGENT_HEARTBEAT", ":", "self", ".", "_refresh_dag_dir", "(", ")", "simple_dags", "=", "self", ".", "heartbeat", "(", ")", "for", "simple_dag", "in", "simple_dags", ":", "self", ".", "_result_queue", ".", "put", "(", "simple_dag", ")", "self", ".", "_print_stat", "(", ")", "all_files_processed", "=", "all", "(", "self", ".", "get_last_finish_time", "(", "x", ")", "is", "not", "None", "for", "x", "in", "self", ".", "file_paths", ")", "max_runs_reached", "=", "self", ".", "max_runs_reached", "(", ")", "dag_parsing_stat", "=", "DagParsingStat", "(", "self", ".", "_file_paths", ",", "self", ".", "get_all_pids", "(", ")", ",", "self", ".", "max_runs_reached", "(", ")", ",", "all_files_processed", ",", "len", "(", "simple_dags", ")", ")", "self", ".", "_stat_queue", ".", "put", "(", "dag_parsing_stat", ")", "self", ".", "wait_until_finished", "(", ")", "self", ".", "_signal_conn", ".", "send", "(", "DagParsingSignal", ".", "MANAGER_DONE", ")", "if", "max_runs_reached", ":", "self", ".", "log", ".", "info", "(", "\"Exiting dag parsing loop as all files \"", "\"have been processed %s times\"", ",", "self", ".", "_max_runs", ")", "self", ".", "_signal_conn", ".", "send", "(", "DagParsingSignal", ".", "MANAGER_DONE", ")", "break" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
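A hedged sketch of the heartbeat/done handshake that drives the sync loop, using a multiprocessing Pipe; the string sentinels below are assumptions standing in for DagParsingSignal members, not the real enum.

import multiprocessing

# String sentinels standing in for DagParsingSignal members (assumption for the demo).
AGENT_HEARTBEAT = "heartbeat"
MANAGER_DONE = "done"
TERMINATE_MANAGER = "terminate"


def manager_loop(conn):
    # Block on the agent's signal, do one unit of work per heartbeat, then report done.
    while True:
        sig = conn.recv()
        if sig == TERMINATE_MANAGER:
            break
        if sig == AGENT_HEARTBEAT:
            conn.send(MANAGER_DONE)


if __name__ == "__main__":
    parent_conn, child_conn = multiprocessing.Pipe()
    proc = multiprocessing.Process(target=manager_loop, args=(child_conn,))
    proc.start()

    for _ in range(3):
        parent_conn.send(AGENT_HEARTBEAT)   # agent side: one heartbeat per scheduler loop
        assert parent_conn.recv() == MANAGER_DONE

    parent_conn.send(TERMINATE_MANAGER)
    proc.join()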
test
DagFileProcessorManager._refresh_dag_dir
Refresh file paths from dag dir if we haven't done it for too long.
airflow/utils/dag_processing.py
def _refresh_dag_dir(self): """ Refresh file paths from dag dir if we haven't done it for too long. """ elapsed_time_since_refresh = (timezone.utcnow() - self.last_dag_dir_refresh_time).total_seconds() if elapsed_time_since_refresh > self.dag_dir_list_interval: # Build up a list of Python files that could contain DAGs self.log.info("Searching for files in %s", self._dag_directory) self._file_paths = list_py_file_paths(self._dag_directory) self.last_dag_dir_refresh_time = timezone.utcnow() self.log.info("There are %s files in %s", len(self._file_paths), self._dag_directory) self.set_file_paths(self._file_paths) try: self.log.debug("Removing old import errors") self.clear_nonexistent_import_errors() except Exception: self.log.exception("Error removing old import errors")
def _refresh_dag_dir(self): """ Refresh file paths from dag dir if we haven't done it for too long. """ elapsed_time_since_refresh = (timezone.utcnow() - self.last_dag_dir_refresh_time).total_seconds() if elapsed_time_since_refresh > self.dag_dir_list_interval: # Build up a list of Python files that could contain DAGs self.log.info("Searching for files in %s", self._dag_directory) self._file_paths = list_py_file_paths(self._dag_directory) self.last_dag_dir_refresh_time = timezone.utcnow() self.log.info("There are %s files in %s", len(self._file_paths), self._dag_directory) self.set_file_paths(self._file_paths) try: self.log.debug("Removing old import errors") self.clear_nonexistent_import_errors() except Exception: self.log.exception("Error removing old import errors")
[ "Refresh", "file", "paths", "from", "dag", "dir", "if", "we", "haven", "t", "done", "it", "for", "too", "long", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L900-L918
[ "def", "_refresh_dag_dir", "(", "self", ")", ":", "elapsed_time_since_refresh", "=", "(", "timezone", ".", "utcnow", "(", ")", "-", "self", ".", "last_dag_dir_refresh_time", ")", ".", "total_seconds", "(", ")", "if", "elapsed_time_since_refresh", ">", "self", ".", "dag_dir_list_interval", ":", "# Build up a list of Python files that could contain DAGs", "self", ".", "log", ".", "info", "(", "\"Searching for files in %s\"", ",", "self", ".", "_dag_directory", ")", "self", ".", "_file_paths", "=", "list_py_file_paths", "(", "self", ".", "_dag_directory", ")", "self", ".", "last_dag_dir_refresh_time", "=", "timezone", ".", "utcnow", "(", ")", "self", ".", "log", ".", "info", "(", "\"There are %s files in %s\"", ",", "len", "(", "self", ".", "_file_paths", ")", ",", "self", ".", "_dag_directory", ")", "self", ".", "set_file_paths", "(", "self", ".", "_file_paths", ")", "try", ":", "self", ".", "log", ".", "debug", "(", "\"Removing old import errors\"", ")", "self", ".", "clear_nonexistent_import_errors", "(", ")", "except", "Exception", ":", "self", ".", "log", ".", "exception", "(", "\"Error removing old import errors\"", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
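A minimal sketch of the interval-gated directory refresh, under the assumption that a plain os.walk stands in for Airflow's list_py_file_paths helper; DagDirRefresher and its 300-second default are illustrative names only.

import os
import time


def list_py_file_paths(directory):
    # Simplified stand-in: walk the tree and keep plain .py files.
    paths = []
    for root, _dirs, files in os.walk(directory):
        paths.extend(os.path.join(root, f) for f in files if f.endswith(".py"))
    return paths


class DagDirRefresher:
    def __init__(self, dag_directory, dag_dir_list_interval=300):
        self.dag_directory = dag_directory
        self.dag_dir_list_interval = dag_dir_list_interval
        self.last_refresh_time = 0.0
        self.file_paths = []

    def refresh_if_due(self):
        # Only re-list the directory when the configured interval has elapsed.
        if time.time() - self.last_refresh_time > self.dag_dir_list_interval:
            self.file_paths = list_py_file_paths(self.dag_directory)
            self.last_refresh_time = time.time()
        return self.file_paths


if __name__ == "__main__":
    refresher = DagDirRefresher(".", dag_dir_list_interval=5)
    print(len(refresher.refresh_if_due()), "python files found")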
test
DagFileProcessorManager._print_stat
Occasionally print out stats about how fast the files are getting processed
airflow/utils/dag_processing.py
def _print_stat(self): """ Occasionally print out stats about how fast the files are getting processed """ if ((timezone.utcnow() - self.last_stat_print_time).total_seconds() > self.print_stats_interval): if len(self._file_paths) > 0: self._log_file_processing_stats(self._file_paths) self.last_stat_print_time = timezone.utcnow()
def _print_stat(self): """ Occasionally print out stats about how fast the files are getting processed """ if ((timezone.utcnow() - self.last_stat_print_time).total_seconds() > self.print_stats_interval): if len(self._file_paths) > 0: self._log_file_processing_stats(self._file_paths) self.last_stat_print_time = timezone.utcnow()
[ "Occasionally", "print", "out", "stats", "about", "how", "fast", "the", "files", "are", "getting", "processed" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L920-L928
[ "def", "_print_stat", "(", "self", ")", ":", "if", "(", "(", "timezone", ".", "utcnow", "(", ")", "-", "self", ".", "last_stat_print_time", ")", ".", "total_seconds", "(", ")", ">", "self", ".", "print_stats_interval", ")", ":", "if", "len", "(", "self", ".", "_file_paths", ")", ">", "0", ":", "self", ".", "_log_file_processing_stats", "(", "self", ".", "_file_paths", ")", "self", ".", "last_stat_print_time", "=", "timezone", ".", "utcnow", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorManager.clear_nonexistent_import_errors
Clears import errors for files that no longer exist. :param session: session for ORM operations :type session: sqlalchemy.orm.session.Session
airflow/utils/dag_processing.py
def clear_nonexistent_import_errors(self, session): """ Clears import errors for files that no longer exist. :param session: session for ORM operations :type session: sqlalchemy.orm.session.Session """ query = session.query(errors.ImportError) if self._file_paths: query = query.filter( ~errors.ImportError.filename.in_(self._file_paths) ) query.delete(synchronize_session='fetch') session.commit()
def clear_nonexistent_import_errors(self, session): """ Clears import errors for files that no longer exist. :param session: session for ORM operations :type session: sqlalchemy.orm.session.Session """ query = session.query(errors.ImportError) if self._file_paths: query = query.filter( ~errors.ImportError.filename.in_(self._file_paths) ) query.delete(synchronize_session='fetch') session.commit()
[ "Clears", "import", "errors", "for", "files", "that", "no", "longer", "exist", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L931-L944
[ "def", "clear_nonexistent_import_errors", "(", "self", ",", "session", ")", ":", "query", "=", "session", ".", "query", "(", "errors", ".", "ImportError", ")", "if", "self", ".", "_file_paths", ":", "query", "=", "query", ".", "filter", "(", "~", "errors", ".", "ImportError", ".", "filename", ".", "in_", "(", "self", ".", "_file_paths", ")", ")", "query", ".", "delete", "(", "synchronize_session", "=", "'fetch'", ")", "session", ".", "commit", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
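A hedged sketch of the same delete-where-not-in pattern with SQLAlchemy against an in-memory SQLite database; ImportErrorRecord is a hypothetical stand-in for airflow.models.errors.ImportError, and the API shown assumes SQLAlchemy 1.4+.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class ImportErrorRecord(Base):
    # Hypothetical stand-in for the ImportError model used by the manager.
    __tablename__ = "import_error"
    id = Column(Integer, primary_key=True)
    filename = Column(String, nullable=False)


if __name__ == "__main__":
    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([ImportErrorRecord(filename=f)
                         for f in ("a.py", "b.py", "stale.py")])
        session.commit()

        current_file_paths = ["a.py", "b.py"]

        # Same shape as the manager code: delete every import error whose file
        # is not in the currently known set of DAG files.
        query = session.query(ImportErrorRecord)
        if current_file_paths:
            query = query.filter(~ImportErrorRecord.filename.in_(current_file_paths))
        query.delete(synchronize_session="fetch")
        session.commit()

        print([e.filename for e in session.query(ImportErrorRecord)])  # ['a.py', 'b.py']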
test
DagFileProcessorManager._log_file_processing_stats
Print out stats about how files are getting processed. :param known_file_paths: a list of file paths that may contain Airflow DAG definitions :type known_file_paths: list[unicode] :return: None
airflow/utils/dag_processing.py
def _log_file_processing_stats(self, known_file_paths): """ Print out stats about how files are getting processed. :param known_file_paths: a list of file paths that may contain Airflow DAG definitions :type known_file_paths: list[unicode] :return: None """ # File Path: Path to the file containing the DAG definition # PID: PID associated with the process that's processing the file. May # be empty. # Runtime: If the process is currently running, how long it's been # running for in seconds. # Last Runtime: If the process ran before, how long did it take to # finish in seconds # Last Run: When the file finished processing in the previous run. headers = ["File Path", "PID", "Runtime", "Last Runtime", "Last Run"] rows = [] for file_path in known_file_paths: last_runtime = self.get_last_runtime(file_path) file_name = os.path.basename(file_path) file_name = os.path.splitext(file_name)[0].replace(os.sep, '.') if last_runtime: Stats.gauge( 'dag_processing.last_runtime.{}'.format(file_name), last_runtime ) processor_pid = self.get_pid(file_path) processor_start_time = self.get_start_time(file_path) runtime = ((timezone.utcnow() - processor_start_time).total_seconds() if processor_start_time else None) last_run = self.get_last_finish_time(file_path) if last_run: seconds_ago = (timezone.utcnow() - last_run).total_seconds() Stats.gauge( 'dag_processing.last_run.seconds_ago.{}'.format(file_name), seconds_ago ) rows.append((file_path, processor_pid, runtime, last_runtime, last_run)) # Sort by longest last runtime. (Can't sort None values in python3) rows = sorted(rows, key=lambda x: x[3] or 0.0) formatted_rows = [] for file_path, pid, runtime, last_runtime, last_run in rows: formatted_rows.append((file_path, pid, "{:.2f}s".format(runtime) if runtime else None, "{:.2f}s".format(last_runtime) if last_runtime else None, last_run.strftime("%Y-%m-%dT%H:%M:%S") if last_run else None)) log_str = ("\n" + "=" * 80 + "\n" + "DAG File Processing Stats\n\n" + tabulate(formatted_rows, headers=headers) + "\n" + "=" * 80) self.log.info(log_str)
def _log_file_processing_stats(self, known_file_paths): """ Print out stats about how files are getting processed. :param known_file_paths: a list of file paths that may contain Airflow DAG definitions :type known_file_paths: list[unicode] :return: None """ # File Path: Path to the file containing the DAG definition # PID: PID associated with the process that's processing the file. May # be empty. # Runtime: If the process is currently running, how long it's been # running for in seconds. # Last Runtime: If the process ran before, how long did it take to # finish in seconds # Last Run: When the file finished processing in the previous run. headers = ["File Path", "PID", "Runtime", "Last Runtime", "Last Run"] rows = [] for file_path in known_file_paths: last_runtime = self.get_last_runtime(file_path) file_name = os.path.basename(file_path) file_name = os.path.splitext(file_name)[0].replace(os.sep, '.') if last_runtime: Stats.gauge( 'dag_processing.last_runtime.{}'.format(file_name), last_runtime ) processor_pid = self.get_pid(file_path) processor_start_time = self.get_start_time(file_path) runtime = ((timezone.utcnow() - processor_start_time).total_seconds() if processor_start_time else None) last_run = self.get_last_finish_time(file_path) if last_run: seconds_ago = (timezone.utcnow() - last_run).total_seconds() Stats.gauge( 'dag_processing.last_run.seconds_ago.{}'.format(file_name), seconds_ago ) rows.append((file_path, processor_pid, runtime, last_runtime, last_run)) # Sort by longest last runtime. (Can't sort None values in python3) rows = sorted(rows, key=lambda x: x[3] or 0.0) formatted_rows = [] for file_path, pid, runtime, last_runtime, last_run in rows: formatted_rows.append((file_path, pid, "{:.2f}s".format(runtime) if runtime else None, "{:.2f}s".format(last_runtime) if last_runtime else None, last_run.strftime("%Y-%m-%dT%H:%M:%S") if last_run else None)) log_str = ("\n" + "=" * 80 + "\n" + "DAG File Processing Stats\n\n" + tabulate(formatted_rows, headers=headers) + "\n" + "=" * 80) self.log.info(log_str)
[ "Print", "out", "stats", "about", "how", "files", "are", "getting", "processed", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L946-L1020
[ "def", "_log_file_processing_stats", "(", "self", ",", "known_file_paths", ")", ":", "# File Path: Path to the file containing the DAG definition", "# PID: PID associated with the process that's processing the file. May", "# be empty.", "# Runtime: If the process is currently running, how long it's been", "# running for in seconds.", "# Last Runtime: If the process ran before, how long did it take to", "# finish in seconds", "# Last Run: When the file finished processing in the previous run.", "headers", "=", "[", "\"File Path\"", ",", "\"PID\"", ",", "\"Runtime\"", ",", "\"Last Runtime\"", ",", "\"Last Run\"", "]", "rows", "=", "[", "]", "for", "file_path", "in", "known_file_paths", ":", "last_runtime", "=", "self", ".", "get_last_runtime", "(", "file_path", ")", "file_name", "=", "os", ".", "path", ".", "basename", "(", "file_path", ")", "file_name", "=", "os", ".", "path", ".", "splitext", "(", "file_name", ")", "[", "0", "]", ".", "replace", "(", "os", ".", "sep", ",", "'.'", ")", "if", "last_runtime", ":", "Stats", ".", "gauge", "(", "'dag_processing.last_runtime.{}'", ".", "format", "(", "file_name", ")", ",", "last_runtime", ")", "processor_pid", "=", "self", ".", "get_pid", "(", "file_path", ")", "processor_start_time", "=", "self", ".", "get_start_time", "(", "file_path", ")", "runtime", "=", "(", "(", "timezone", ".", "utcnow", "(", ")", "-", "processor_start_time", ")", ".", "total_seconds", "(", ")", "if", "processor_start_time", "else", "None", ")", "last_run", "=", "self", ".", "get_last_finish_time", "(", "file_path", ")", "if", "last_run", ":", "seconds_ago", "=", "(", "timezone", ".", "utcnow", "(", ")", "-", "last_run", ")", ".", "total_seconds", "(", ")", "Stats", ".", "gauge", "(", "'dag_processing.last_run.seconds_ago.{}'", ".", "format", "(", "file_name", ")", ",", "seconds_ago", ")", "rows", ".", "append", "(", "(", "file_path", ",", "processor_pid", ",", "runtime", ",", "last_runtime", ",", "last_run", ")", ")", "# Sort by longest last runtime. (Can't sort None values in python3)", "rows", "=", "sorted", "(", "rows", ",", "key", "=", "lambda", "x", ":", "x", "[", "3", "]", "or", "0.0", ")", "formatted_rows", "=", "[", "]", "for", "file_path", ",", "pid", ",", "runtime", ",", "last_runtime", ",", "last_run", "in", "rows", ":", "formatted_rows", ".", "append", "(", "(", "file_path", ",", "pid", ",", "\"{:.2f}s\"", ".", "format", "(", "runtime", ")", "if", "runtime", "else", "None", ",", "\"{:.2f}s\"", ".", "format", "(", "last_runtime", ")", "if", "last_runtime", "else", "None", ",", "last_run", ".", "strftime", "(", "\"%Y-%m-%dT%H:%M:%S\"", ")", "if", "last_run", "else", "None", ")", ")", "log_str", "=", "(", "\"\\n\"", "+", "\"=\"", "*", "80", "+", "\"\\n\"", "+", "\"DAG File Processing Stats\\n\\n\"", "+", "tabulate", "(", "formatted_rows", ",", "headers", "=", "headers", ")", "+", "\"\\n\"", "+", "\"=\"", "*", "80", ")", "self", ".", "log", ".", "info", "(", "log_str", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
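A small sketch of the stats-table rendering: rows are sorted with the same None-safe key and formatted with tabulate; the sample rows are fabricated placeholders.

from tabulate import tabulate  # third-party, the same dependency the manager uses

headers = ["File Path", "PID", "Runtime", "Last Runtime", "Last Run"]

# Placeholder rows: (file_path, pid, runtime_s, last_runtime_s, last_run_str).
rows = [
    ("dags/slow_dag.py", 4242, 1.8, 12.5, "2019-01-01T00:00:05"),
    ("dags/fast_dag.py", None, None, 0.4, "2019-01-01T00:00:01"),
    ("dags/new_dag.py", None, None, None, None),
]

# None cannot be compared with float in Python 3, so substitute 0.0 when sorting,
# exactly like the `x[3] or 0.0` key in the method above.
rows = sorted(rows, key=lambda x: x[3] or 0.0)

formatted = [
    (path, pid,
     "{:.2f}s".format(runtime) if runtime else None,
     "{:.2f}s".format(last_runtime) if last_runtime else None,
     last_run)
    for path, pid, runtime, last_runtime, last_run in rows
]

print("\n" + "=" * 80 + "\nDAG File Processing Stats\n\n"
      + tabulate(formatted, headers=headers) + "\n" + "=" * 80)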
test
DagFileProcessorManager.get_pid
:param file_path: the path to the file that's being processed :type file_path: unicode :return: the PID of the process processing the given file or None if the specified file is not being processed :rtype: int
airflow/utils/dag_processing.py
def get_pid(self, file_path): """ :param file_path: the path to the file that's being processed :type file_path: unicode :return: the PID of the process processing the given file or None if the specified file is not being processed :rtype: int """ if file_path in self._processors: return self._processors[file_path].pid return None
def get_pid(self, file_path): """ :param file_path: the path to the file that's being processed :type file_path: unicode :return: the PID of the process processing the given file or None if the specified file is not being processed :rtype: int """ if file_path in self._processors: return self._processors[file_path].pid return None
[ ":", "param", "file_path", ":", "the", "path", "to", "the", "file", "that", "s", "being", "processed", ":", "type", "file_path", ":", "unicode", ":", "return", ":", "the", "PID", "of", "the", "process", "processing", "the", "given", "file", "or", "None", "if", "the", "specified", "file", "is", "not", "being", "processed", ":", "rtype", ":", "int" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1026-L1036
[ "def", "get_pid", "(", "self", ",", "file_path", ")", ":", "if", "file_path", "in", "self", ".", "_processors", ":", "return", "self", ".", "_processors", "[", "file_path", "]", ".", "pid", "return", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorManager.get_runtime
:param file_path: the path to the file that's being processed :type file_path: unicode :return: the current runtime (in seconds) of the process that's processing the specified file or None if the file is not currently being processed
airflow/utils/dag_processing.py
def get_runtime(self, file_path): """ :param file_path: the path to the file that's being processed :type file_path: unicode :return: the current runtime (in seconds) of the process that's processing the specified file or None if the file is not currently being processed """ if file_path in self._processors: return (timezone.utcnow() - self._processors[file_path].start_time)\ .total_seconds() return None
def get_runtime(self, file_path): """ :param file_path: the path to the file that's being processed :type file_path: unicode :return: the current runtime (in seconds) of the process that's processing the specified file or None if the file is not currently being processed """ if file_path in self._processors: return (timezone.utcnow() - self._processors[file_path].start_time)\ .total_seconds() return None
[ ":", "param", "file_path", ":", "the", "path", "to", "the", "file", "that", "s", "being", "processed", ":", "type", "file_path", ":", "unicode", ":", "return", ":", "the", "current", "runtime", "(", "in", "seconds", ")", "of", "the", "process", "that", "s", "processing", "the", "specified", "file", "or", "None", "if", "the", "file", "is", "not", "currently", "being", "processed" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1045-L1056
[ "def", "get_runtime", "(", "self", ",", "file_path", ")", ":", "if", "file_path", "in", "self", ".", "_processors", ":", "return", "(", "timezone", ".", "utcnow", "(", ")", "-", "self", ".", "_processors", "[", "file_path", "]", ".", "start_time", ")", ".", "total_seconds", "(", ")", "return", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorManager.get_start_time
:param file_path: the path to the file that's being processed :type file_path: unicode :return: the start time of the process that's processing the specified file or None if the file is not currently being processed :rtype: datetime
airflow/utils/dag_processing.py
def get_start_time(self, file_path): """ :param file_path: the path to the file that's being processed :type file_path: unicode :return: the start time of the process that's processing the specified file or None if the file is not currently being processed :rtype: datetime """ if file_path in self._processors: return self._processors[file_path].start_time return None
def get_start_time(self, file_path): """ :param file_path: the path to the file that's being processed :type file_path: unicode :return: the start time of the process that's processing the specified file or None if the file is not currently being processed :rtype: datetime """ if file_path in self._processors: return self._processors[file_path].start_time return None
[ ":", "param", "file_path", ":", "the", "path", "to", "the", "file", "that", "s", "being", "processed", ":", "type", "file_path", ":", "unicode", ":", "return", ":", "the", "start", "time", "of", "the", "process", "that", "s", "processing", "the", "specified", "file", "or", "None", "if", "the", "file", "is", "not", "currently", "being", "processed", ":", "rtype", ":", "datetime" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1078-L1088
[ "def", "get_start_time", "(", "self", ",", "file_path", ")", ":", "if", "file_path", "in", "self", ".", "_processors", ":", "return", "self", ".", "_processors", "[", "file_path", "]", ".", "start_time", "return", "None" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorManager.set_file_paths
Update this with a new set of paths to DAG definition files. :param new_file_paths: list of paths to DAG definition files :type new_file_paths: list[unicode] :return: None
airflow/utils/dag_processing.py
def set_file_paths(self, new_file_paths): """ Update this with a new set of paths to DAG definition files. :param new_file_paths: list of paths to DAG definition files :type new_file_paths: list[unicode] :return: None """ self._file_paths = new_file_paths self._file_path_queue = [x for x in self._file_path_queue if x in new_file_paths] # Stop processors that are working on deleted files filtered_processors = {} for file_path, processor in self._processors.items(): if file_path in new_file_paths: filtered_processors[file_path] = processor else: self.log.warning("Stopping processor for %s", file_path) processor.terminate() self._processors = filtered_processors
def set_file_paths(self, new_file_paths): """ Update this with a new set of paths to DAG definition files. :param new_file_paths: list of paths to DAG definition files :type new_file_paths: list[unicode] :return: None """ self._file_paths = new_file_paths self._file_path_queue = [x for x in self._file_path_queue if x in new_file_paths] # Stop processors that are working on deleted files filtered_processors = {} for file_path, processor in self._processors.items(): if file_path in new_file_paths: filtered_processors[file_path] = processor else: self.log.warning("Stopping processor for %s", file_path) processor.terminate() self._processors = filtered_processors
[ "Update", "this", "with", "a", "new", "set", "of", "paths", "to", "DAG", "definition", "files", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1090-L1109
[ "def", "set_file_paths", "(", "self", ",", "new_file_paths", ")", ":", "self", ".", "_file_paths", "=", "new_file_paths", "self", ".", "_file_path_queue", "=", "[", "x", "for", "x", "in", "self", ".", "_file_path_queue", "if", "x", "in", "new_file_paths", "]", "# Stop processors that are working on deleted files", "filtered_processors", "=", "{", "}", "for", "file_path", ",", "processor", "in", "self", ".", "_processors", ".", "items", "(", ")", ":", "if", "file_path", "in", "new_file_paths", ":", "filtered_processors", "[", "file_path", "]", "=", "processor", "else", ":", "self", ".", "log", ".", "warning", "(", "\"Stopping processor for %s\"", ",", "file_path", ")", "processor", ".", "terminate", "(", ")", "self", ".", "_processors", "=", "filtered_processors" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
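A hedged sketch of reconciling the pending queue and the running-processor map against a new file list; FakeProcessor is a hypothetical stand-in exposing only the terminate() call the method relies on.

class FakeProcessor:
    # Hypothetical stand-in for a DAG file processor handle.
    def __init__(self, file_path):
        self.file_path = file_path
        self.terminated = False

    def terminate(self):
        self.terminated = True


def set_file_paths(state, new_file_paths):
    # Keep only queued entries that still exist in the new listing.
    state["file_path_queue"] = [p for p in state["file_path_queue"]
                                if p in new_file_paths]
    # Stop processors whose files were deleted; keep the rest running.
    kept = {}
    for path, processor in state["processors"].items():
        if path in new_file_paths:
            kept[path] = processor
        else:
            processor.terminate()
    state["processors"] = kept
    state["file_paths"] = new_file_paths


if __name__ == "__main__":
    state = {
        "file_paths": ["a.py", "b.py", "gone.py"],
        "file_path_queue": ["b.py", "gone.py"],
        "processors": {"gone.py": FakeProcessor("gone.py")},
    }
    set_file_paths(state, ["a.py", "b.py"])
    print(state["file_path_queue"], list(state["processors"]))  # ['b.py'] []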
test
DagFileProcessorManager.wait_until_finished
Sleeps until all the processors are done.
airflow/utils/dag_processing.py
def wait_until_finished(self): """ Sleeps until all the processors are done. """ for file_path, processor in self._processors.items(): while not processor.done: time.sleep(0.1)
def wait_until_finished(self): """ Sleeps until all the processors are done. """ for file_path, processor in self._processors.items(): while not processor.done: time.sleep(0.1)
[ "Sleeps", "until", "all", "the", "processors", "are", "done", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1118-L1124
[ "def", "wait_until_finished", "(", "self", ")", ":", "for", "file_path", ",", "processor", "in", "self", ".", "_processors", ".", "items", "(", ")", ":", "while", "not", "processor", ".", "done", ":", "time", ".", "sleep", "(", "0.1", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagFileProcessorManager.heartbeat
This should be periodically called by the manager loop. This method will kick off new processes to process DAG definition files and read the results from the finished processors. :return: a list of SimpleDags that were produced by processors that have finished since the last time this was called :rtype: list[airflow.utils.dag_processing.SimpleDag]
airflow/utils/dag_processing.py
def heartbeat(self): """ This should be periodically called by the manager loop. This method will kick off new processes to process DAG definition files and read the results from the finished processors. :return: a list of SimpleDags that were produced by processors that have finished since the last time this was called :rtype: list[airflow.utils.dag_processing.SimpleDag] """ finished_processors = {} """:type : dict[unicode, AbstractDagFileProcessor]""" running_processors = {} """:type : dict[unicode, AbstractDagFileProcessor]""" for file_path, processor in self._processors.items(): if processor.done: self.log.debug("Processor for %s finished", file_path) now = timezone.utcnow() finished_processors[file_path] = processor self._last_runtime[file_path] = (now - processor.start_time).total_seconds() self._last_finish_time[file_path] = now self._run_count[file_path] += 1 else: running_processors[file_path] = processor self._processors = running_processors self.log.debug("%s/%s DAG parsing processes running", len(self._processors), self._parallelism) self.log.debug("%s file paths queued for processing", len(self._file_path_queue)) # Collect all the DAGs that were found in the processed files simple_dags = [] for file_path, processor in finished_processors.items(): if processor.result is None: self.log.warning( "Processor for %s exited with return code %s.", processor.file_path, processor.exit_code ) else: for simple_dag in processor.result: simple_dags.append(simple_dag) # Generate more file paths to process if we processed all the files # already. if len(self._file_path_queue) == 0: # If the file path is already being processed, or if a file was # processed recently, wait until the next batch file_paths_in_progress = self._processors.keys() now = timezone.utcnow() file_paths_recently_processed = [] for file_path in self._file_paths: last_finish_time = self.get_last_finish_time(file_path) if (last_finish_time is not None and (now - last_finish_time).total_seconds() < self._file_process_interval): file_paths_recently_processed.append(file_path) files_paths_at_run_limit = [file_path for file_path, num_runs in self._run_count.items() if num_runs == self._max_runs] files_paths_to_queue = list(set(self._file_paths) - set(file_paths_in_progress) - set(file_paths_recently_processed) - set(files_paths_at_run_limit)) for file_path, processor in self._processors.items(): self.log.debug( "File path %s is still being processed (started: %s)", processor.file_path, processor.start_time.isoformat() ) self.log.debug( "Queuing the following files for processing:\n\t%s", "\n\t".join(files_paths_to_queue) ) self._file_path_queue.extend(files_paths_to_queue) zombies = self._find_zombies() # Start more processors if we have enough slots and files to process while (self._parallelism - len(self._processors) > 0 and len(self._file_path_queue) > 0): file_path = self._file_path_queue.pop(0) processor = self._processor_factory(file_path, zombies) processor.start() self.log.debug( "Started a process (PID: %s) to generate tasks for %s", processor.pid, file_path ) self._processors[file_path] = processor # Update heartbeat count. self._run_count[self._heart_beat_key] += 1 return simple_dags
def heartbeat(self): """ This should be periodically called by the manager loop. This method will kick off new processes to process DAG definition files and read the results from the finished processors. :return: a list of SimpleDags that were produced by processors that have finished since the last time this was called :rtype: list[airflow.utils.dag_processing.SimpleDag] """ finished_processors = {} """:type : dict[unicode, AbstractDagFileProcessor]""" running_processors = {} """:type : dict[unicode, AbstractDagFileProcessor]""" for file_path, processor in self._processors.items(): if processor.done: self.log.debug("Processor for %s finished", file_path) now = timezone.utcnow() finished_processors[file_path] = processor self._last_runtime[file_path] = (now - processor.start_time).total_seconds() self._last_finish_time[file_path] = now self._run_count[file_path] += 1 else: running_processors[file_path] = processor self._processors = running_processors self.log.debug("%s/%s DAG parsing processes running", len(self._processors), self._parallelism) self.log.debug("%s file paths queued for processing", len(self._file_path_queue)) # Collect all the DAGs that were found in the processed files simple_dags = [] for file_path, processor in finished_processors.items(): if processor.result is None: self.log.warning( "Processor for %s exited with return code %s.", processor.file_path, processor.exit_code ) else: for simple_dag in processor.result: simple_dags.append(simple_dag) # Generate more file paths to process if we processed all the files # already. if len(self._file_path_queue) == 0: # If the file path is already being processed, or if a file was # processed recently, wait until the next batch file_paths_in_progress = self._processors.keys() now = timezone.utcnow() file_paths_recently_processed = [] for file_path in self._file_paths: last_finish_time = self.get_last_finish_time(file_path) if (last_finish_time is not None and (now - last_finish_time).total_seconds() < self._file_process_interval): file_paths_recently_processed.append(file_path) files_paths_at_run_limit = [file_path for file_path, num_runs in self._run_count.items() if num_runs == self._max_runs] files_paths_to_queue = list(set(self._file_paths) - set(file_paths_in_progress) - set(file_paths_recently_processed) - set(files_paths_at_run_limit)) for file_path, processor in self._processors.items(): self.log.debug( "File path %s is still being processed (started: %s)", processor.file_path, processor.start_time.isoformat() ) self.log.debug( "Queuing the following files for processing:\n\t%s", "\n\t".join(files_paths_to_queue) ) self._file_path_queue.extend(files_paths_to_queue) zombies = self._find_zombies() # Start more processors if we have enough slots and files to process while (self._parallelism - len(self._processors) > 0 and len(self._file_path_queue) > 0): file_path = self._file_path_queue.pop(0) processor = self._processor_factory(file_path, zombies) processor.start() self.log.debug( "Started a process (PID: %s) to generate tasks for %s", processor.pid, file_path ) self._processors[file_path] = processor # Update heartbeat count. self._run_count[self._heart_beat_key] += 1 return simple_dags
[ "This", "should", "be", "periodically", "called", "by", "the", "manager", "loop", ".", "This", "method", "will", "kick", "off", "new", "processes", "to", "process", "DAG", "definition", "files", "and", "read", "the", "results", "from", "the", "finished", "processors", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1126-L1227
[ "def", "heartbeat", "(", "self", ")", ":", "finished_processors", "=", "{", "}", "\"\"\":type : dict[unicode, AbstractDagFileProcessor]\"\"\"", "running_processors", "=", "{", "}", "\"\"\":type : dict[unicode, AbstractDagFileProcessor]\"\"\"", "for", "file_path", ",", "processor", "in", "self", ".", "_processors", ".", "items", "(", ")", ":", "if", "processor", ".", "done", ":", "self", ".", "log", ".", "debug", "(", "\"Processor for %s finished\"", ",", "file_path", ")", "now", "=", "timezone", ".", "utcnow", "(", ")", "finished_processors", "[", "file_path", "]", "=", "processor", "self", ".", "_last_runtime", "[", "file_path", "]", "=", "(", "now", "-", "processor", ".", "start_time", ")", ".", "total_seconds", "(", ")", "self", ".", "_last_finish_time", "[", "file_path", "]", "=", "now", "self", ".", "_run_count", "[", "file_path", "]", "+=", "1", "else", ":", "running_processors", "[", "file_path", "]", "=", "processor", "self", ".", "_processors", "=", "running_processors", "self", ".", "log", ".", "debug", "(", "\"%s/%s DAG parsing processes running\"", ",", "len", "(", "self", ".", "_processors", ")", ",", "self", ".", "_parallelism", ")", "self", ".", "log", ".", "debug", "(", "\"%s file paths queued for processing\"", ",", "len", "(", "self", ".", "_file_path_queue", ")", ")", "# Collect all the DAGs that were found in the processed files", "simple_dags", "=", "[", "]", "for", "file_path", ",", "processor", "in", "finished_processors", ".", "items", "(", ")", ":", "if", "processor", ".", "result", "is", "None", ":", "self", ".", "log", ".", "warning", "(", "\"Processor for %s exited with return code %s.\"", ",", "processor", ".", "file_path", ",", "processor", ".", "exit_code", ")", "else", ":", "for", "simple_dag", "in", "processor", ".", "result", ":", "simple_dags", ".", "append", "(", "simple_dag", ")", "# Generate more file paths to process if we processed all the files", "# already.", "if", "len", "(", "self", ".", "_file_path_queue", ")", "==", "0", ":", "# If the file path is already being processed, or if a file was", "# processed recently, wait until the next batch", "file_paths_in_progress", "=", "self", ".", "_processors", ".", "keys", "(", ")", "now", "=", "timezone", ".", "utcnow", "(", ")", "file_paths_recently_processed", "=", "[", "]", "for", "file_path", "in", "self", ".", "_file_paths", ":", "last_finish_time", "=", "self", ".", "get_last_finish_time", "(", "file_path", ")", "if", "(", "last_finish_time", "is", "not", "None", "and", "(", "now", "-", "last_finish_time", ")", ".", "total_seconds", "(", ")", "<", "self", ".", "_file_process_interval", ")", ":", "file_paths_recently_processed", ".", "append", "(", "file_path", ")", "files_paths_at_run_limit", "=", "[", "file_path", "for", "file_path", ",", "num_runs", "in", "self", ".", "_run_count", ".", "items", "(", ")", "if", "num_runs", "==", "self", ".", "_max_runs", "]", "files_paths_to_queue", "=", "list", "(", "set", "(", "self", ".", "_file_paths", ")", "-", "set", "(", "file_paths_in_progress", ")", "-", "set", "(", "file_paths_recently_processed", ")", "-", "set", "(", "files_paths_at_run_limit", ")", ")", "for", "file_path", ",", "processor", "in", "self", ".", "_processors", ".", "items", "(", ")", ":", "self", ".", "log", ".", "debug", "(", "\"File path %s is still being processed (started: %s)\"", ",", "processor", ".", "file_path", ",", "processor", ".", "start_time", ".", "isoformat", "(", ")", ")", "self", ".", "log", ".", "debug", "(", "\"Queuing the following files for processing:\\n\\t%s\"", 
",", "\"\\n\\t\"", ".", "join", "(", "files_paths_to_queue", ")", ")", "self", ".", "_file_path_queue", ".", "extend", "(", "files_paths_to_queue", ")", "zombies", "=", "self", ".", "_find_zombies", "(", ")", "# Start more processors if we have enough slots and files to process", "while", "(", "self", ".", "_parallelism", "-", "len", "(", "self", ".", "_processors", ")", ">", "0", "and", "len", "(", "self", ".", "_file_path_queue", ")", ">", "0", ")", ":", "file_path", "=", "self", ".", "_file_path_queue", ".", "pop", "(", "0", ")", "processor", "=", "self", ".", "_processor_factory", "(", "file_path", ",", "zombies", ")", "processor", ".", "start", "(", ")", "self", ".", "log", ".", "debug", "(", "\"Started a process (PID: %s) to generate tasks for %s\"", ",", "processor", ".", "pid", ",", "file_path", ")", "self", ".", "_processors", "[", "file_path", "]", "=", "processor", "# Update heartbeat count.", "self", ".", "_run_count", "[", "self", ".", "_heart_beat_key", "]", "+=", "1", "return", "simple_dags" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
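A hedged sketch of the scheduling decision at the heart of heartbeat(): queue everything known, minus files already in progress, minus files finished too recently, minus files that have hit the run limit. The sample timestamps, the 30-second interval, and the run counts are assumptions.

from datetime import datetime, timedelta

file_paths = ["a.py", "b.py", "c.py", "d.py"]
in_progress = {"a.py"}                       # currently owned by a processor
run_count = {"d.py": 3}                      # runs completed per file
max_runs = 3                                 # -1 would mean unlimited
file_process_interval = 30                   # seconds between passes over one file
now = datetime(2019, 1, 1, 0, 1, 0)
last_finish_time = {"b.py": now - timedelta(seconds=5)}   # finished too recently

recently_processed = [
    p for p in file_paths
    if p in last_finish_time
    and (now - last_finish_time[p]).total_seconds() < file_process_interval
]
at_run_limit = [p for p, runs in run_count.items() if runs == max_runs]

# Same set arithmetic as the method above.
to_queue = list(set(file_paths)
                - set(in_progress)
                - set(recently_processed)
                - set(at_run_limit))
print(sorted(to_queue))  # ['c.py']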
test
DagFileProcessorManager._find_zombies
Find zombie task instances, which are tasks haven't heartbeated for too long. :return: Zombie task instances in SimpleTaskInstance format.
airflow/utils/dag_processing.py
def _find_zombies(self, session): """ Find zombie task instances, which are tasks haven't heartbeated for too long. :return: Zombie task instances in SimpleTaskInstance format. """ now = timezone.utcnow() zombies = [] if (now - self._last_zombie_query_time).total_seconds() \ > self._zombie_query_interval: # to avoid circular imports from airflow.jobs import LocalTaskJob as LJ self.log.info("Finding 'running' jobs without a recent heartbeat") TI = airflow.models.TaskInstance limit_dttm = timezone.utcnow() - timedelta( seconds=self._zombie_threshold_secs) self.log.info("Failing jobs without heartbeat after %s", limit_dttm) tis = ( session.query(TI) .join(LJ, TI.job_id == LJ.id) .filter(TI.state == State.RUNNING) .filter( or_( LJ.state != State.RUNNING, LJ.latest_heartbeat < limit_dttm, ) ).all() ) self._last_zombie_query_time = timezone.utcnow() for ti in tis: zombies.append(SimpleTaskInstance(ti)) return zombies
def _find_zombies(self, session): """ Find zombie task instances, which are tasks haven't heartbeated for too long. :return: Zombie task instances in SimpleTaskInstance format. """ now = timezone.utcnow() zombies = [] if (now - self._last_zombie_query_time).total_seconds() \ > self._zombie_query_interval: # to avoid circular imports from airflow.jobs import LocalTaskJob as LJ self.log.info("Finding 'running' jobs without a recent heartbeat") TI = airflow.models.TaskInstance limit_dttm = timezone.utcnow() - timedelta( seconds=self._zombie_threshold_secs) self.log.info("Failing jobs without heartbeat after %s", limit_dttm) tis = ( session.query(TI) .join(LJ, TI.job_id == LJ.id) .filter(TI.state == State.RUNNING) .filter( or_( LJ.state != State.RUNNING, LJ.latest_heartbeat < limit_dttm, ) ).all() ) self._last_zombie_query_time = timezone.utcnow() for ti in tis: zombies.append(SimpleTaskInstance(ti)) return zombies
[ "Find", "zombie", "task", "instances", "which", "are", "tasks", "haven", "t", "heartbeated", "for", "too", "long", ".", ":", "return", ":", "Zombie", "task", "instances", "in", "SimpleTaskInstance", "format", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1230-L1262
[ "def", "_find_zombies", "(", "self", ",", "session", ")", ":", "now", "=", "timezone", ".", "utcnow", "(", ")", "zombies", "=", "[", "]", "if", "(", "now", "-", "self", ".", "_last_zombie_query_time", ")", ".", "total_seconds", "(", ")", ">", "self", ".", "_zombie_query_interval", ":", "# to avoid circular imports", "from", "airflow", ".", "jobs", "import", "LocalTaskJob", "as", "LJ", "self", ".", "log", ".", "info", "(", "\"Finding 'running' jobs without a recent heartbeat\"", ")", "TI", "=", "airflow", ".", "models", ".", "TaskInstance", "limit_dttm", "=", "timezone", ".", "utcnow", "(", ")", "-", "timedelta", "(", "seconds", "=", "self", ".", "_zombie_threshold_secs", ")", "self", ".", "log", ".", "info", "(", "\"Failing jobs without heartbeat after %s\"", ",", "limit_dttm", ")", "tis", "=", "(", "session", ".", "query", "(", "TI", ")", ".", "join", "(", "LJ", ",", "TI", ".", "job_id", "==", "LJ", ".", "id", ")", ".", "filter", "(", "TI", ".", "state", "==", "State", ".", "RUNNING", ")", ".", "filter", "(", "or_", "(", "LJ", ".", "state", "!=", "State", ".", "RUNNING", ",", "LJ", ".", "latest_heartbeat", "<", "limit_dttm", ",", ")", ")", ".", "all", "(", ")", ")", "self", ".", "_last_zombie_query_time", "=", "timezone", ".", "utcnow", "(", ")", "for", "ti", "in", "tis", ":", "zombies", ".", "append", "(", "SimpleTaskInstance", "(", "ti", ")", ")", "return", "zombies" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
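A hedged, ORM-free sketch of the zombie predicate itself: a task counts as a zombie when it is marked running but its job is no longer running or has not heartbeated within the threshold. The Task namedtuple and the 300-second threshold are illustrative assumptions, not Airflow models.

from collections import namedtuple
from datetime import datetime, timedelta

Task = namedtuple("Task", "task_id state job_state latest_heartbeat")

RUNNING = "running"
zombie_threshold_secs = 300          # illustrative threshold
now = datetime(2019, 1, 1, 12, 0, 0)
limit_dttm = now - timedelta(seconds=zombie_threshold_secs)

tasks = [
    Task("healthy", RUNNING, RUNNING, now - timedelta(seconds=10)),
    Task("stale_heartbeat", RUNNING, RUNNING, now - timedelta(seconds=600)),
    Task("dead_job", RUNNING, "failed", now - timedelta(seconds=10)),
    Task("not_running", "queued", RUNNING, now - timedelta(seconds=600)),
]

# Same predicate as the SQL query above, expressed over plain objects:
# running task AND (job not running OR heartbeat older than the limit).
zombies = [
    t for t in tasks
    if t.state == RUNNING and (t.job_state != RUNNING or t.latest_heartbeat < limit_dttm)
]
print([t.task_id for t in zombies])  # ['stale_heartbeat', 'dead_job']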
test
DagFileProcessorManager.max_runs_reached
:return: whether all file paths have been processed max_runs times
airflow/utils/dag_processing.py
def max_runs_reached(self): """ :return: whether all file paths have been processed max_runs times """ if self._max_runs == -1: # Unlimited runs. return False for file_path in self._file_paths: if self._run_count[file_path] < self._max_runs: return False if self._run_count[self._heart_beat_key] < self._max_runs: return False return True
def max_runs_reached(self): """ :return: whether all file paths have been processed max_runs times """ if self._max_runs == -1: # Unlimited runs. return False for file_path in self._file_paths: if self._run_count[file_path] < self._max_runs: return False if self._run_count[self._heart_beat_key] < self._max_runs: return False return True
[ ":", "return", ":", "whether", "all", "file", "paths", "have", "been", "processed", "max_runs", "times" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1264-L1275
[ "def", "max_runs_reached", "(", "self", ")", ":", "if", "self", ".", "_max_runs", "==", "-", "1", ":", "# Unlimited runs.", "return", "False", "for", "file_path", "in", "self", ".", "_file_paths", ":", "if", "self", ".", "_run_count", "[", "file_path", "]", "<", "self", ".", "_max_runs", ":", "return", "False", "if", "self", ".", "_run_count", "[", "self", ".", "_heart_beat_key", "]", "<", "self", ".", "_max_runs", ":", "return", "False", "return", "True" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
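A short sketch of the run-limit check with a defaultdict counter; the HEART_BEAT_KEY name is an assumption mirroring the manager's internal heartbeat counter.

from collections import defaultdict

HEART_BEAT_KEY = "heart-beat"   # assumed sentinel key for the manager's own heartbeat count


def max_runs_reached(file_paths, run_count, max_runs):
    # -1 means unlimited runs, so the loop can never be "done".
    if max_runs == -1:
        return False
    # Every known file, and the manager heartbeat itself, must have hit the limit.
    if any(run_count[p] < max_runs for p in file_paths):
        return False
    return run_count[HEART_BEAT_KEY] >= max_runs


if __name__ == "__main__":
    counts = defaultdict(int, {"a.py": 2, "b.py": 2, HEART_BEAT_KEY: 2})
    print(max_runs_reached(["a.py", "b.py"], counts, 2))   # True
    print(max_runs_reached(["a.py", "b.py"], counts, 3))   # False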
test
DagFileProcessorManager.end
Kill all child processes on exit since we don't want to leave them as orphaned.
airflow/utils/dag_processing.py
def end(self): """ Kill all child processes on exit since we don't want to leave them as orphaned. """ pids_to_kill = self.get_all_pids() if len(pids_to_kill) > 0: # First try SIGTERM this_process = psutil.Process(os.getpid()) # Only check child processes to ensure that we don't have a case # where we kill the wrong process because a child process died # but the PID got reused. child_processes = [x for x in this_process.children(recursive=True) if x.is_running() and x.pid in pids_to_kill] for child in child_processes: self.log.info("Terminating child PID: %s", child.pid) child.terminate() # TODO: Remove magic number timeout = 5 self.log.info("Waiting up to %s seconds for processes to exit...", timeout) try: psutil.wait_procs( child_processes, timeout=timeout, callback=lambda x: self.log.info('Terminated PID %s', x.pid)) except psutil.TimeoutExpired: self.log.debug("Ran out of time while waiting for processes to exit") # Then SIGKILL child_processes = [x for x in this_process.children(recursive=True) if x.is_running() and x.pid in pids_to_kill] if len(child_processes) > 0: self.log.info("SIGKILL processes that did not terminate gracefully") for child in child_processes: self.log.info("Killing child PID: %s", child.pid) child.kill() child.wait()
def end(self): """ Kill all child processes on exit since we don't want to leave them as orphaned. """ pids_to_kill = self.get_all_pids() if len(pids_to_kill) > 0: # First try SIGTERM this_process = psutil.Process(os.getpid()) # Only check child processes to ensure that we don't have a case # where we kill the wrong process because a child process died # but the PID got reused. child_processes = [x for x in this_process.children(recursive=True) if x.is_running() and x.pid in pids_to_kill] for child in child_processes: self.log.info("Terminating child PID: %s", child.pid) child.terminate() # TODO: Remove magic number timeout = 5 self.log.info("Waiting up to %s seconds for processes to exit...", timeout) try: psutil.wait_procs( child_processes, timeout=timeout, callback=lambda x: self.log.info('Terminated PID %s', x.pid)) except psutil.TimeoutExpired: self.log.debug("Ran out of time while waiting for processes to exit") # Then SIGKILL child_processes = [x for x in this_process.children(recursive=True) if x.is_running() and x.pid in pids_to_kill] if len(child_processes) > 0: self.log.info("SIGKILL processes that did not terminate gracefully") for child in child_processes: self.log.info("Killing child PID: %s", child.pid) child.kill() child.wait()
[ "Kill", "all", "child", "processes", "on", "exit", "since", "we", "don", "t", "want", "to", "leave", "them", "as", "orphaned", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/dag_processing.py#L1285-L1320
[ "def", "end", "(", "self", ")", ":", "pids_to_kill", "=", "self", ".", "get_all_pids", "(", ")", "if", "len", "(", "pids_to_kill", ")", ">", "0", ":", "# First try SIGTERM", "this_process", "=", "psutil", ".", "Process", "(", "os", ".", "getpid", "(", ")", ")", "# Only check child processes to ensure that we don't have a case", "# where we kill the wrong process because a child process died", "# but the PID got reused.", "child_processes", "=", "[", "x", "for", "x", "in", "this_process", ".", "children", "(", "recursive", "=", "True", ")", "if", "x", ".", "is_running", "(", ")", "and", "x", ".", "pid", "in", "pids_to_kill", "]", "for", "child", "in", "child_processes", ":", "self", ".", "log", ".", "info", "(", "\"Terminating child PID: %s\"", ",", "child", ".", "pid", ")", "child", ".", "terminate", "(", ")", "# TODO: Remove magic number", "timeout", "=", "5", "self", ".", "log", ".", "info", "(", "\"Waiting up to %s seconds for processes to exit...\"", ",", "timeout", ")", "try", ":", "psutil", ".", "wait_procs", "(", "child_processes", ",", "timeout", "=", "timeout", ",", "callback", "=", "lambda", "x", ":", "self", ".", "log", ".", "info", "(", "'Terminated PID %s'", ",", "x", ".", "pid", ")", ")", "except", "psutil", ".", "TimeoutExpired", ":", "self", ".", "log", ".", "debug", "(", "\"Ran out of time while waiting for processes to exit\"", ")", "# Then SIGKILL", "child_processes", "=", "[", "x", "for", "x", "in", "this_process", ".", "children", "(", "recursive", "=", "True", ")", "if", "x", ".", "is_running", "(", ")", "and", "x", ".", "pid", "in", "pids_to_kill", "]", "if", "len", "(", "child_processes", ")", ">", "0", ":", "self", ".", "log", ".", "info", "(", "\"SIGKILL processes that did not terminate gracefully\"", ")", "for", "child", "in", "child_processes", ":", "self", ".", "log", ".", "info", "(", "\"Killing child PID: %s\"", ",", "child", ".", "pid", ")", "child", ".", "kill", "(", ")", "child", ".", "wait", "(", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
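The DagFileProcessorManager.end record above follows the common terminate-then-kill shutdown pattern. Here is a stand-alone sketch of that pattern using psutil; the terminate_children function name and the 5-second timeout are illustrative choices, not an Airflow API, and the sketch deliberately omits the logging done by the real method.

# Sketch: send SIGTERM to the relevant child processes, give them a few
# seconds to exit, then SIGKILL and reap whatever is still alive.
import os
import psutil

def terminate_children(pids_to_kill, timeout=5):
    this_process = psutil.Process(os.getpid())
    children = [c for c in this_process.children(recursive=True)
                if c.is_running() and c.pid in pids_to_kill]
    for child in children:
        child.terminate()            # polite request: SIGTERM
    gone, alive = psutil.wait_procs(children, timeout=timeout)
    for child in alive:              # escalate: SIGKILL, then reap
        child.kill()
        child.wait()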