
The concurrency check uses the Postgres DB to look at the dag_run table to determine what is currently running. update_site and deploy_site have been added/refactored to be a linear flow, with a function as an error handler. This was done to prevent runs of downstream error-handling steps when their parent was set to the upstream_failed status.

Added a simple test for the failure handler.
Added testing for the concurrency check logic.
Added requirements for testing purposes.

Change-Id: I29a2509df999b4714a9ade2ebabac7522504e24e
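For context, a minimal sketch of how an operator might perform such a check against the dag_run table through Airflow's metadata DB session is shown below. This is an illustrative approximation only, not the ConcurrencyCheckOperator shipped in this change; the class name SimpleConcurrencyCheck and the way the target dag_id is derived are assumptions.

from airflow.exceptions import AirflowException
from airflow.models import BaseOperator, DagRun
from airflow.utils.db import provide_session
from airflow.utils.state import State


class SimpleConcurrencyCheck(BaseOperator):
    '''Illustrative check: fail if another run of this DAG is active.'''

    @provide_session
    def execute(self, context, session=None):
        # In a sub-DAG the dag_id is '<parent>.<child>'; check the parent.
        dag_id = context['dag'].dag_id.split('.')[0]

        # Count rows in the dag_run table (the Postgres metadata DB here)
        # for this DAG that are still in the RUNNING state.
        running = (session.query(DagRun)
                   .filter(DagRun.dag_id == dag_id,
                           DagRun.state == State.RUNNING)
                   .count())

        # The current run is itself one of those rows; more than one
        # means another instance is already in flight.
        if running > 1:
            raise AirflowException(
                '{} is already running; failing this run.'.format(dag_id))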
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from airflow.models import DAG
from airflow.operators import ConcurrencyCheckOperator


def dag_concurrency_check(parent_dag_name, child_dag_name, args):
    '''
    dag_concurrency_check is a sub-DAG that allows a DAG to determine
    whether it is already running, and results in an error if so.
    '''
    dag = DAG(
        '{}.{}'.format(parent_dag_name, child_dag_name),
        default_args=args)

    # The check fails this task (and therefore the parent DAG) if another
    # run of the same DAG is already in progress.
    dag_concurrency_check_operator = ConcurrencyCheckOperator(
        task_id='dag_concurrency_check', dag=dag)

    return dag
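As a usage sketch (not this change's actual update_site/deploy_site definitions), the sub-DAG could be wired into a parent DAG through a SubDagOperator, with the error-handler function attached as an on_failure_callback so failure handling runs as soon as a task fails rather than sitting downstream of a task marked upstream_failed. The parent DAG name, schedule, import path, and step_failure_handler below are assumptions for illustration.

from datetime import datetime

from airflow.models import DAG
from airflow.operators.subdag_operator import SubDagOperator

# Hypothetical import path for the factory defined above.
from dag_concurrency_check import dag_concurrency_check

PARENT_DAG_NAME = 'update_site'  # assumed parent DAG name


def step_failure_handler(context):
    # Hypothetical error handler: called as soon as its task fails, so
    # error handling does not depend on a downstream task that may be
    # marked upstream_failed and therefore never run.
    print('Task {} failed'.format(context['task_instance'].task_id))


default_args = {
    'owner': 'airflow',
    'start_date': datetime(2017, 1, 1),
    'on_failure_callback': step_failure_handler,
}

parent_dag = DAG(PARENT_DAG_NAME,
                 default_args=default_args,
                 schedule_interval=None)

# The concurrency check runs first; if another instance of the parent DAG
# is already running, this task fails and the rest of the flow stops.
concurrency_check = SubDagOperator(
    subdag=dag_concurrency_check(PARENT_DAG_NAME,
                                 'dag_concurrency_check',
                                 args=default_args),
    task_id='dag_concurrency_check',
    dag=parent_dag)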