dag3_hpc_pipelines.py

from datetime import timedelta
from airflow.models import DAG, Variable
from airflow.utils.dates import days_ago
from airflow.operators.bash_operator import BashOperator

# Task-level defaults: retry a failed task once after two minutes and send
# no failure/retry emails.
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': days_ago(2),
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=2),
}

# Trigger every five minutes, never run two DAG runs concurrently, and skip
# catchup backfills for intervals missed while the scheduler was down.
dag = \
    DAG(
        dag_id='dag3_hpc_pipelines',
        catchup=False,
        schedule_interval="*/5 * * * *",
        max_active_runs=1,
        tags=['hpc'],
        default_args=default_args)

with dag:
    # Both tasks are routed to the 'hpc_4G' worker queue. The trailing space
    # in each bash_command is deliberate: without it Airflow would treat the
    # '.sh' path as a Jinja template file and fail to render it.
    run_demultiplexing_pipeline = \
        BashOperator(
            task_id='run_demultiplexing_pipeline',
            dag=dag,
            queue='hpc_4G',
            bash_command='bash /rds/general/user/igf/home/git_repo/IGF-cron-scripts/hpc/run_demultiplexing_pipeline.sh '
        )
    run_primary_analysis_pipeline = \
        BashOperator(
            task_id='run_primary_analysis_pipeline',
            dag=dag,
            queue='hpc_4G',
            bash_command='bash /rds/general/user/igf/home/git_repo/IGF-cron-scripts/hpc/run_primary_analysis_pipeline.sh '
        )

    # Demultiplexing must complete before primary analysis starts.
    run_demultiplexing_pipeline >> run_primary_analysis_pipeline
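
# ---------------------------------------------------------------------------
# A minimal sketch for exercising this DAG outside the scheduler, assuming an
# Airflow 1.10-era install (matching the airflow.utils.dates and
# bash_operator import paths above); the date below is purely illustrative.
# A single task instance can be run locally with the stock CLI:
#
#   airflow test dag3_hpc_pipelines run_demultiplexing_pipeline 2021-01-01
#
# Exposing the DAG-scoped CLI from this file follows the pattern shown in the
# Airflow tutorial:
if __name__ == "__main__":
    dag.cli()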