dag5_primary_analysis_and_qc_processing.py

from datetime import timedelta
from airflow.models import DAG, Variable
from airflow.utils.dates import days_ago
from airflow.operators.bash_operator import BashOperator
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.contrib.hooks.ssh_hook import SSHHook

# default task arguments applied to every operator in this DAG
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': days_ago(2),
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=5),
}

# SSH hook for the orwell server, configured from Airflow Variables
orwell_ssh_hook = \
    SSHHook(
        key_file=Variable.get('hpc_ssh_key_file'),
        username=Variable.get('hpc_user'),
        remote_host='orwell.hh.med.ic.ac.uk')
# SSH hook for the HPC cluster, using the Airflow connection 'hpc_conn'
hpc_hook = SSHHook(ssh_conn_id='hpc_conn')

# hourly schedule, limited to a single active run at a time
dag = \
    DAG(
        dag_id='dag5_primary_analysis_and_qc_processing',
        schedule_interval="@hourly",
        max_active_runs=1,
        tags=['hpc', 'orwell'],
        default_args=default_args)

with dag:
    # run the experiment metadata update script (hpc_4G queue)
    update_exp_metadata = \
        BashOperator(
            task_id='update_exp_metadata',
            dag=dag,
            queue='hpc_4G',
            bash_command='bash /rds/general/user/igf/home/git_repo/IGF-cron-scripts/hpc/update_exp_metadata.sh ')
    # look for new experiments ready for analysis on the orwell server over SSH
    find_new_exp_for_analysis = \
        SSHOperator(
            task_id='find_new_exp_for_analysis',
            dag=dag,
            ssh_hook=orwell_ssh_hook,
            queue='hpc_4G',
            command='bash /home/igf/igf_code/IGF-cron-scripts/orwell/find_new_exp_for_analysis.sh ')
    # seed the analysis pipeline on the HPC cluster over SSH
    seed_analysis_pipeline = \
        SSHOperator(
            task_id='seed_analysis_pipeline',
            dag=dag,
            ssh_hook=hpc_hook,
            queue='hpc_4G',
            command='bash /rds/general/user/igf/home/git_repo/IGF-cron-scripts/hpc/seed_analysis_pipeline.sh ')
    # task order: metadata update -> experiment discovery -> pipeline seeding
    update_exp_metadata >> find_new_exp_for_analysis >> seed_analysis_pipeline