dag14_crick_seqrun_transfer.py 3.4 KB

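## NOTE: module overview below is a summary inferred from the tasks defined in this file.
"""
Airflow DAG (dag14_crick_seqrun_transfer) for fetching a sequencing run from the
Crick FTP server and unpacking it on the orwell server.

The DAG is unscheduled and expects a manual trigger with `seqrun_id` in
dag_run.conf. Task 1 runs transfer_seqrun_from_crick.py over SSH to fetch the
run tarball into seqrun_base_path; task 2 extracts it into a temp_<seqrun_id>
directory, failing if the final run directory already exists.
"""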
from datetime import timedelta
from airflow.models import DAG, Variable
from airflow.utils.dates import days_ago
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.contrib.hooks.ssh_hook import SSHHook
from airflow.operators.python_operator import PythonOperator
from igf_airflow.utils.dag14_crick_seqrun_transfer_utils import check_and_transfer_run_func
from igf_airflow.utils.dag14_crick_seqrun_transfer_utils import extract_tar_file_func

FTP_SEQRUN_SERVER = Variable.get('crick_ftp_seqrun_hostname')
FTP_CONFIG_FILE = Variable.get('crick_ftp_config_file')
SEQRUN_BASE_PATH = Variable.get('seqrun_base_path')
HPC_SEQRUN_BASE_PATH = Variable.get('hpc_seqrun_path')
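
## The four Variables above (plus the SSH ones used below) must exist in the
## Airflow metadata DB before the scheduler parses this file. A minimal setup
## sketch using the Airflow 1.10 CLI; every value here is a placeholder:
#
#   airflow variables --set crick_ftp_seqrun_hostname ftp.example.org
#   airflow variables --set crick_ftp_config_file /path/to/crick_ftp_config.json
#   airflow variables --set seqrun_base_path /home/igf/seqrun
#   airflow variables --set hpc_seqrun_path /path/to/hpc/seqrun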
args = {
  'owner': 'airflow',
  'start_date': days_ago(2),
  'retries': 1,
  'retry_delay': timedelta(minutes=5),
  'provide_context': True,
  'email_on_failure': False,
  'email_on_retry': False,
}
## SSH HOOK
orwell_ssh_hook = \
  SSHHook(
    key_file=Variable.get('hpc_ssh_key_file'),
    username=Variable.get('hpc_user'),
    remote_host=Variable.get('orwell_server_hostname'))
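## The hook runs both SSHOperator tasks below on orwell as the HPC user;
## 'hpc_ssh_key_file' must point to a private key readable by the Airflow worker.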
dag = \
  DAG(
    dag_id='dag14_crick_seqrun_transfer',
    schedule_interval=None,
    catchup=False,
    max_active_runs=1,
    default_args=args,
    tags=['ftp', 'hpc', 'orwell'])
with dag:
  ## TASK
  # not working on HPC
  #check_and_transfer_run = \
  #  PythonOperator(
  #    task_id='check_and_transfer_run',
  #    dag=dag,
  #    pool='crick_ftp_pool',
  #    queue='hpc_4G',
  #    params={'ftp_seqrun_server': FTP_SEQRUN_SERVER,
  #            'hpc_seqrun_base_path': HPC_SEQRUN_BASE_PATH,
  #            'ftp_config_file': FTP_CONFIG_FILE},
  #    python_callable=check_and_transfer_run_func)
  ## TASK
  #extract_tar_file = \
  #  PythonOperator(
  #    task_id='extract_tar_file',
  #    dag=dag,
  #    queue='hpc_4G',
  #    params={'hpc_seqrun_base_path': HPC_SEQRUN_BASE_PATH},
  #    python_callable=extract_tar_file_func)
  check_and_transfer_run = \
    SSHOperator(
      task_id='check_and_transfer_run',
      dag=dag,
      pool='crick_ftp_pool',
      ssh_hook=orwell_ssh_hook,
      do_xcom_push=False,
      queue='hpc_4G',
      params={'ftp_seqrun_server': FTP_SEQRUN_SERVER,
              'seqrun_base_path': SEQRUN_BASE_PATH,
              'ftp_config_file': FTP_CONFIG_FILE},
      command="""
        source /home/igf/igf_code/airflow/env.sh;
        python /home/igf/igf_code/airflow/data-management-python/scripts/ftp_seqrun_transfer/transfer_seqrun_from_crick.py \
          -f {{ params.ftp_seqrun_server }} \
          -s {{ dag_run.conf["seqrun_id"] }} \
          -d {{ params.seqrun_base_path }} \
          -c {{ params.ftp_config_file }}
        """)
  # TASK
  extract_tar_file = \
    SSHOperator(
      task_id='extract_tar_file',
      dag=dag,
      pool='orwell_exe_pool',
      ssh_hook=orwell_ssh_hook,
      do_xcom_push=False,
      queue='hpc_4G',
      params={'seqrun_base_path': SEQRUN_BASE_PATH},
      command="""
        cd {{ params.seqrun_base_path }};
        if [ -d {{ dag_run.conf["seqrun_id"] }} ];
        then
          echo "Seqrun dir exists";
          exit 1;
        else
          mkdir -p temp_{{ dag_run.conf["seqrun_id"] }};
          tar \
            --no-same-owner \
            --no-same-permissions \
            --owner=igf \
            -xzf {{ dag_run.conf["seqrun_id"] }}.tar.gz -C temp_{{ dag_run.conf["seqrun_id"] }}
        fi
        """
      )
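  ## extract_tar_file fails fast if the final run directory already exists and
  ## otherwise unpacks the tarball into temp_<seqrun_id>; promoting that temp
  ## directory to the final run directory is not handled by this DAG.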
  ## PIPELINE
  check_and_transfer_run >> extract_tar_file
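
## The DAG is unscheduled (schedule_interval=None) and expects a manual trigger
## with the run id in dag_run.conf; a sketch using the Airflow 1.10 CLI, where
## the seqrun_id value is a made-up example:
#
#   airflow trigger_dag dag14_crick_seqrun_transfer \
#     --conf '{"seqrun_id": "210101_K00001_0001_EXAMPLE"}'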