Browse Source

added DAG for LIMS data parsing

Avik Datta 4 years ago
parent
commit
8a06b15664
2 changed files with 37 additions and 0 deletions
  1. 2 0
      dags/dag3_hpc_pipelines.py
  2. 35 0
      dags/dag4_lims_metadata.py

+ 2 - 0
dags/dag3_hpc_pipelines.py

@@ -27,6 +27,7 @@ with dag:
     BashOperator(
       task_id='run_demultiplexing_pipeline',
       dag=dag,
+      queue='hpc_4G',
       bash_command='bash /rds/general/user/igf/home/git_repo/IGF-cron-scripts/hpc/run_demultiplexing_pipeline.sh '
     )
 
@@ -34,6 +35,7 @@ with dag:
     BashOperator(
       task_id='run_primary_analysis_pipeline',
       dag=dag,
+      queue='hpc_4G',
       bash_command='bash /rds/general/user/igf/home/git_repo/IGF-cron-scripts/hpc/run_primary_analysis_pipeline.sh '
     )
 

+ 35 - 0
dags/dag4_lims_metadata.py

@@ -0,0 +1,35 @@
+from datetime import timedelta
+
+from airflow.models import DAG,Variable
+from airflow.utils.dates import days_ago
+from airflow.operators.bash_operator import BashOperator
+
+default_args = {
+    'owner': 'airflow',
+    'depends_on_past': False,
+    'start_date': days_ago(2),
+    'email_on_failure': False,
+    'email_on_retry': False,
+    'retries': 1,
+    'retry_delay': timedelta(minutes=5)
+}
+
+dag = \
+  DAG(
+    dag_id='dag4_lims_metadata',
+    catchup=False,
+    schedule_interval=None,
+    max_active_runs=1,
+    default_args=default_args)
+
+with dag:
+  submit_metadata_fetch_job = \
+    BashOperator(
+      task_id = 'submit_metadata_fetch_job',
+      dag = dag,
+      xcom_push=True,
+      queue='hpc_4G',
+      bash_command = 'bash /rds/general/user/igf/home/git_repo/IGF-cron-scripts/hpc/lims_metadata/fetch_lims_metadata_qsub.sh '
+    )
+
+  submit_metadata_fetch_job