Browse Source

updates for dags and vars

Avik Datta 4 years ago
parent
commit
abeaff2457

+ 6 - 0
airflow_var/var.json

@@ -4,6 +4,12 @@
   "hpc_user":"igf",
   "igf_lims_ssh_key_file":"/SSH/id_rsa",
   "hpc_ssh_key_file":"/home/igf/.ssh/id_rsa",
+  "seqrun_server":"",
+  "seqrun_base_path":"",
+  "database_config_file":"",
+  "slack_conf":"",
+  "ms_teams_conf":"",
+  "hpc_seqrun_path":"",
   "hpc_queue_list":{
     "hpc_1G": {
       "pbs_resource":"-lselect=1:ncpus=1:mem=1gb -lwalltime=01:00:00",

+ 22 - 1
dags/dag1_calculate_hpc_worker.py

@@ -5,7 +5,7 @@ from airflow.operators.python_operator import PythonOperator,BranchPythonOperator
 from airflow.contrib.operators.ssh_operator import SSHOperator
 from airflow.contrib.hooks.ssh_hook import SSHHook
 from airflow.utils.dates import days_ago
-from igf_airflow.check_celery_queue import fetch_queue_list_from_redis_server,airflow_utils_for_redis
+from igf_airflow.check_celery_queue import fetch_queue_list_from_redis_server
 from igf_airflow.check_celery_queue import calculate_new_workers
 
 args = {
@@ -26,6 +26,27 @@ dag = DAG(
       )
 
 
+def airflow_utils_for_redis(**kwargs):
+  """
+  A callable for dag1: load the Redis server URL from a JSON conf file and return the list of Celery queues fetched from that server
+  """
+  try:
+    if 'redis_conf_file' not in kwargs:
+      raise ValueError('redis_conf_file info is not present in the kwargs')
+
+    redis_conf_file = kwargs.get('redis_conf_file')
+    json_data = dict()
+    with open(redis_conf_file,'r') as jp:
+      json_data = json.load(jp)
+    if 'redis_db' not in json_data:
+      raise ValueError('redis_db key not present in the conf file')
+    url = json_data.get('redis_db')
+    queue_list = fetch_queue_list_from_redis_server(url=url)
+    return queue_list
+  except Exception as e:
+    raise ValueError('Failed to run, error:{0}'.format(e))
+
+
 def get_new_workers(**kwargs):
   try:
     if 'ti' not in kwargs:
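This hunk drops airflow_utils_for_redis from the igf_airflow.check_celery_queue import and defines it locally in the DAG file instead. The function calls json.load(), and no json import is visible in the hunk, so the module is assumed to import json elsewhere. A minimal sketch of how the callable might be attached to the DAG as a PythonOperator; the task id and conf file path are placeholders, not taken from the commit:

    import json  # needed by airflow_utils_for_redis; assumed to be imported in the module

    from airflow.operators.python_operator import PythonOperator

    fetch_queue_list = \
      PythonOperator(
        task_id='fetch_queue_list_from_redis',                   # hypothetical task id
        dag=dag,
        python_callable=airflow_utils_for_redis,
        op_kwargs={'redis_conf_file': '/path/to/redis.json'})    # placeholder path

The returned queue list is pushed to XCom, which the downstream get_new_workers task appears to read through its ti argument.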

+ 1 - 1
dags/dag7_hpc_scheduler.py

@@ -20,7 +20,7 @@ dag = \
   DAG(
     dag_id='dag7_hpc_scheduler',
     catchup=False,
-    schedule_interval="30 * * * *",
+    schedule_interval="*/30 * * * *",
     max_active_runs=1,
     tags=['igf-lims'],
     default_args=default_args)
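The schedule change alters the cadence: "30 * * * *" triggers once an hour at minute 30, while "*/30 * * * *" triggers every 30 minutes (at :00 and :30). A quick check with croniter, a package Airflow already depends on; the base timestamp is only for illustration:

    from datetime import datetime
    from croniter import croniter

    base = datetime(2020, 1, 1, 0, 0)
    old = croniter('30 * * * *', base)    # hourly, at minute 30
    new = croniter('*/30 * * * *', base)  # every 30 minutes

    print([t.strftime('%H:%M') for t in (old.get_next(datetime) for _ in range(3))])
    # ['00:30', '01:30', '02:30']
    print([t.strftime('%H:%M') for t in (new.get_next(datetime) for _ in range(3))])
    # ['00:30', '01:00', '01:30']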

+ 1 - 1
dags/dag8_copy_ongoing_seqrun.py

@@ -40,7 +40,7 @@ dag = \
     catchup=False,
     schedule_interval="0 */2 * * *",
     max_active_runs=1,
-    tags=['orwell','hpc'],
+    tags=['hpc'],
     default_args=default_args)