Browse Source

Moved HPC job settings (max workers per queue, max total workers) to the Airflow Variable config file

Avik Datta 4 years ago
parent
commit
4715c4d4d6
2 changed files with 5 additions and 3 deletions
  1. 2 0
      airflow_var/var.json
  2. 3 3
      dags/dag1_calculate_hpc_worker.py

+ 2 - 0
airflow_var/var.json

@@ -4,6 +4,8 @@
   "hpc_user":"igf",
   "igf_lims_ssh_key_file":"/SSH/id_rsa",
   "hpc_ssh_key_file":"/home/igf/.ssh/id_rsa",
+  "hpc_max_workers_per_queue":2,
+  "hpc_max_total_workers":20,
   "seqrun_base_path":"",
   "seqrun_server_user":"",
   "database_config_file":"",

+ 3 - 3
dags/dag1_calculate_hpc_worker.py

@@ -20,7 +20,7 @@ dag = DAG(
         dag_id='dag1_calculate_hpc_worker',
         catchup=False,
         max_active_runs=1,
-        schedule_interval="*/10 * * * *",
+        schedule_interval="*/15 * * * *",
         default_args=args,
         tags=['igf-lims',]
       )
@@ -61,8 +61,8 @@ def get_new_workers(**kwargs):
       calculate_new_workers(
         queue_list=queued_tasks,
         active_jobs_dict=active_tasks,
-        max_workers_per_queue=3,
-        max_total_workers=20)
+        max_workers_per_queue=Variable.get('hpc_max_workers_per_queue'),
+        max_total_workers=Variable.get('hpc_max_total_workers'))
     for key,value in worker_to_submit.items():
       ti.xcom_push(key=key,value=value)
     unique_queue_list = \