check_celery_queue.py

import redis


def fetch_queue_list_from_redis_server(url):
    '''
    Fetch the list of Celery queues and their lengths from a Redis broker

    :param url: A Redis server URL, e.g. redis://hostname:6379/0
    :returns: A list of dictionaries, one per queue
        [{queue_name: job_count}]
    '''
    try:
        queue_list = list()
        print(url)                # debug output
        print(redis.__version__)
        r = redis.from_url(url)
        for i in r.keys():
            queue = i.decode()
            # skip internal keys (e.g. _kombu.binding.*) and the unacked
            # bookkeeping keys, which are not queue lists
            if not queue.startswith('_') and \
               not queue.startswith('unacked'):
                print(queue)
                q_len = r.llen(queue)            # number of waiting jobs
                queue_list.append({queue: q_len})
        return queue_list
    except Exception as e:
        raise ValueError(
            'Failed to fetch from redis server, error: {0}'.format(e))
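

# Example usage (a minimal sketch; the broker URL below is hypothetical and
# assumes the Celery queue keys on that Redis server are plain lists):
#
#   queues = fetch_queue_list_from_redis_server('redis://localhost:6379/0')
#   print(queues)   # e.g. [{'default': 3}, {'hpc_4G': 0}]
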
def calculate_new_workers(queue_list, active_jobs_dict,
                          max_workers_per_queue=10, max_total_workers=70):
    '''
    A function for calculating the new worker size

    :param queue_list: A list of dictionaries containing all the queued jobs
        [{queue_name: job_count}]
    :param active_jobs_dict: A dictionary of per-state job counts for each queue
        {queue_name: {job_state: job_count}}
    :param max_workers_per_queue: Max allowed workers per queue, default 10
    :param max_total_workers: Max total workers across all queues, default 70
    :returns: A dictionary of the target worker counts
        {queue_name: target_worker_count}
        and a list of the unique queue names
        [queue_name]
    '''
    try:
        worker_to_submit = dict()
        unique_queue_list = list()
        # count all currently queued ('Q') or running ('R') jobs
        total_active_jobs = 0
        for _, job_data in active_jobs_dict.items():
            for job_state, job_count in job_data.items():
                if job_state in ('Q', 'R'):
                    total_active_jobs += job_count
        if isinstance(queue_list, list) and \
           len(queue_list) > 0 and \
           total_active_jobs < max_total_workers:
            for entry in queue_list:                          # this list should be unique
                for queue_name, waiting_jobs in entry.items():
                    if waiting_jobs > max_workers_per_queue:  # cap per-queue workers
                        waiting_jobs = max_workers_per_queue
                    active_job = active_jobs_dict.get(queue_name)
                    total_running_for_queue = 0
                    active_queued_job = 0
                    if active_job is not None:
                        for job_state, job_counts in active_job.items():
                            if job_state in ('Q', 'R'):
                                total_running_for_queue += job_counts
                            if job_state == 'Q':
                                active_queued_job += job_counts
                    if active_queued_job < 1:
                        # no worker queued for this queue yet: submit up to
                        # the remaining global capacity
                        if total_running_for_queue == 0 and \
                           (total_active_jobs + waiting_jobs) < max_total_workers:
                            worker_to_submit.update({queue_name: waiting_jobs})
                        if total_running_for_queue > 0:
                            if waiting_jobs > total_running_for_queue:
                                # only submit workers beyond the ones already running
                                waiting_jobs = waiting_jobs - total_running_for_queue
                                if (total_active_jobs + waiting_jobs) < max_total_workers:
                                    worker_to_submit.update({queue_name: waiting_jobs})
                        total_active_jobs += waiting_jobs
                    else:
                        print('Not submitting new jobs for queue {0}'.format(queue_name))
        if len(worker_to_submit.keys()) > 0:
            unique_queue_list = list(worker_to_submit.keys())  # dict view -> list
        return worker_to_submit, unique_queue_list
    except Exception as e:
        raise ValueError(
            'Failed to calculate airflow worker size, error: {0}'.format(e))
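

# Example usage (a minimal sketch with made-up queue names and counts; job
# states follow the 'Q' (queued) / 'R' (running) convention used above):
#
#   queued = [{'default': 12}, {'hpc_4G': 2}]
#   active = {'default': {'R': 4}, 'hpc_4G': {'Q': 1}}
#   workers, queue_names = calculate_new_workers(queued, active)
#   # 'default' is capped at max_workers_per_queue (10) and reduced by the
#   # 4 workers already running, giving {'default': 6}; 'hpc_4G' is skipped
#   # because it already has a queued worker.
#   print(workers, queue_names)   # {'default': 6} ['default']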