streamlit_app.py

import os
import av
import threading
import streamlit as st
import streamlit_nested_layout  # Imported for its side effects: enables nesting columns inside columns.
from streamlit_webrtc import VideoHTMLAttributes, webrtc_streamer

from audio_handling import AudioFrameHandler
from drowsy_detection import VideoFrameHandler
from ads import css_string

# Define the audio file to use as the alarm.
alarm_file_path = os.path.join("audio", "wake_up.wav")

# Streamlit Components
st.set_page_config(
    page_title="Drowsiness Detection | LearnOpenCV",
    page_icon="https://learnopencv.com/wp-content/uploads/2017/12/favicon.png",
    layout="wide",  # "centered" or "wide"
    initial_sidebar_state="expanded",
    menu_items={
        "About": "### Visit www.learnopencv.com for more exciting tutorials!!!",
    },
)
col1, col2 = st.columns(spec=[6, 2], gap="medium")

with col1:
    st.title("Drowsiness Detection!!!🥱😪😴")
    with st.container():
        c1, c2 = st.columns(spec=[1, 1])
        with c1:
            # The amount of time (in seconds) to wait before sounding the alarm.
            WAIT_TIME = st.slider("Seconds to wait before sounding alarm:", 0.0, 5.0, 1.0, 0.25)
        with c2:
            # Lowest valid value of Eye Aspect Ratio. Ideal values: [0.15, 0.2].
            EAR_THRESH = st.slider("Eye Aspect Ratio threshold:", 0.0, 0.4, 0.18, 0.01)

thresholds = {
    "EAR_THRESH": EAR_THRESH,
    "WAIT_TIME": WAIT_TIME,
}
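
# Note: the EAR-based logic itself lives in drowsy_detection.VideoFrameHandler
# (not shown in this file). As a rough sketch, the Eye Aspect Ratio compares the
# vertical eye-landmark distances to the horizontal one,
#
#     EAR = (|p2 - p6| + |p3 - p5|) / (2 * |p1 - p4|),
#
# so it falls toward 0 as the eye closes. The handler is expected to raise the
# alarm once EAR stays below EAR_THRESH for roughly WAIT_TIME seconds.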
# For streamlit-webrtc
video_handler = VideoFrameHandler()
audio_handler = AudioFrameHandler(sound_file_path=alarm_file_path)

lock = threading.Lock()  # Guards shared_state for thread-safe access and to prevent race conditions.
shared_state = {"play_alarm": False}
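
# streamlit-webrtc invokes the video and audio callbacks below on separate worker
# threads: video_frame_callback writes the "play_alarm" flag and
# audio_frame_callback reads it, so both sides acquire the lock before touching
# shared_state.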
def video_frame_callback(frame: av.VideoFrame):
    frame = frame.to_ndarray(format="bgr24")  # Decode the frame into a BGR ndarray.
    frame, play_alarm = video_handler.process(frame, thresholds)  # Run drowsiness detection on the frame.
    with lock:
        shared_state["play_alarm"] = play_alarm  # Update the shared state.
    return av.VideoFrame.from_ndarray(frame, format="bgr24")  # Re-encode and return the BGR frame.


def audio_frame_callback(frame: av.AudioFrame):
    with lock:  # Read the current "play_alarm" state.
        play_alarm = shared_state["play_alarm"]

    new_frame: av.AudioFrame = audio_handler.process(frame, play_sound=play_alarm)
    return new_frame
# https://github.com/whitphx/streamlit-webrtc/blob/main/streamlit_webrtc/config.py
with col1:
    ctx = webrtc_streamer(
        key="drowsiness-detection",
        video_frame_callback=video_frame_callback,
        audio_frame_callback=audio_frame_callback,
        rtc_configuration={"iceServers": [{"urls": ["stun:stun.l.google.com:19302"]}]},  # Add this to the config for cloud deployment.
        media_stream_constraints={"video": {"height": {"ideal": 480}}, "audio": True},
        video_html_attrs=VideoHTMLAttributes(autoPlay=True, controls=False, muted=False),
    )
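
# Note on rtc_configuration: the Google STUN server is only used for ICE/NAT
# traversal so browsers can reach the stream when the app is not served from
# localhost (e.g. on Streamlit Cloud); it is harmless to keep for local runs.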
with col2:
    # Banner for newsletter subscription, jobs, and consulting.
    st.markdown(css_string, unsafe_allow_html=True)
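
# To try the app locally (assuming the dependencies imported above, e.g.
# streamlit, streamlit-webrtc, av, and streamlit_nested_layout, are installed):
#
#     streamlit run streamlit_app.py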