aus10powell committed
Commit 62b7bc7 · 1 Parent(s): 7954299

Update app_utils.py

Files changed (1):
  app_utils.py  +121 -51
app_utils.py CHANGED
@@ -1,6 +1,13 @@
  """
- Description: Functions that directly support the Streamlit app
  """
  import pandas as pd
  import altair as alt
  import io
@@ -8,7 +15,21 @@ import av
  from tqdm import tqdm
  import numpy as np
  import logging


  def frames_to_video(frames=None, fps=12):
      """
@@ -16,68 +37,117 @@ def frames_to_video(frames=None, fps=12):

      Args:
          frames: frame from cv2.VideoCapture as numpy. E.g. frame.astype(np.uint8)
-         fps: Frames per second. Usefull if inference video is compressed to slow down for analysis
      """
-     height, width, layers = frames[0].shape  # grab info from first frame
-     output_memory_file = io.BytesIO()  # Create BytesIO "in memory file".
-
-     output = av.open(
-         output_memory_file, "w", format="mp4"
-     )  # Open "in memory file" as MP4 video output
-     stream = output.add_stream(
-         "h264", str(fps)
-     )  # Add H.264 video stream to the MP4 container, with framerate = fps.
-     stream.width = width  # Set frame width
-     stream.height = height  # Set frame height
-     stream.pix_fmt = "yuv420p"  # NOTE: yuv444p doesn't work on mac. Select yuv444p pixel format (better quality than default yuv420p).
      stream.options = {
          "crf": "17"
-     }  # Select low crf for high quality (the price is larger file size).

-     # Iterate the created images, encode and write to MP4 memory file.
      logging.info("INFO: Encoding frames and writing to MP4 format.")
      for frame in tqdm(frames):
          frame = av.VideoFrame.from_ndarray(frame.astype(np.uint8), format="bgr24")
-         packet = stream.encode(frame)  # Encode video frame
-         output.mux(
-             packet
-         )  # "Mux" the encoded frame (add the encoded frame to MP4 file).

-     packet = stream.encode(None)  # Flush the encoder
      output.mux(packet)
      output.close()

      output_memory_file.seek(0)
      return output_memory_file


- def plot_historical_data(dataframe):
-     """Returns altair plot of historical counts to be rendered on main dashboard."""
-     dataframe["Date"] = pd.to_datetime(dataframe["Date"])
-     s = (
-         dataframe.resample(rule="D", on="Date")["Count"].sum().reset_index()
-     )  # Resample on day
-     return (
-         alt.Chart(s, title="Historical Video Counts of Herring")
-         .mark_bar()
-         .transform_window(
-             # The field to average
-             rolling_mean="mean(Count)",
-             # The number of values before and after the current value to include.
-             frame=[-9, 0],
-         )
-         .encode(x="Date", y="Count", tooltip=["Count", "Date"])
-         .interactive()
-     )
-
-
- def plot_count_date(dataframe):
-     """Plots counts vs relative time for uploaded video."""
-     dataframe["seconds"] = dataframe["timestamps"] / 1000
-     dataframe["class"] = "Herring"  # TBD: Hard-coded for now
-     return (
-         alt.Chart(dataframe, title="Processed video detected fish")
-         .mark_line()
-         .encode(x="seconds", y="fish_count", color="class")
-         .interactive()
-     )
 
  """
+ app_utils.py
+
+ Description: This file contains utility functions to support the Streamlit app.
+ These functions handle file processing, video conversion, and inference running
+ on uploaded images and videos.
+
+ Author: Austin Powell
  """
+
  import pandas as pd
  import altair as alt
  import io

  from tqdm import tqdm
  import numpy as np
  import logging
+ import streamlit as st
+
+ def extract_file_datetime(fname):
+     """Extract datetime from file name
+
+     Args:
+         fname (str): File name

+     Returns:
+         pd.datetime: Datetime extracted from file name
+     """
+     fname = os.path.basename(fname)
+     dt = fname.split("_")[1]
+     h,m,s = fname.split("_")[2].split(".")[0].split("-")
+     return pd.to_datetime(f"{dt} {h}:{m}:{s}")

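Note: a quick, hypothetical check of the parsing above. The file-name pattern ("<site>_<YYYY-MM-DD>_<HH-MM-SS>.<ext>") is inferred from the split logic and is not stated in this commit, and os must be importable for os.path.basename to work.

    # Hypothetical example (file name is made up); mirrors the splits in extract_file_datetime
    import os
    import pandas as pd

    fname = os.path.basename("data/herring_2023-04-12_06-30-00.mp4")
    dt = fname.split("_")[1]                                # "2023-04-12"
    h, m, s = fname.split("_")[2].split(".")[0].split("-")  # "06", "30", "00"
    print(pd.to_datetime(f"{dt} {h}:{m}:{s}"))              # 2023-04-12 06:30:00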
  def frames_to_video(frames=None, fps=12):
      """

      Args:
          frames: frame from cv2.VideoCapture as numpy. E.g. frame.astype(np.uint8)
+         fps: Frames per second. Useful if the inference video is compressed to slow down for analysis
      """
+
+     # Grab information from the first frame
+     height, width, layers = frames[0].shape
+
+     # Create a BytesIO "in memory file"
+     output_memory_file = io.BytesIO()
+
+     # Open "in memory file" as MP4 video output
+     output = av.open(output_memory_file, "w", format="mp4")
+
+     # Add H.264 video stream to the MP4 container, with framerate = fps
+     stream = output.add_stream("h264", str(fps))
+
+     # Set frame width and height
+     stream.width = width
+     stream.height = height
+
+     # Set pixel format (yuv420p for better compatibility)
+     stream.pix_fmt = "yuv420p"
+
+     # Select low crf for high quality (the price is larger file size)
      stream.options = {
          "crf": "17"
+     }

+     # Iterate through the frames, encode, and write to MP4 memory file
      logging.info("INFO: Encoding frames and writing to MP4 format.")
      for frame in tqdm(frames):
+         # Convert frame to av.VideoFrame format
          frame = av.VideoFrame.from_ndarray(frame.astype(np.uint8), format="bgr24")

+         # Encode the video frame
+         packet = stream.encode(frame)
+
+         # "Mux" the encoded frame (add the encoded frame to MP4 file)
+         output.mux(packet)
+
+     # Flush the encoder
+     packet = stream.encode(None)
      output.mux(packet)
+
+     # Close the output video file
      output.close()

+     # Reset the file pointer to the beginning of the memory file
      output_memory_file.seek(0)
+
+     # Return the output memory file
      return output_memory_file

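Note: a minimal usage sketch for frames_to_video as added above. The frame shapes and the Streamlit call are illustrative assumptions; the function itself returns an io.BytesIO already seeked to 0, which st.video can consume directly.

    # Illustrative only: encode a few blank BGR frames and hand the in-memory MP4 to Streamlit
    import numpy as np
    import streamlit as st

    frames = [np.zeros((480, 640, 3), dtype=np.uint8) for _ in range(24)]
    video_file = frames_to_video(frames=frames, fps=12)  # io.BytesIO holding MP4 bytes
    st.video(video_file)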
+ def process_uploaded_file():
+     st.subheader("Upload your own video...")
+
+     # Initialize accepted file types for upload
+     img_types = ["jpg", "png", "jpeg"]
+     video_types = ["mp4", "avi"]
+
+     # Allow user to upload an image or video file
+     uploaded_file = st.file_uploader("Select an image or video file...", type=img_types + video_types)
+
+     # Display the uploaded file
+     if uploaded_file is not None:
+         if str(uploaded_file.type).split("/")[-1] in img_types:
+             # Display uploaded image
+             image = Image.open(uploaded_file)
+             st.image(image, caption="Uploaded image", use_column_width=True)
+
+             # TBD: Inference code to run and display for single image
+
+         elif str(uploaded_file.type).split("/")[-1] in video_types:
+             # Display uploaded video
+             st.video(uploaded_file)
+
+             # Convert streamlit video object to OpenCV format to run inferences
+             tfile = tempfile.NamedTemporaryFile(delete=False)
+             tfile.write(uploaded_file.read())
+             vf = cv.VideoCapture(tfile.name)
+
+             # Run inference on the uploaded video
+             with st.spinner("Running inference..."):
+                 frames, counts, timestamps = inference.main(vf)
+                 logging.info("INFO: Completed running inference on frames")
+                 st.balloons()
+
+             # Convert OpenCV Numpy frames in-memory to IO Bytes for streamlit
+             streamlit_video_file = frames_to_video(frames=frames, fps=11)
+
+             # Show processed video and provide download button
+             st.video(streamlit_video_file)
+             st.download_button(
+                 label="Download processed video",
+                 data=streamlit_video_file,
+                 mime="mp4",
+                 file_name="processed_video.mp4",
+             )
+
+             # Create dataframe for fish counts and timestamps
+             df_counts_time = pd.DataFrame(
+                 data={"fish_count": counts, "timestamps": timestamps[1:]}
+             )
+
+             # Display fish count vs. timestamp chart
+             st.altair_chart(
+                 plot_count_date(dataframe=df_counts_time),
+                 use_container_width=True,
+             )
+
+     else:
+         st.write("No file uploaded")
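Note: process_uploaded_file and extract_file_datetime reference several names that are not imported in this diff (os, Image, tempfile, cv, inference) and call plot_count_date, which the same commit removes from this file; presumably these are provided elsewhere in the app. A sketch of the imports those references imply, stated as assumptions rather than as part of the commit:

    # Assumed companion imports; module names are inferred from usage, not from this diff
    import os
    import tempfile

    import cv2 as cv        # used as cv.VideoCapture(tfile.name)
    from PIL import Image   # used as Image.open(uploaded_file)
    import inference        # project module; inference.main(vf) returns frames, counts, timestamps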