inference
ifiguero 2025-07-21 18:36:17 -04:00
parent 169f92c3fb
commit 221e050c90
62 changed files with 376 additions and 87 deletions

View File

@@ -14,15 +14,15 @@ services:
depends_on:
- redis
environment:
# stream_url: "rtsp://admin:labvision2019@10.1.8.182:554"
stream_url: "/videos/video.mp4"
stream_url: "rtsp://admin:labvision2019@10.1.8.182:554"
# stream_url: "/videos/video.mp4"
stream_label: "pelea3"
redis_host: "redis"
redis_port: "6379"
redis_db: "0"
restart: unless-stopped
volumes:
- /home/ifiguero/DIA/dia3/DIA0205 - VISIÓN COMPUTACIONAL APLICADA/Profesor Cristian Aguilera/dataset/pelea3.mp4:/videos/video.mp4:ro
# volumes:
# - /home/ifiguero/DIA/dia3/DIA0205 - VISIÓN COMPUTACIONAL APLICADA/Profesor Cristian Aguilera/dataset/pelea3.mp4:/videos/video.mp4:ro
stream_preprocess:
build:
@@ -32,6 +32,7 @@ services:
depends_on:
- redis
- stream_reader
- stream_inference
environment:
stream_label: "pelea3"
redis_host: "redis"
@@ -39,15 +40,45 @@ services:
redis_db: "0"
restart: unless-stopped
dataset_builder:
stream_inference:
build:
dockerfile: Dockerfile
context: dataset
container_name: dataset_builder
context: inference
container_name: stream_inference
# deploy:
# resources:
# reservations:
# devices:
# - driver: nvidia
# count: all
# capabilities: [gpu]
depends_on:
- redis
- stream_preprocess
environment:
model_folder: "/app/models"
model_name: "tuned"
out_folder: "/app/out_folder"
stream_label: "pelea3"
threshold: "0.75"
redis_host: "redis"
redis_port: "6379"
redis_db: "0"
restart: unless-stopped
volumes:
- ./rt_out:/app/out_folder
- ./models:/app/models
web_inference:
build:
dockerfile: Dockerfile
context: web
container_name: web_inference
depends_on:
- redis
- stream_inference
environment:
stream_url: "rtsp://admin:labvision2019@10.1.8.182:554"
# stream_url: "/videos/video.mp4"
out_folder: "/app/out_folder"
stream_label: "pelea3"
redis_host: "redis"
@@ -55,4 +86,6 @@ services:
redis_db: "0"
restart: unless-stopped
volumes:
- ./out_folder:/app/out_folder
- ./rt_out:/app/out_folder
ports:
- "8080:8080"

View File

@@ -26,10 +26,10 @@ RUN /opt/conda/bin/pip install --upgrade pip && \
pillow
RUN /opt/conda/bin/conda install -y -c conda-forge opencv
RUN /opt/conda/bin/pip install --force-reinstall --no-cache-dir tqdm
RUN /opt/conda/bin/pip install --force-reinstall --no-cache-dir redis
# Copy project files into container
COPY evaluate.py /app/evaluate.py
COPY inference.py /app/inference.py
# Default command (you can override with docker run)
CMD ["python", "/app/evaluate.py"]
CMD ["python", "/app/inference.py"]

View File

@@ -17,16 +17,19 @@ redis_host = os.getenv("redis_host", "localhost")
redis_port = int(os.getenv("redis_port", 6379))
stream_label = os.getenv("stream_label", "cropped_stream")
output_folder = os.getenv("out_folder", "/app/out_folder")
model_folder = os.getenv("model_folder", "base_model")
model_path = os.path.join(output_folder, model_folder)
threshold = float(os.getenv("threshold", "0.5"))
queue_label = f"{stream_label}_cubes"
model_folder = os.getenv("model_folder", "/app/models")
model_name = os.getenv("model_name", "base_model")
threshold = float(os.getenv("threshold", "0.5"))
queue_label = f"{stream_label}_cubes"
model_path = os.path.join(model_folder, model_name)
sqlite_path = os.path.join(output_folder, f"{stream_label}_results.db")
stream_folder = os.path.join(output_folder, stream_label)
os.makedirs(output_folder, exist_ok=True)
os.makedirs(stream_folder, exist_ok=True)
os.makedirs(model_path, exist_ok=True)
# -----------------------------
# CONNECT TO REDIS
@@ -43,7 +46,7 @@ model.eval()
# Warm up
with torch.no_grad():
dummy = torch.randn(1, 3, 16, 224, 224).to(device)
dummy = torch.randn(1, 16, 3, 224, 224).to(device)
_ = model(pixel_values=dummy)
# -----------------------------
@@ -55,7 +58,7 @@ cursor.execute("""
CREATE TABLE IF NOT EXISTS inference_results (
id INTEGER PRIMARY KEY AUTOINCREMENT,
gif_name TEXT,
timestamp TEXT,
timestamp TIMESTAMP,
prediction INTEGER
)
""")
@@ -69,6 +72,7 @@ def from_redis_list(queue_label):
while True:
compressed_data = redis_conn.lpop(queue_label)
if compressed_data:
retry = 0
buffer = io.BytesIO(compressed_data)
loaded_data = np.load(buffer)
frames = [loaded_data[key] for key in sorted(loaded_data.files)]
@@ -78,29 +82,33 @@
if retry % 50 == 0:
print(f"[WAIT] Queue {queue_label} empty for {retry/50:.1f} seconds...")
time.sleep(1/50.0)
if retry > 2000:
raise TimeoutError(f"Queue {queue_label} empty for over 40s")
if retry > 3000:
raise TimeoutError(f"Queue {queue_label} empty for over 1m")
# -----------------------------
# SAVE GIF
# -----------------------------
def save_gif(frames, gif_path):
images = [Image.fromarray(frame.astype(np.uint8)) for frame in frames]
images[0].save(gif_path, save_all=True, append_images=images[1:], duration=50, loop=0)
images = [Image.fromarray(frame[:, :, ::-1]) for frame in frames]
images[0].save(gif_path, save_all=True, append_images=images[1:], duration=1, loop=0)
# images = [Image.fromarray(frame.astype(np.uint8)) for frame in frames]
# images[0].save(gif_path, save_all=True, append_images=images[1:], duration=50, loop=0)
# -----------------------------
# MAIN LOOP
# -----------------------------
print(f"[INFO] Listening on Redis queue: {queue_label}")
if __name__ == "__main__":
while True:
try:
frames = from_redis_list(queue_label)
if not all(frame.shape == (224, 224, 3) for frame in frames):
print("[WARN] Skipped frame batch due to incorrect shape")
continue
# if not all(frame.shape == (224, 224, 3) for frame in frames):
# print("[WARN] Skipped frame batch due to incorrect shape")
# continue
timestamp = datetime.utcnow().strftime("%y%m%d_%H%M%S_%f")[:-3]
gif_filename = f"{stream_label}/{stream_label}_{timestamp}.gif"
now = datetime.utcnow()
timestamp = now.strftime("%y%m%d_%H%M%S_%f")[:-3]
gif_filename = f"{stream_label}_{timestamp}.gif"
gif_path = os.path.join(stream_folder, gif_filename)
# Save GIF
@@ -116,11 +124,11 @@ while True:
# Insert into SQLite
cursor.execute("INSERT INTO inference_results (gif_name, timestamp, prediction) VALUES (?, ?, ?)",
(gif_filename, timestamp, prediction))
(gif_filename, now, prediction))
conn.commit()
print(f"[INFO] Saved {gif_filename} | Class={prediction} | Prob={prob:.3f}")
except TimeoutError as e:
print(f"[TimeoutError] {e}")
except Exception as e:
print(f"[ERROR] {e}")
time.sleep(1)
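
The warm-up fix above swaps the dummy tensor from (1, 3, 16, 224, 224) to (1, 16, 3, 224, 224), i.e. (batch, frames, channels, height, width), which is the pixel_values layout HuggingFace video classifiers such as VideoMAE expect. A sketch of packing a 16-frame clip from the queue into that layout (the real script may instead use the model's image processor for resizing and normalization):

```python
# A sketch, assuming BGR uint8 frames of shape (224, 224, 3) as produced upstream.
import numpy as np
import torch

def clip_to_pixel_values(frames):
    clip = np.stack(frames)[..., ::-1].copy()      # (16, 224, 224, 3), BGR -> RGB
    clip = torch.from_numpy(clip).float() / 255.0  # scale to [0, 1]
    clip = clip.permute(0, 3, 1, 2)                # (16, 3, 224, 224)
    return clip.unsqueeze(0)                       # (1, 16, 3, 224, 224)
```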

View File

@@ -31,7 +31,7 @@ def fromRedis(queue_label):
compressed_data = redis_conn.lpop(queue_label)
if compressed_data:
retry = 0
print(f"Popped data from queue: {queue_label}")
# print(f"Popped data from queue: {queue_label}")
buffer = io.BytesIO(compressed_data)
loaded_data = np.load(buffer)
return loaded_data['data']
@@ -41,10 +41,10 @@ def fromRedis(queue_label):
print(f"Queue {queue_label} empty for {retry/50} seconds")
time.sleep(1/50.0)
if retry > 1000:
raise(f'Queue {queue_label} 20s empty')
raise TimeoutError(f'Queue {queue_label} 20s empty')
def toRedisList(queue_label, data_list):
print(f"Pushed data to queue: {queue_label}")
# print(f"Pushed data to queue: {queue_label}")
buffer = io.BytesIO()
np.savez(buffer, *data_list) # Use *data_list to unpack the list into separate array arguments for np.savez
compressed_data = buffer.getvalue()
@@ -57,7 +57,7 @@ def fromRedisList(queue_label):
compressed_data = redis_conn.lpop(queue_label)
if compressed_data:
retry = 0
print(f"Popped data from queue: {queue_label}")
# print(f"Popped data from queue: {queue_label}")
buffer = io.BytesIO(compressed_data)
loaded_data = np.load(buffer)
data_list = [loaded_data[key] for key in loaded_data.files]
@@ -69,7 +69,7 @@ def fromRedisList(queue_label):
time.sleep(1/50.0)
if retry > 1000:
raise(f'Queue {queue_label} 20s empty')
raise TimeoutError(f'Queue {queue_label} 20s empty')
import mediapipe as mp
@@ -163,6 +163,8 @@ def crop_and_resize_frames(frames, box, target_size=224):
cropped.append(crop)
return cropped
import collections
if __name__ == "__main__":
frame_list = []
frame_count = 0
@@ -173,24 +175,26 @@ if __name__ == "__main__":
lap_time = start_time
print(f"[INFO] Starting consumer for stream: {stream_label}")
while frame_lag < 100:
try:
while True:
frame = fromRedis(stream_label)
if frame is None:
frame_lag += 1
wait_time = frame_lag * 0.01
time.sleep(wait_time)
continue
frame_lag = 0
frame_list.append(frame)
frame_count += 1
if len(frame_list) == 16:
hits = frame_hits
for box in detect_person_bbox(frame_list[0]):
frame_hits += 1
cropped_frames = crop_and_resize_frames(frame_list, box)
cropped_frames = crop_and_resize_frames(list(frame_list), box)
toRedisList(stream_label_queue, cropped_frames)
if frame_hits > hits:
frame_list.clear()
else:
frame_list.pop(0)
frame_list.pop(0)
frame_list.pop(0)
frame_list.pop(0)
if frame_count % 15 == 0:
@@ -200,6 +204,10 @@
lap_time = time.time()
elapsed = lap_time - last_lap
print(f"[INFO] {frame_count} frames, {frame_hits} hits. {current_hits} in {elapsed:.2f} seconds.")
except TimeoutError as e:
print(f"[TimeoutError] {e}")
except Exception as e:
print(f"[ERROR] {e}")
total_time = time.time() - start_time
print(f"[INFO] Finished. Total frames: {frame_count}. Total Hits {frame_hits}. Total time: {total_time:.2f} seconds.")

View File

@@ -23,7 +23,7 @@ redis_conn.delete(stream_label)
print(f"[INFO] Cleared Redis queue for label: {stream_label}")
def toRedis(queue_label, data):
print(f"Pushed data to queue: {queue_label}")
# print(f"Pushed data to queue: {queue_label}")
buffer = io.BytesIO()
np.savez(buffer, data=data)
compressed_data = buffer.getvalue()
@@ -38,11 +38,9 @@ def fromRedis(queue_label):
buffer = io.BytesIO(compressed_data)
loaded_data = np.load(buffer)
return loaded_data['data']
else:
time.sleep(1/50.0)
def toRedisList(queue_label, data_list):
print(f"Pushed data to queue: {queue_label}")
# print(f"Pushed data to queue: {queue_label}")
buffer = io.BytesIO()
np.savez(buffer, *data_list) # Use *data_list to unpack the list into separate array arguments for np.savez
compressed_data = buffer.getvalue()
@@ -58,8 +56,6 @@ def fromRedisList(queue_label):
loaded_data = np.load(buffer)
data_list = [loaded_data[key] for key in loaded_data.files]
return data_list
else:
time.sleep(1/50.0)
# Main frame processing function
@@ -86,14 +82,14 @@ def stream_frames(video_url, label):
toRedis(label,frame)
frame_count += 1
time.sleep(1/30.0)
# time.sleep(1/30.0)
# Log every 500 frames
if frame_count % 100 == 0:
if frame_count % 16 == 0:
last_lap = lap_time
lap_time = time.time()
elapsed = lap_time - last_lap
print(f"[INFO] 100 frames processed in {elapsed:.2f} seconds.")
print(f"[INFO] 16 frames processed in {elapsed:.2f} seconds.")
# if frame_count % 16 and (redis_len := redis_conn.llen(label)) > 75:
# print(f"[WARN] {redis_len} items in queue. Pruning to 50 latest.")

54 binary files not shown (GIF clips added, roughly 550–720 KiB each).

web/Dockerfile (new file, mode 100644, 21 lines)
View File

@@ -0,0 +1,21 @@
# Base Python image
FROM python:3.10-slim
# Set workdir
WORKDIR /app
# Install dependencies
RUN pip install --no-cache-dir flask redis
# Copy files into the container
COPY templates /app/templates
COPY web.py /app/web.py
# Expose port for Flask app
EXPOSE 8080
# Run the Flask app
CMD ["python", "web.py"]

web/templates/index.html (new file, mode 100644, 126 lines)

View File

@@ -0,0 +1,126 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Stream Monitor</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap 5 -->
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet">
<style>
body {
background-color: #f8f9fa;
}
.video-frame {
width: 100%;
aspect-ratio: 16 / 9;
background: black;
}
.gif-preview {
width: 100%;
border-radius: 5px;
}
.detection-list {
max-height: 80vh;
overflow-y: auto;
}
.section-title {
margin-top: 1rem;
margin-bottom: 1rem;
font-weight: bold;
}
.small-text {
font-size: 0.85rem;
color: #666;
}
</style>
</head>
<body>
<div class="container-fluid mt-4">
<div class="row">
<!-- Live Stream -->
<div class="col-md-6">
<h4 class="section-title">Live Stream</h4>
<div class="video-frame">
<!-- Placeholder RTSP (replace with actual player integration) -->
<video controls autoplay muted width="100%" height="100%">
<source src="{{stream_url}}" type="video/mp4">
RTSP playback not supported.
</video>
</div>
<p class="mt-2 small-text">Frame queue length: <span id="queue-length-f">...</span></p>
<p class="mt-2 small-text">Inference Queue length: <span id="queue-length-i">...</span></p>
</div>
<!-- Negative Detections -->
<div class="col-md-3">
<h4 class="section-title text-danger">Negative Detections</h4>
<div class="detection-list" id="negatives">
<p class="text-muted small">Waiting for data...</p>
</div>
</div>
<!-- Positive Detections -->
<div class="col-md-3">
<h4 class="section-title text-success">Positive Detections</h4>
<div class="detection-list" id="positives">
<p class="text-muted small">Waiting for data...</p>
</div>
</div>
</div>
</div>
<!-- Bootstrap JS + Fetch -->
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/js/bootstrap.bundle.min.js"></script>
<script>
async function fetchStatus() {
try {
const response = await fetch('/status');
const data = await response.json();
// Update queue length
document.getElementById('queue-length-f').textContent = data.stream_1_queue;
document.getElementById('queue-length-i').textContent = data.stream_2_queue;
// Update negatives
const negContainer = document.getElementById('negatives');
negContainer.innerHTML = '';
data.negatives.forEach(item => {
negContainer.innerHTML += `
<div class="card mb-2">
<img src="/gifs/${item.gif}" class="gif-preview" alt="negative">
<div class="card-body p-2">
<small class="text-muted">${item.timestamp}</small>
</div>
</div>
`;
});
// Update positives
const posContainer = document.getElementById('positives');
posContainer.innerHTML = '';
data.positives.forEach(item => {
posContainer.innerHTML += `
<div class="card mb-2">
<img src="/gifs/${item.gif}" class="gif-preview" alt="positive">
<div class="card-body p-2">
<small class="text-muted">${item.timestamp}</small>
</div>
</div>
`;
});
} catch (err) {
console.error('Error fetching status:', err);
}
}
// Fetch every 2 seconds
setInterval(fetchStatus, 2000);
fetchStatus(); // Initial call
</script>
</body>
</html>

web/web.py (new file, mode 100644, 97 lines)
View File

@@ -0,0 +1,97 @@
import os
import io
import numpy as np  # required by the toRedis helper below
import redis
import sqlite3
import json
from flask import Flask, jsonify, send_from_directory, render_template, render_template_string
# -----------------------------
# ENVIRONMENT CONFIG
# -----------------------------
redis_host = os.getenv("redis_host", "localhost")
redis_port = int(os.getenv("redis_port", "6379"))
stream_url = os.getenv("stream_url", "/path/to/stream")
output_folder = os.getenv("out_folder", "/app/out_folder")
stream_label = os.getenv("stream_label", "default_stream")
stream_label_queue = f"{stream_label}_cubes"
sqlite_path = os.path.join(output_folder, f"{stream_label}_results.db")
gif_folder = os.path.join(output_folder, stream_label)
# -----------------------------
# REDIS + SQLITE CONNECTIONS
# -----------------------------
redis_conn = redis.Redis(host=redis_host, port=redis_port, db=0, decode_responses=False)
conn = sqlite3.connect(sqlite_path, check_same_thread=False)
cursor = conn.cursor()
cursor.execute('''
CREATE TABLE IF NOT EXISTS inference_results (
id INTEGER PRIMARY KEY AUTOINCREMENT,
gif_name TEXT,
timestamp TIMESTAMP,
prediction INTEGER
)
''')
conn.commit()
# -----------------------------
# REDIS PUSH FUNCTION
# -----------------------------
def toRedis(queue_label, data):
print(f"Pushed data to queue: {queue_label}")
buffer = io.BytesIO()
np.savez(buffer, data=data)
compressed_data = buffer.getvalue()
return redis_conn.rpush(queue_label, compressed_data)
# -----------------------------
# FLASK SETUP
# -----------------------------
app = Flask(__name__, template_folder='/app/templates')
# Root returns a placeholder HTML template
@app.route("/")
def home():
return render_template("index.html", stream_url=stream_url)
# JSON API endpoint for status monitoring
@app.route("/status")
def api_status():
try:
queue_len_1 = redis_conn.llen(stream_label)
queue_len_2 = redis_conn.llen(stream_label_queue)
# Fetch 10 latest positive and negative predictions
cursor.execute("SELECT gif_name, timestamp FROM inference_results WHERE prediction = 1 ORDER BY timestamp DESC LIMIT 10")
positives = [{"gif": row[0], "timestamp": row[1]} for row in cursor.fetchall()]
cursor.execute("SELECT gif_name, timestamp FROM inference_results WHERE prediction = 0 ORDER BY timestamp DESC LIMIT 10")
negatives = [{"gif": row[0], "timestamp": row[1]} for row in cursor.fetchall()]
return jsonify({
"stream_1_queue": queue_len_1,
"stream_2_queue": queue_len_2,
"positives": positives,
"negatives": negatives
})
except Exception as e:
return jsonify({"error": str(e)}), 500
# Serve static GIFs
@app.route("/gifs/<path:filename>")
def serve_gif(filename):
return send_from_directory(gif_folder, filename)
# -----------------------------
# MAIN ENTRY
# -----------------------------
if __name__ == "__main__":
print(f"[INFO] Flask app running — monitoring queues: {stream_label}, {stream_label_queue}")
print(f"[INFO] Serving GIFs from: {gif_folder}")
app.run(host="0.0.0.0", port=8080, debug=True)
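
With the stack up, the monitoring API can be smoke-tested from the host using only the standard library (port 8080 is published in the compose file; the field names match the /status handler above):

```python
import json
import urllib.request

with urllib.request.urlopen("http://localhost:8080/status", timeout=5) as resp:
    status = json.load(resp)

print("frame queue:", status["stream_1_queue"])
print("clip queue:", status["stream_2_queue"])
for item in status["positives"]:
    print("positive:", item["gif"], item["timestamp"])
```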