Add MQTT publish request and response models, and implement publish route

- Introduced MQTTPublishRequest and MQTTPublishResponse models for handling MQTT message publishing.
- Implemented a new POST route for publishing MQTT messages, including error handling and logging (a rough sketch of the models and route follows below).
- Enhanced the StandaloneAutoRecorder with improved logging during manual recording start.
- Updated the frontend to include an MQTT Debug Panel for better monitoring and debugging capabilities.
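
The MQTT models and publish route live in files that are not shown in the excerpt below, which only covers the media API streaming changes. The following is a minimal sketch of what a Pydantic request/response pair and a FastAPI publish endpoint with error handling and logging could look like; the field names, the /mqtt/publish path, the broker address, and the use of paho-mqtt are assumptions, not the commit's actual code.

```python
# Hypothetical sketch - the real models and route are in files not shown in this excerpt.
import logging

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
import paho.mqtt.publish as mqtt_publish  # assumed client library

logger = logging.getLogger(__name__)
app = FastAPI()


class MQTTPublishRequest(BaseModel):
    topic: str                 # assumed field names
    payload: str
    qos: int = 0
    retain: bool = False


class MQTTPublishResponse(BaseModel):
    success: bool
    message: str


@app.post("/mqtt/publish", response_model=MQTTPublishResponse)  # assumed path
def publish_mqtt_message(req: MQTTPublishRequest) -> MQTTPublishResponse:
    try:
        mqtt_publish.single(
            req.topic,
            payload=req.payload,
            qos=req.qos,
            retain=req.retain,
            hostname="localhost",  # assumed broker address
        )
        logger.info("Published MQTT message to %s", req.topic)
        return MQTTPublishResponse(success=True, message=f"Published to {req.topic}")
    except Exception as exc:
        logger.exception("MQTT publish failed")
        raise HTTPException(status_code=502, detail=str(exc))
```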
Author: salirezav
Date: 2025-12-01 13:07:36 -05:00
parent 5070d9b2ca
commit 73849b40a8
11 changed files with 950 additions and 19 deletions


@@ -20,10 +20,10 @@ transcoding_semaphore = threading.Semaphore(MAX_CONCURRENT_TRANSCODING)
 app = FastAPI(title="Media API", version="0.1.0")
-# CORS for dashboard at exp-dash:8080 (and localhost for convenience)
+# CORS for dashboard - allow all origins to support access from different IPs/hostnames
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=["http://exp-dash:8080", "http://localhost:8080"],
+    allow_origins=["*"],  # Allow all origins for flexibility
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"]
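
One caveat about the design choice above: the CORS spec does not allow Access-Control-Allow-Origin: * on credentialed (cookie-bearing) requests, so if the dashboard ever relies on cookies together with allow_credentials=True, an origin regex is a common alternative. The snippet below is only a sketch, not part of the commit; the any-host-on-port-8080 pattern is an assumption based on the original exp-dash:8080 and localhost:8080 origins.

```python
# Alternative sketch (not from the commit): an origin regex keeps the
# Access-Control-Allow-Origin header echoing a concrete origin, which browsers
# require when credentials (cookies) are involved.
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI(title="Media API", version="0.1.0")

app.add_middleware(
    CORSMiddleware,
    allow_origin_regex=r"https?://.*:8080",  # assumed pattern for the dashboard hosts
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
```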
@@ -143,15 +143,25 @@ def get_video_mime_type(path: pathlib.Path) -> str:
     return mime_types.get(ext, "video/mp4")

-def open_file_range(path: pathlib.Path, start: int, end: Optional[int]):
+def generate_file_range(path: pathlib.Path, start: int, end: Optional[int], chunk_size: int = 8192):
+    """
+    Generator that yields file chunks in a memory-efficient way.
+    This prevents MemoryError when streaming large video files.
+    """
     file_size = path.stat().st_size
     if end is None or end >= file_size:
         end = file_size - 1
-    length = end - start + 1
+    remaining = end - start + 1
     with open(path, 'rb') as f:
         f.seek(start)
-        chunk = f.read(length)
-    return chunk, file_size, start, end
+        while remaining > 0:
+            read_size = min(chunk_size, remaining)
+            chunk = f.read(read_size)
+            if not chunk:
+                break
+            yield chunk
+            remaining -= len(chunk)

 @app.head("/videos/{file_id:path}/stream")
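
A quick way to see the generator's memory behavior is to drive it directly: each iteration yields at most chunk_size bytes, and the total never exceeds the requested range. This is only a sketch; the import path and the sample file are hypothetical placeholders.

```python
# Sketch: exercise generate_file_range directly on a local file.
import pathlib

from media_api import generate_file_range  # hypothetical import path

p = pathlib.Path("sample.mp4")             # any file on disk
total = 0
for chunk in generate_file_range(p, start=0, end=1_048_575, chunk_size=8192):
    assert len(chunk) <= 8192              # only one small chunk in memory at a time
    total += len(chunk)
print(total)                               # at most 1_048_576 bytes (the requested range)
```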
@@ -212,14 +222,22 @@ def stream_file(request: Request, file_id: str):
     if start > end:
         raise HTTPException(status_code=416, detail="Range Not Satisfiable")

-    chunk, size, actual_start, actual_end = open_file_range(p, start, end)
+    # Calculate content length
+    content_length = end - start + 1

+    # Use streaming response to avoid loading entire chunk into memory
+    # This prevents MemoryError for large video files
     headers = {
         **base_headers,
-        "Content-Range": f"bytes {actual_start}-{actual_end}/{size}",
-        "Content-Length": str(len(chunk)),
+        "Content-Range": f"bytes {start}-{end}/{file_size}",
+        "Content-Length": str(content_length),
     }
-    return Response(content=chunk, status_code=206, headers=headers)
+    return StreamingResponse(
+        generate_file_range(p, start, end),
+        media_type=content_type,
+        headers=headers,
+        status_code=206
+    )

 # Convenience endpoint: pass file_id via query instead of path (accepts raw or URL-encoded)
 @app.head("/videos/stream")
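
From a client's perspective the patched endpoint still answers range requests with 206 Partial Content, but the body now arrives in chunks instead of one large in-memory buffer. A quick check with the requests library (host, port, and file_id below are placeholders):

```python
# Sketch: verify partial-content streaming from the patched endpoint.
import requests

url = "http://localhost:8000/videos/example.mp4/stream"   # hypothetical host and file_id
headers = {"Range": "bytes=0-1048575"}                     # request the first 1 MiB

with requests.get(url, headers=headers, stream=True) as resp:
    print(resp.status_code)                   # expected: 206
    print(resp.headers.get("Content-Range"))  # e.g. bytes 0-1048575/<file size>
    received = sum(len(chunk) for chunk in resp.iter_content(chunk_size=8192))
    print(received)                           # 1048576 for files at least that large
```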