diff --git a/Dockerfile b/Dockerfile
index 47d637d..53c870f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -60,6 +60,7 @@ COPY --chown=app:app ./alembic.ini /app/alembic.ini
COPY --chown=app:app ./backend /app/backend
COPY --chown=app:app --from=node_builder /app/exported /app/frontend/exported
COPY --chown=app:app --from=python_builder /opt/python /opt/python
+COPY --from=ghcr.io/arabcoders/jellyfin-ffmpeg /usr/bin/ffmpeg /usr/bin/ffmpeg
COPY --from=ghcr.io/arabcoders/jellyfin-ffmpeg /usr/bin/ffprobe /usr/bin/ffprobe
# Install fbc CLI script
diff --git a/backend/app/cleanup.py b/backend/app/cleanup.py
index 538c7d7..3554eab 100644
--- a/backend/app/cleanup.py
+++ b/backend/app/cleanup.py
@@ -76,7 +76,9 @@ async def _remove_stale_uploads(session: AsyncSession) -> int:
total_removed = 0
stmt: Select[tuple[models.UploadRecord]] = (
- select(models.UploadRecord).where(models.UploadRecord.status != "completed").where(models.UploadRecord.created_at < cutoff_naive)
+ select(models.UploadRecord)
+        .where(models.UploadRecord.status.in_(["pending", "in_progress", "postprocessing", "failed"]))
+ .where(models.UploadRecord.created_at < cutoff_naive)
)
res: Result[tuple[models.UploadRecord]] = await session.execute(stmt)
diff --git a/backend/app/main.py b/backend/app/main.py
index eb66f59..53cb788 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -3,11 +3,13 @@
import os
from contextlib import asynccontextmanager, suppress
from pathlib import Path
+from typing import Annotated
-from fastapi import FastAPI, HTTPException, Request, status
+from fastapi import FastAPI, Header, HTTPException, Request, status
from fastapi.concurrency import run_in_threadpool
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, JSONResponse
+from fastapi.templating import Jinja2Templates
from backend.app import version
@@ -16,6 +18,7 @@
from .config import settings
from .db import engine
from .migrate import run_migrations
+from .postprocessing import ProcessingQueue
def create_app() -> FastAPI:
@@ -24,6 +27,8 @@ def create_app() -> FastAPI:
Path(settings.storage_path).mkdir(parents=True, exist_ok=True)
Path(settings.config_path).mkdir(parents=True, exist_ok=True)
+ templates = Jinja2Templates(directory=str(Path(__file__).parent / "templates"))
+
@asynccontextmanager
async def lifespan(app: FastAPI):
"""
@@ -36,11 +41,17 @@ async def lifespan(app: FastAPI):
if not settings.skip_migrations:
await run_in_threadpool(run_migrations)
+ queue = ProcessingQueue()
+ queue.start_worker()
+ app.state.processing_queue = queue
+
if not settings.skip_cleanup:
app.state.cleanup_task = asyncio.create_task(start_cleanup_loop(), name="cleanup_loop")
yield
+ await queue.stop_worker()
+
if not settings.skip_cleanup:
task: asyncio.Task | None = getattr(app.state, "cleanup_task", None)
if task:
@@ -139,39 +150,143 @@ def app_version() -> dict[str, str]:
app.include_router(getattr(routers, _route).router)
frontend_dir: Path = Path(settings.frontend_export_path).resolve()
- if frontend_dir.exists():
- @app.get("/{full_path:path}", name="static_frontend")
- async def frontend(full_path: str) -> FileResponse:
- """
- Serve static frontend files.
+ @app.get("/f/{token}", name="share_page")
+ @app.get("/f/{token}/")
+ async def share_page(token: str, request: Request, user_agent: Annotated[str | None, Header()] = None):
+ """Handle /f/{token} with bot detection for embed preview."""
+ from sqlalchemy import select
+
+ from backend.app import models, utils
+ from backend.app.db import get_db
+
+ user_agent_lower: str = (user_agent or "").lower()
+ is_bot = any(bot in user_agent_lower for bot in ["discordbot", "twitterbot", "slackbot", "facebookexternalhit", "whatsapp"])
+
+ if is_bot and settings.allow_public_downloads:
+ async for db in get_db():
+ stmt = select(models.UploadToken).where((models.UploadToken.token == token) | (models.UploadToken.download_token == token))
+ result = await db.execute(stmt)
+ token_row = result.scalar_one_or_none()
+
+ if token_row:
+ uploads_stmt = (
+ select(models.UploadRecord)
+ .where(models.UploadRecord.token_id == token_row.id, models.UploadRecord.status == "completed")
+ .order_by(models.UploadRecord.created_at.desc())
+ )
+ uploads_result = await db.execute(uploads_stmt)
+ uploads = uploads_result.scalars().all()
+
+ media_files = [u for u in uploads if u.mimetype and utils.is_multimedia(u.mimetype)]
+
+ if media_files:
+ first_media = media_files[0]
+
+ is_video = first_media.mimetype.startswith("video/")
+ ffprobe_data = None
+ if first_media.meta_data and isinstance(first_media.meta_data, dict):
+ ffprobe_data = first_media.meta_data.get("ffprobe")
+
+ video_metadata = utils.extract_video_metadata(ffprobe_data)
+
+ other_files = [
+ {
+ "name": u.filename or "Unknown",
+ "size": utils.format_file_size(u.size_bytes) if u.size_bytes else "Unknown",
+ }
+ for u in uploads
+ if u.public_id != first_media.public_id
+ ]
+
+ media_url = str(
+ request.url_for("download_file", download_token=token_row.download_token, upload_id=first_media.public_id)
+ )
+ share_url = str(request.url_for("share_page", token=token))
+
+
+ is_audio = first_media.mimetype.startswith("audio/")
+
+ context = {
+ "request": request,
+ "title": first_media.filename or "Shared Media",
+ "description": f"{len(uploads)} file(s) shared" if len(uploads) > 1 else "Shared file",
+ "og_type": "video.other" if is_video else "music.song",
+ "share_url": share_url,
+ "media_url": media_url,
+ "mime_type": first_media.mimetype,
+ "is_video": is_video,
+ "is_audio": is_audio,
+ "width": video_metadata.get("width"),
+ "height": video_metadata.get("height"),
+ "duration": video_metadata.get("duration"),
+ "duration_formatted": utils.format_duration(video_metadata["duration"])
+ if video_metadata.get("duration")
+ else None,
+ "file_size": utils.format_file_size(first_media.size_bytes) if first_media.size_bytes else None,
+ "other_files": other_files,
+ }
+
+ return templates.TemplateResponse(
+ request=request,
+ name="share_preview.html",
+ context=context,
+ status_code=status.HTTP_200_OK,
+ )
+
+ if frontend_dir.exists():
+ index_file = frontend_dir / "index.html"
+ if index_file.exists():
+ return FileResponse(index_file, status_code=status.HTTP_200_OK)
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
- Args:
- full_path (str): The requested file path.
+ @app.get("/t/{token}", name="upload_page")
+ @app.get("/t/{token}/")
+ async def upload_page(token: str, request: Request, user_agent: Annotated[str | None, Header()] = None):
+ """Handle /t/{token} with bot detection for embed preview."""
+ if not frontend_dir.exists():
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
- Returns:
- FileResponse: The response containing the requested file.
+ index_file = frontend_dir / "index.html"
+ if not index_file.exists():
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
- """
- if full_path.startswith("api/"):
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+ return FileResponse(index_file, status_code=status.HTTP_200_OK)
+
+ @app.get("/{full_path:path}", name="static_frontend")
+ async def frontend(full_path: str) -> FileResponse:
+ """
+ Serve static frontend files.
+
+ Args:
+ full_path (str): The requested file path.
- if not full_path or "/" == full_path:
- index_file: Path = frontend_dir / "index.html"
- if index_file.exists():
- return FileResponse(index_file, status_code=status.HTTP_200_OK)
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+ Returns:
+ FileResponse: The response containing the requested file.
+
+ """
+ if full_path.startswith("api/"):
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
- requested_file: Path = frontend_dir / full_path
- if requested_file.is_file():
- return FileResponse(requested_file, status_code=status.HTTP_200_OK)
+ if not frontend_dir.exists():
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+ if not full_path or "/" == full_path:
index_file: Path = frontend_dir / "index.html"
if index_file.exists():
return FileResponse(index_file, status_code=status.HTTP_200_OK)
-
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+ requested_file: Path = frontend_dir / full_path
+ if requested_file.is_file():
+ return FileResponse(requested_file, status_code=status.HTTP_200_OK)
+
+ index_file: Path = frontend_dir / "index.html"
+ if index_file.exists():
+ return FileResponse(index_file, status_code=status.HTTP_200_OK)
+
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
+
return app
diff --git a/backend/app/models.py b/backend/app/models.py
index c793f30..d30d628 100644
--- a/backend/app/models.py
+++ b/backend/app/models.py
@@ -18,7 +18,7 @@ class UploadToken(Base):
uploads_used: Mapped[int] = mapped_column(Integer, nullable=False, default=0)
allowed_mime: Mapped[list | None] = mapped_column("allowed_mime", JSON, nullable=True)
disabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
- created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, default=lambda: datetime.now(UTC))
+ created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(UTC))
uploads: Mapped[list["UploadRecord"]] = relationship("UploadRecord", back_populates="token", cascade="all, delete-orphan")
@@ -42,7 +42,7 @@ class UploadRecord(Base):
upload_length: Mapped[int | None] = mapped_column(BigInteger)
upload_offset: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)
status: Mapped[str] = mapped_column(String(32), nullable=False, default="pending")
- created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, default=lambda: datetime.now(UTC))
- completed_at: Mapped[datetime | None] = mapped_column(DateTime)
+ created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(UTC))
+ completed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
token: Mapped[UploadToken] = relationship("UploadToken", back_populates="uploads")
diff --git a/backend/app/postprocessing.py b/backend/app/postprocessing.py
new file mode 100644
index 0000000..c862db7
--- /dev/null
+++ b/backend/app/postprocessing.py
@@ -0,0 +1,155 @@
+"""
+Post-processing worker for uploaded files.
+
+Handles background tasks like:
+- MP4 faststart optimization
+- FFprobe metadata extraction
+- Future: thumbnail generation, video transcoding, etc.
+"""
+
+import asyncio
+import contextlib
+import logging
+from datetime import UTC, datetime
+from pathlib import Path
+
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import attributes
+
+from . import models
+from .db import SessionLocal
+from .utils import ensure_faststart_mp4, extract_ffprobe_metadata, is_multimedia
+
+logger = logging.getLogger(__name__)
+
+
+class ProcessingQueue:
+ """Background processing queue for uploads."""
+
+ def __init__(self) -> None:
+ """Initialize the processing queue."""
+ self._queue: asyncio.Queue[str] = asyncio.Queue()
+ self._worker_task: asyncio.Task | None = None
+
+ async def enqueue(self, upload_id: str) -> None:
+ """Add an upload to the processing queue."""
+ await self._queue.put(upload_id)
+ logger.info("Enqueued upload %s for post-processing", upload_id)
+
+ def start_worker(self) -> None:
+ """Start the background worker if not already running."""
+ if self._worker_task is None or self._worker_task.done():
+ self._worker_task = asyncio.create_task(self._run_worker(), name="postprocessing_worker")
+ logger.info("Started post-processing worker")
+
+ async def stop_worker(self) -> None:
+ """Stop the background worker."""
+ if self._worker_task and not self._worker_task.done():
+ self._worker_task.cancel()
+ with contextlib.suppress(asyncio.CancelledError):
+ await self._worker_task
+ logger.info("Stopped post-processing worker")
+
+ async def _run_worker(self) -> None:
+ """Background worker that processes uploads from the queue."""
+ logger.info("Post-processing worker started")
+
+ while True:
+ try:
+ upload_id = await self._queue.get()
+ try:
+ await self._process_upload_by_id(upload_id)
+ except Exception:
+ logger.exception("Failed to process upload %s", upload_id)
+ finally:
+ self._queue.task_done()
+ except asyncio.CancelledError:
+ logger.info("Post-processing worker cancelled")
+ break
+ except Exception:
+ logger.exception("Error in post-processing worker loop")
+ await asyncio.sleep(1)
+
+ async def _process_upload_by_id(self, upload_id: str) -> None:
+ """Process a single upload by ID."""
+ async with SessionLocal() as session:
+ try:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
+ result = await session.execute(stmt)
+ record = result.scalar_one_or_none()
+
+ if record is None:
+ logger.warning("Upload %s not found for processing", upload_id)
+ return
+
+ await process_upload(session, record)
+ finally:
+ await session.close()
+
+
+async def process_upload(session: AsyncSession, record: models.UploadRecord) -> bool:
+ """
+ Process a single upload record.
+
+ Args:
+ session: Database session
+ record: Upload record to process
+
+ Returns:
+ True if processing succeeded, False otherwise
+
+ """
+ if not record.storage_path:
+ logger.error("Upload %s has no storage path", record.public_id)
+ record.status = "failed"
+        record.meta_data = {**(record.meta_data or {}), "error": "No storage path"}
+ attributes.flag_modified(record, "meta_data")
+ await session.commit()
+ return False
+
+ path = Path(record.storage_path)
+ if not path.exists():
+ logger.error("Upload %s file not found: %s", record.public_id, path)
+ record.status = "failed"
+        record.meta_data = {**(record.meta_data or {}), "error": "File not found"}
+ attributes.flag_modified(record, "meta_data")
+ await session.commit()
+ return False
+
+ try:
+ if record.mimetype and is_multimedia(record.mimetype):
+ logger.info("Processing multimedia upload %s", record.public_id)
+
+ try:
+ modified = await ensure_faststart_mp4(path, record.mimetype)
+ if modified:
+ logger.info("Applied faststart to upload %s", record.public_id)
+ except Exception:
+ logger.exception("Failed to apply faststart to upload %s", record.public_id)
+
+ ffprobe_data = await extract_ffprobe_metadata(path)
+ if ffprobe_data is not None:
+ if record.meta_data is None:
+ record.meta_data = {}
+
+ record.meta_data["ffprobe"] = ffprobe_data
+ attributes.flag_modified(record, "meta_data")
+ logger.info("Extracted ffprobe metadata for upload %s", record.public_id)
+
+ record.status = "completed"
+ record.completed_at = datetime.now(UTC)
+ await session.commit()
+ logger.info("Completed processing upload %s", record.public_id)
+
+ except Exception:
+ logger.exception("Failed to process upload %s", record.public_id)
+ record.status = "failed"
+ if record.meta_data is None:
+ record.meta_data = {}
+ record.meta_data["error"] = "Post-processing failed"
+ attributes.flag_modified(record, "meta_data")
+ await session.commit()
+ return False
+ else:
+ return True
diff --git a/backend/app/routers/tokens.py b/backend/app/routers/tokens.py
index 45d5254..101b3ac 100644
--- a/backend/app/routers/tokens.py
+++ b/backend/app/routers/tokens.py
@@ -94,14 +94,10 @@ async def create_token(
await db.commit()
await db.refresh(record)
- upload_url = str(request.url_for("health"))
- if upload_token:
- upload_url: str = upload_url.replace("/api/health", f"/t/{upload_token}")
-
return schemas.TokenResponse(
token=upload_token,
download_token=download_token,
- upload_url=upload_url,
+ upload_url=str(request.app.url_path_for("upload_page", token=upload_token)),
expires_at=record.expires_at,
max_uploads=record.max_uploads,
max_size_bytes=record.max_size_bytes,
@@ -116,7 +112,7 @@ async def get_token(
db: Annotated[AsyncSession, Depends(get_db)],
) -> schemas.TokenPublicInfo:
"""
- Get information about an upload token.
+    Get information about an upload token.
Args:
request (Request): The FastAPI request object.
@@ -124,7 +120,7 @@ async def get_token(
db (AsyncSession): The database session.
Returns:
- TokenPublicInfo: The upload token information.
+        TokenPublicInfo: The upload token information.
"""
stmt: Select[tuple[models.UploadToken]] = select(models.UploadToken).where(
@@ -135,18 +131,6 @@ async def get_token(
if not (token_row := res.scalar_one_or_none()):
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Token not found")
- now: datetime = datetime.now(UTC)
- expires_at: datetime = token_row.expires_at
-
- if expires_at.tzinfo is None:
- expires_at = expires_at.replace(tzinfo=UTC)
-
- if token_row.disabled:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token is disabled")
-
- if expires_at < now:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token has expired")
-
uploads_stmt: Select[tuple[models.UploadRecord]] = (
select(models.UploadRecord).where(models.UploadRecord.token_id == token_row.id).order_by(models.UploadRecord.created_at.desc())
)
@@ -156,9 +140,9 @@ async def get_token(
uploads_list: list[schemas.UploadRecordResponse] = []
for u in uploads:
item: schemas.UploadRecordResponse = schemas.UploadRecordResponse.model_validate(u, from_attributes=True)
- item.upload_url = str(request.url_for("tus_head", upload_id=u.public_id))
- item.download_url = str(request.url_for("download_file", download_token=token_row.download_token, upload_id=u.public_id))
- item.info_url = str(request.url_for("get_file_info", download_token=token_row.download_token, upload_id=u.public_id))
+ item.upload_url = str(request.app.url_path_for("tus_head", upload_id=u.public_id))
+ item.download_url = str(request.app.url_path_for("download_file", download_token=token_row.download_token, upload_id=u.public_id))
+ item.info_url = str(request.app.url_path_for("get_file_info", download_token=token_row.download_token, upload_id=u.public_id))
uploads_list.append(item)
return schemas.TokenPublicInfo(
@@ -326,9 +310,9 @@ async def list_token_uploads(
uploads_list: list[schemas.UploadRecordResponse] = []
for u in uploads:
item: schemas.UploadRecordResponse = schemas.UploadRecordResponse.model_validate(u, from_attributes=True)
- item.download_url = str(request.url_for("download_file", download_token=token_row.download_token, upload_id=u.public_id))
- item.upload_url = str(request.url_for("tus_head", upload_id=u.public_id))
- item.info_url = str(request.url_for("get_file_info", download_token=token_row.download_token, upload_id=u.public_id))
+ item.download_url = str(request.app.url_path_for("download_file", download_token=token_row.download_token, upload_id=u.public_id))
+ item.upload_url = str(request.app.url_path_for("tus_head", upload_id=u.public_id))
+ item.info_url = str(request.app.url_path_for("get_file_info", download_token=token_row.download_token, upload_id=u.public_id))
uploads_list.append(item)
return uploads_list
@@ -341,7 +325,7 @@ async def get_file_info(
download_token: str,
upload_id: str,
db: Annotated[AsyncSession, Depends(get_db)],
- _: Annotated[bool, Depends(optional_admin_check)],
+ is_admin: Annotated[bool, Depends(optional_admin_check)],
) -> schemas.UploadRecordResponse:
"""
Retrieve metadata about a specific uploaded file.
@@ -351,6 +335,7 @@ async def get_file_info(
download_token (str): The download token associated with the upload.
upload_id (str): The public ID of the upload.
db (AsyncSession): The database session.
+ is_admin (bool): Whether the request is authenticated as admin.
Returns:
UploadRecordResponse: Metadata about the uploaded file.
@@ -362,6 +347,19 @@ async def get_file_info(
if not (token_row := token_res.scalar_one_or_none()):
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Download token not found")
+ # Check token status (admin can bypass)
+ if not is_admin:
+ now: datetime = datetime.now(UTC)
+ expires_at: datetime = token_row.expires_at
+ if expires_at.tzinfo is None:
+ expires_at = expires_at.replace(tzinfo=UTC)
+
+ if expires_at < now:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token has expired")
+
+ if token_row.disabled:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token is disabled")
+
upload_stmt: Select[tuple[models.UploadRecord]] = select(models.UploadRecord).where(
models.UploadRecord.public_id == upload_id, models.UploadRecord.token_id == token_row.id
)
@@ -378,9 +376,9 @@ async def get_file_info(
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File missing")
item: schemas.UploadRecordResponse = schemas.UploadRecordResponse.model_validate(record, from_attributes=True)
- item.download_url = str(request.url_for("download_file", download_token=download_token, upload_id=upload_id))
- item.upload_url = str(request.url_for("tus_head", upload_id=upload_id))
- item.info_url = str(request.url_for("get_file_info", download_token=download_token, upload_id=upload_id))
+ item.download_url = str(request.app.url_path_for("download_file", download_token=download_token, upload_id=upload_id))
+ item.upload_url = str(request.app.url_path_for("tus_head", upload_id=upload_id))
+ item.info_url = str(request.app.url_path_for("get_file_info", download_token=download_token, upload_id=upload_id))
return item
@@ -390,7 +388,7 @@ async def download_file(
download_token: str,
upload_id: str,
db: Annotated[AsyncSession, Depends(get_db)],
- _: Annotated[bool, Depends(optional_admin_check)],
+ is_admin: Annotated[bool, Depends(optional_admin_check)],
) -> FileResponse:
"""
Download the file associated with a specific upload.
@@ -399,6 +397,7 @@ async def download_file(
download_token (str): The download token associated with the upload.
upload_id (str): The public ID of the upload.
db (AsyncSession): The database session.
+ is_admin (bool): Whether the request is authenticated as admin.
Returns:
FileResponse: The file response for downloading the file.
@@ -409,6 +408,19 @@ async def download_file(
if not (token_row := token_res.scalar_one_or_none()):
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Download token not found")
+ # Check token status (admin can bypass)
+ if not is_admin:
+ now: datetime = datetime.now(UTC)
+ expires_at: datetime = token_row.expires_at
+ if expires_at.tzinfo is None:
+ expires_at = expires_at.replace(tzinfo=UTC)
+
+ if expires_at < now:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token has expired")
+
+ if token_row.disabled:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token is disabled")
+
upload_stmt: Select[tuple[models.UploadRecord]] = select(models.UploadRecord).where(
models.UploadRecord.public_id == upload_id, models.UploadRecord.token_id == token_row.id
)
diff --git a/backend/app/routers/uploads.py b/backend/app/routers/uploads.py
index 40825cf..8fc4bca 100644
--- a/backend/app/routers/uploads.py
+++ b/backend/app/routers/uploads.py
@@ -9,14 +9,14 @@
from sqlalchemy import select
from sqlalchemy.engine.result import Result
from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.orm import attributes
from sqlalchemy.sql.selectable import Select
from backend.app import models, schemas
from backend.app.config import settings
from backend.app.db import get_db
from backend.app.metadata_schema import validate_metadata
-from backend.app.utils import detect_mimetype, extract_ffprobe_metadata, is_multimedia, mime_allowed
+from backend.app.postprocessing import ProcessingQueue
+from backend.app.utils import detect_mimetype, is_multimedia, mime_allowed
if TYPE_CHECKING:
from sqlalchemy.engine.result import Result
@@ -25,19 +25,38 @@
router = APIRouter(prefix="/api/uploads", tags=["uploads"])
-async def _ensure_token(db: AsyncSession, token_value: str) -> models.UploadToken:
+def get_processing_queue(request: Request) -> ProcessingQueue | None:
+ """Get the processing queue from app state (returns None if not available in tests)."""
+ return getattr(request.app.state, "processing_queue", None)
+
+
+async def _ensure_token(
+ db: AsyncSession,
+ token_value: str | None = None,
+ token_id: int | None = None,
+ check_remaining: bool = True,
+) -> models.UploadToken:
"""
- Ensure the upload token is valid, not expired or disabled, and has remaining uploads.
+ Ensure the upload token is valid, not expired or disabled, and optionally has remaining uploads.
Args:
db (AsyncSession): Database session.
- token_value (str): The upload token string.
+ token_value (str | None): The upload token string.
+ token_id (int | None): The upload token ID.
+ check_remaining (bool): Whether to check remaining uploads. Defaults to True.
Returns:
UploadToken: The valid upload token object.
"""
- stmt: Select[tuple[models.UploadToken]] = select(models.UploadToken).where(models.UploadToken.token == token_value)
+ if token_value:
+ stmt: Select[tuple[models.UploadToken]] = select(models.UploadToken).where(models.UploadToken.token == token_value)
+ elif token_id:
+ stmt: Select[tuple[models.UploadToken]] = select(models.UploadToken).where(models.UploadToken.id == token_id)
+ else:
+ msg = "Either token_value or token_id must be provided"
+ raise ValueError(msg)
+
res: Result[tuple[models.UploadToken]] = await db.execute(stmt)
if not (token := res.scalar_one_or_none()):
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Token not found")
@@ -48,10 +67,12 @@ async def _ensure_token(db: AsyncSession, token_value: str) -> models.UploadToke
if expires_at.tzinfo is None:
expires_at = expires_at.replace(tzinfo=UTC)
- if token.disabled or expires_at < now:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token expired or disabled")
+ if expires_at < now:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token expired")
+ if token.disabled:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Token disabled")
- if token.remaining_uploads <= 0:
+ if check_remaining and token.remaining_uploads <= 0:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Upload limit reached")
return token
@@ -144,8 +165,8 @@ async def initiate_upload(
return schemas.InitiateUploadResponse(
upload_id=record.public_id,
- upload_url=str(request.url_for("tus_head", upload_id=record.public_id)),
- download_url=str(request.url_for("download_file", download_token=token_row.download_token, upload_id=record.public_id)),
+ upload_url=str(request.app.url_path_for("tus_head", upload_id=record.public_id)),
+ download_url=str(request.app.url_path_for("download_file", download_token=token_row.download_token, upload_id=record.public_id)),
meta_data=cleaned_metadata,
allowed_mime=token_row.allowed_mime,
remaining_uploads=token_row.remaining_uploads,
@@ -166,6 +187,7 @@ async def tus_head(upload_id: str, db: Annotated[AsyncSession, Depends(get_db)])
"""
record: models.UploadRecord = await _get_upload_record(db, upload_id)
+ await _ensure_token(db, token_id=record.token_id, check_remaining=False)
if record.upload_length is None:
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Upload length unknown")
@@ -185,6 +207,7 @@ async def tus_patch(
upload_id: str,
request: Request,
db: Annotated[AsyncSession, Depends(get_db)],
+ queue: Annotated[ProcessingQueue | None, Depends(get_processing_queue)],
upload_offset: Annotated[int, Header(convert_underscores=False, alias="Upload-Offset")] = ...,
content_length: Annotated[int | None, Header()] = None,
content_type: Annotated[str, Header(convert_underscores=False, alias="Content-Type")] = ...,
@@ -196,6 +219,7 @@ async def tus_patch(
upload_id (str): The public ID of the upload.
request (Request): The incoming HTTP request.
db (AsyncSession): Database session.
+ queue (ProcessingQueue | None): The processing queue for post-processing.
upload_offset (int): The current upload offset from the client.
content_length (int | None): The Content-Length header value.
content_type (str): The Content-Type header value.
@@ -216,6 +240,7 @@ async def tus_patch(
)
record: models.UploadRecord = await _get_upload_record(db, upload_id)
+ await _ensure_token(db, token_id=record.token_id, check_remaining=False)
if record.upload_length is None:
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="Upload length unknown")
@@ -270,25 +295,25 @@ async def tus_patch(
record.mimetype = actual_mimetype
if is_multimedia(actual_mimetype):
- ffprobe_data: dict | None = await extract_ffprobe_metadata(path)
- if ffprobe_data is not None:
- if record.meta_data is None:
- record.meta_data = {}
-
- record.meta_data["ffprobe"] = ffprobe_data
- attributes.flag_modified(record, "meta_data")
-
- record.status = "completed"
- record.completed_at = datetime.now(UTC)
+ record.status = "postprocessing"
+ await db.commit()
+ await db.refresh(record)
+ if queue:
+ await queue.enqueue(record.public_id)
+ else:
+ record.status = "completed"
+ record.completed_at = datetime.now(UTC)
+ await db.commit()
+ await db.refresh(record)
else:
record.status = "in_progress"
- try:
- await db.commit()
- await db.refresh(record)
- except Exception:
- await db.rollback()
- await db.refresh(record)
+ try:
+ await db.commit()
+ await db.refresh(record)
+ except Exception:
+ await db.rollback()
+ await db.refresh(record)
return Response(
status_code=status.HTTP_204_NO_CONTENT,
@@ -327,6 +352,8 @@ async def tus_delete(upload_id: str, db: Annotated[AsyncSession, Depends(get_db)
"""
record: models.UploadRecord = await _get_upload_record(db, upload_id)
+ await _ensure_token(db, token_id=record.token_id, check_remaining=False)
+
path = Path(record.storage_path or "")
if path.exists():
@@ -351,11 +378,8 @@ async def mark_complete(upload_id: str, db: Annotated[AsyncSession, Depends(get_
UploadRecord: The updated upload record.
"""
- stmt: Select[tuple[models.UploadRecord]] = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
- res: Result[tuple[models.UploadRecord]] = await db.execute(stmt)
-
- if not (record := res.scalar_one_or_none()):
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Upload not found")
+ record: models.UploadRecord = await _get_upload_record(db, upload_id)
+ await _ensure_token(db, token_id=record.token_id, check_remaining=False)
record.status = "completed"
record.completed_at = datetime.now(UTC)
@@ -383,12 +407,7 @@ async def cancel_upload(
"""
record: models.UploadRecord = await _get_upload_record(db, upload_id)
-
- stmt: Select[tuple[models.UploadToken]] = select(models.UploadToken).where(models.UploadToken.token == token)
- res: Result[tuple[models.UploadToken]] = await db.execute(stmt)
-
- if not (token_row := res.scalar_one_or_none()):
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Token not found")
+ token_row: models.UploadToken = await _ensure_token(db, token_value=token, check_remaining=False)
if record.token_id != token_row.id:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Upload does not belong to this token")
diff --git a/backend/app/templates/share_preview.html b/backend/app/templates/share_preview.html
new file mode 100644
index 0000000..e8e7dd1
--- /dev/null
+++ b/backend/app/templates/share_preview.html
@@ -0,0 +1,360 @@
+
+
+
+
+
+
+ {{ title }}
+
+
+
+
+ {% if description %}
+
+ {% endif %}
+
+ {% if is_video %}
+
+
+
+
+ {% if width %}
+
+ {% endif %}
+ {% if height %}
+
+ {% endif %}
+ {% if duration %}
+
+ {% endif %}
+ {% elif is_audio %}
+
+
+
+
+ {% endif %}
+
+
+
+ {% if description %}
+
+ {% endif %}
+
+
+
+
+
+
+
+
+
+
+ {% if is_video %}
+
+ {% elif is_audio %}
+
+ {% endif %}
+
+
+
+
+
File Information
+
+ {% if mime_type %}
+
+ Type
+ {{ mime_type }}
+
+ {% endif %}
+ {% if file_size %}
+
+ Size
+ {{ file_size }}
+
+ {% endif %}
+ {% if duration_formatted %}
+
+ Duration
+ {{ duration_formatted }}
+
+ {% endif %}
+ {% if width and height %}
+
+ Resolution
+ {{ width }} × {{ height }}
+
+ {% endif %}
+
+
+
+ {% if other_files %}
+
+
+
+ {% for file in other_files %}
+
+
+
{{ file.name }}
+
{{ file.size }}
+
+
+ {% endfor %}
+
+
+ {% endif %}
+
+
+
+
\ No newline at end of file
diff --git a/backend/app/utils.py b/backend/app/utils.py
index 5ea265d..5f2dba4 100644
--- a/backend/app/utils.py
+++ b/backend/app/utils.py
@@ -1,9 +1,13 @@
"""Utility functions for file handling and validation."""
import asyncio
+import contextlib
import json
+import os
+import tempfile
from pathlib import Path
+import aiofiles
import magic
MIME = magic.Magic(mime=True)
@@ -151,3 +155,185 @@ def parse_size(text: str) -> int:
return int(num * MULTIPLIERS[unit])
return int(s)
+
+
+def extract_video_metadata(ffprobe_data: dict | None) -> dict:
+ """
+ Extract video metadata (width, height, duration) from ffprobe JSON output.
+
+ Args:
+ ffprobe_data: ffprobe JSON output dictionary
+
+ Returns:
+ Dictionary with width, height, duration keys (values may be None)
+
+ """
+ result = {"width": None, "height": None, "duration": None}
+
+ if not ffprobe_data:
+ return result
+
+ if "format" in ffprobe_data and "duration" in ffprobe_data["format"]:
+ with contextlib.suppress(ValueError, TypeError):
+ result["duration"] = int(float(ffprobe_data["format"]["duration"]))
+
+ if "streams" in ffprobe_data:
+ for stream in ffprobe_data["streams"]:
+ if stream.get("codec_type") == "video":
+ result["width"] = stream.get("width")
+ result["height"] = stream.get("height")
+ break
+
+ return result
+
+
def format_file_size(size_bytes: int) -> str:
    """
    Format file size in bytes to human-readable string.

    Args:
        size_bytes: File size in bytes

    Returns:
        Formatted string like "1.5 MB", "500 KB", etc.

    """
    remaining = float(size_bytes)
    # Step through binary (1024-based) unit prefixes until the value fits.
    for unit in ("B", "KB", "MB", "GB", "TB"):
        if remaining < 1024.0:
            return f"{remaining:.1f} {unit}"
        remaining /= 1024.0
    # Anything past TB is reported in petabytes.
    return f"{remaining:.1f} PB"
+
+
def format_duration(seconds: int) -> str:
    """
    Format duration in seconds to HH:MM:SS or MM:SS string.

    Args:
        seconds: Duration in seconds

    Returns:
        Formatted time string

    """
    total_minutes, secs = divmod(seconds % 3600 if False else seconds, 60)  # noqa: placeholder
    # (straightforward split: seconds -> h/m/s)
    hours, remainder = divmod(seconds, 3600)
    minutes, secs = divmod(remainder, 60)

    # Omit the hours field entirely for sub-hour durations.
    if hours > 0:
        return f"{hours:02d}:{minutes:02d}:{secs:02d}"
    return f"{minutes:02d}:{secs:02d}"
+
+
async def _needs_faststart(path: str | Path, *, scan_bytes: int = 8 * 1024 * 1024) -> bool:
    """
    Check if an MP4 file needs 'faststart' (moov atom at the beginning).

    Args:
        path (str | Path): Path to the MP4 file to check
        scan_bytes (int): Number of bytes to scan for moov/mdat atoms

    Returns:
        bool: True if faststart is needed, False otherwise

    """
    file_path = Path(path)

    # Only inspect the leading window of the file; a streamable MP4 has its
    # moov atom near the front.
    async with aiofiles.open(file_path, "rb") as handle:
        head = await handle.read(scan_bytes)

    moov_pos = head.find(b"moov")
    mdat_pos = head.find(b"mdat")

    if moov_pos < 0:
        # moov not in the scanned window: assume it trails the media data.
        return True

    if mdat_pos < 0:
        # moov present and no mdat seen yet: already front-loaded.
        return False

    # Both present: needs a remux only when moov comes after mdat.
    return moov_pos > mdat_pos
+
+
async def ensure_faststart_mp4(
    mp4_path: str | Path,
    mimetype: str,
    *,
    ffmpeg_bin: str = "ffmpeg",
    scan_bytes: int = 8 * 1024 * 1024,
) -> bool:
    """
    Ensure that an MP4 file has 'faststart' enabled (moov atom at the beginning).

    The remux is a stream copy (no re-encode) into a temp file in the same
    directory, which then replaces the original in place on success.

    Args:
        mp4_path (str | Path): Path to the MP4 file to process
        mimetype (str): MIME type of the file
        ffmpeg_bin (str): Path to the ffmpeg binary
        scan_bytes (int): Number of bytes to scan for moov/mdat atoms

    Returns:
        bool: True if the file was modified to enable faststart, False otherwise

    Raises:
        FileNotFoundError: If the input file does not exist
        RuntimeError: If ffmpeg fails to process the file

    """
    src = Path(mp4_path)
    if not src.exists():
        raise FileNotFoundError(src)

    # Only MP4-family containers are eligible for a faststart remux.
    mime = mimetype.strip().lower()
    if mime not in ("video/mp4", "video/quicktime"):
        return False

    # Skip files whose moov atom is already at the front.
    if not await _needs_faststart(src, scan_bytes=scan_bytes):
        return False

    # Create the temp output next to the source so the final replace() is a
    # same-filesystem rename.
    tmp_dir = src.parent
    fd, tmp_out = tempfile.mkstemp(
        prefix=src.name + ".",
        suffix=".faststart.tmp",
        dir=tmp_dir,
    )
    # ffmpeg reopens the path by name; we only needed mkstemp's reservation.
    os.close(fd)
    tmp_out_path = Path(tmp_out)

    try:
        # Stream-copy remux; -y lets ffmpeg overwrite the mkstemp placeholder.
        cmd = [
            ffmpeg_bin,
            "-hide_banner",
            "-loglevel",
            "error",
            "-y",
            "-i",
            str(src),
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            str(tmp_out_path),
        ]

        proc = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        out, err = await proc.communicate()

        if proc.returncode != 0:
            msg = (
                f"ffmpeg faststart failed (rc={proc.returncode}).\n"
                f"stdout:\n{out.decode(errors='replace')}\n"
                f"stderr:\n{err.decode(errors='replace')}"
            )
            raise RuntimeError(msg)

        # Guard against a zero-byte output (e.g. ffmpeg aborted after open).
        if not tmp_out_path.exists() or tmp_out_path.stat().st_size == 0:
            msg = "ffmpeg produced an empty output file"
            raise RuntimeError(msg)

        # Swap the remuxed file into place; tmp no longer exists afterwards,
        # so the finally-block cleanup becomes a no-op on success.
        tmp_out_path.replace(src)
        return True

    finally:
        # Best-effort cleanup of the temp file on any failure path.
        with contextlib.suppress(Exception):
            if tmp_out_path.exists():
                tmp_out_path.unlink()
diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py
index 88c1055..6e96805 100644
--- a/backend/tests/conftest.py
+++ b/backend/tests/conftest.py
@@ -5,19 +5,21 @@
from importlib import reload
from pathlib import Path
+import pytest
+from httpx import ASGITransport, AsyncClient
+
TEST_CONFIG_DIR = tempfile.mkdtemp(prefix="fbc-test-config-")
TEST_STORAGE_DIR = tempfile.mkdtemp(prefix="fbc-test-storage-")
-
+TEST_FRONTEND_DIR = tempfile.mkdtemp(prefix="fbc-test-frontend-")
os.environ["FBC_CONFIG_PATH"] = TEST_CONFIG_DIR
os.environ["FBC_STORAGE_PATH"] = TEST_STORAGE_DIR
+os.environ["FBC_FRONTEND_EXPORT_PATH"] = TEST_FRONTEND_DIR
os.environ["FBC_DATABASE_URL"] = "sqlite+aiosqlite:///:memory:"
os.environ["FBC_ADMIN_API_KEY"] = "test-admin"
os.environ["FBC_SKIP_MIGRATIONS"] = "1"
os.environ["FBC_SKIP_CLEANUP"] = "1"
-import pytest
-
ROOT = Path(__file__).resolve().parent.parent.parent
if str(ROOT) not in sys.path:
sys.path.insert(0, str(ROOT))
@@ -25,6 +27,7 @@
import backend.app.config as config_module
from backend.app import metadata_schema
from backend.app.config import Settings
+from backend.app.postprocessing import ProcessingQueue
config_module.settings = Settings()
@@ -70,6 +73,40 @@ async def setup_db():
await engine.dispose()
@pytest.fixture(scope="session", autouse=True)
def setup_frontend():
    """Create minimal frontend structure for tests."""
    import shutil

    frontend_dir = Path(TEST_FRONTEND_DIR)
    frontend_dir.mkdir(parents=True, exist_ok=True)
    (frontend_dir / "index.html").write_text("Test")
    yield
    # Session teardown: drop the temporary frontend tree.
    shutil.rmtree(TEST_FRONTEND_DIR, ignore_errors=True)
+
+
@pytest.fixture
async def processing_queue():
    """
    Create a fresh processing queue for each test.

    The worker is stopped in a ``finally`` block so teardown runs even when
    the test body raises (pytest re-raises the failure into this generator
    at the ``yield`` point, which would otherwise skip ``stop_worker`` and
    leak a background worker across tests).
    """
    queue = ProcessingQueue()
    queue.start_worker()
    try:
        yield queue
    finally:
        await queue.stop_worker()
+
+
@pytest.fixture
async def client(processing_queue):
    """Create an HTTP client with overridden dependencies."""
    # Imported lazily so app construction happens after env/config fixtures.
    from backend.app.main import app

    # Point the app at the per-test queue fixture.
    app.state.processing_queue = processing_queue

    async with AsyncClient(
        transport=ASGITransport(app=app),
        base_url="http://testserver",
    ) as http_client:
        yield http_client
+
+
def seed_schema(fields: list[dict] | None = None) -> Path:
"""Seed test metadata schema with provided or default fields."""
if fields is None:
diff --git a/backend/tests/test_download_restrictions.py b/backend/tests/test_download_restrictions.py
new file mode 100644
index 0000000..3873d18
--- /dev/null
+++ b/backend/tests/test_download_restrictions.py
@@ -0,0 +1,133 @@
+"""Test download restrictions for expired/disabled tokens."""
+
+import pytest
+from datetime import UTC, datetime, timedelta
+from fastapi import status
+from unittest.mock import patch
+
+from backend.app.main import app
+from backend.app.config import settings
+from backend.tests.utils import create_token, initiate_upload, upload_file_via_tus
+
+
@pytest.mark.asyncio
async def test_download_blocked_for_disabled_token(client):
    """Test that downloads are blocked when token is disabled and public downloads are off."""
    with patch("backend.app.security.settings.allow_public_downloads", False):
        admin_headers = {"Authorization": f"Bearer {settings.admin_api_key}"}

        # Arrange: issue a token and complete one upload with it.
        issued = await create_token(client, max_uploads=1)
        upload_info = await initiate_upload(client, issued["token"], "test.txt", 12)
        upload_id = upload_info["upload_id"]
        await upload_file_via_tus(client, upload_id, b"test content")

        # Disable the token via the admin API.
        await client.patch(
            app.url_path_for("update_token", token_value=issued["token"]),
            json={"disabled": True},
            headers=admin_headers,
        )

        # Act + assert: unauthenticated download must be rejected.
        resp = await client.get(
            app.url_path_for("download_file", download_token=issued["download_token"], upload_id=upload_id)
        )
        assert resp.status_code in [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN], (
            "Download should be blocked for disabled token without auth"
        )
+
+
@pytest.mark.asyncio
async def test_download_blocked_for_expired_token(client):
    """Test that downloads are blocked when token is expired and public downloads are off."""
    with patch("backend.app.security.settings.allow_public_downloads", False):
        admin_headers = {"Authorization": f"Bearer {settings.admin_api_key}"}

        # Arrange: issue a token and complete one upload with it.
        issued = await create_token(client, max_uploads=1)
        upload_info = await initiate_upload(client, issued["token"], "test.txt", 12)
        upload_id = upload_info["upload_id"]
        await upload_file_via_tus(client, upload_id, b"test content")

        # Backdate the expiry so the token is already expired.
        await client.patch(
            app.url_path_for("update_token", token_value=issued["token"]),
            json={"expiry_datetime": (datetime.now(UTC) - timedelta(hours=1)).isoformat()},
            headers=admin_headers,
        )

        # Act + assert: unauthenticated download must be rejected.
        resp = await client.get(
            app.url_path_for("download_file", download_token=issued["download_token"], upload_id=upload_id)
        )
        assert resp.status_code in [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN], (
            "Download should be blocked for expired token without auth"
        )
+
+
@pytest.mark.asyncio
async def test_download_allowed_for_disabled_token_with_admin_key(client):
    """Test that admin can download from disabled tokens."""
    admin_headers = {"Authorization": f"Bearer {settings.admin_api_key}"}

    # Arrange: issue a token, complete an upload, then disable the token.
    issued = await create_token(client, max_uploads=1)
    upload_info = await initiate_upload(client, issued["token"], "test.txt", 12)
    upload_id = upload_info["upload_id"]
    await upload_file_via_tus(client, upload_id, b"test content")

    await client.patch(
        app.url_path_for("update_token", token_value=issued["token"]),
        json={"disabled": True},
        headers=admin_headers,
    )

    # Act + assert: the admin key bypasses the disabled-token restriction.
    resp = await client.get(
        app.url_path_for("download_file", download_token=issued["download_token"], upload_id=upload_id),
        headers=admin_headers,
    )
    assert resp.status_code == status.HTTP_200_OK, "Admin should be able to download from disabled token"
    assert resp.content == b"test content", "Downloaded content should match"
+
+
@pytest.mark.asyncio
async def test_get_file_info_blocked_for_disabled_token(client):
    """Test that file info is blocked when token is disabled and public downloads are off."""
    with patch("backend.app.security.settings.allow_public_downloads", False):
        admin_headers = {"Authorization": f"Bearer {settings.admin_api_key}"}

        # Arrange: issue a token and complete one upload with it.
        issued = await create_token(client, max_uploads=1)
        upload_info = await initiate_upload(client, issued["token"], "test.txt", 12)
        upload_id = upload_info["upload_id"]
        await upload_file_via_tus(client, upload_id, b"test content")

        # Disable the token via the admin API.
        await client.patch(
            app.url_path_for("update_token", token_value=issued["token"]),
            json={"disabled": True},
            headers=admin_headers,
        )

        # Act + assert: unauthenticated metadata access must be rejected.
        resp = await client.get(
            app.url_path_for("get_file_info", download_token=issued["download_token"], upload_id=upload_id)
        )
        assert resp.status_code in [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN], (
            "File info should be blocked for disabled token without auth"
        )
+
+
@pytest.mark.asyncio
async def test_get_file_info_allowed_for_disabled_token_with_admin_key(client):
    """Test that admin can get file info from disabled tokens."""
    admin_headers = {"Authorization": f"Bearer {settings.admin_api_key}"}

    # Arrange: issue a token, complete an upload, then disable the token.
    issued = await create_token(client, max_uploads=1)
    upload_info = await initiate_upload(client, issued["token"], "test.txt", 12)
    upload_id = upload_info["upload_id"]
    await upload_file_via_tus(client, upload_id, b"test content")

    await client.patch(
        app.url_path_for("update_token", token_value=issued["token"]),
        json={"disabled": True},
        headers=admin_headers,
    )

    # Act + assert: the admin key bypasses the disabled-token restriction.
    resp = await client.get(
        app.url_path_for("get_file_info", download_token=issued["download_token"], upload_id=upload_id),
        headers=admin_headers,
    )
    assert resp.status_code == status.HTTP_200_OK, "Admin should be able to get file info from disabled token"
    assert resp.json()["filename"] == "test.txt", "File info should be returned"
diff --git a/backend/tests/test_faststart.py b/backend/tests/test_faststart.py
new file mode 100644
index 0000000..700b2e1
--- /dev/null
+++ b/backend/tests/test_faststart.py
@@ -0,0 +1,93 @@
+"""Tests for MP4 faststart post-processing."""
+
+import tempfile
+from pathlib import Path
+
+import pytest
+
+from backend.app.utils import _needs_faststart, ensure_faststart_mp4
+
+
@pytest.mark.asyncio
async def test_not_mp4_returns_false():
    """Test that non-MP4 files return False without processing."""
    with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as tmp:
        tmp.write("not a video file")
        tmp.flush()
        target = Path(tmp.name)

    try:
        assert await ensure_faststart_mp4(target, "text/plain") is False, "Non-MP4 file should not be modified"
    finally:
        target.unlink(missing_ok=True)
+
+
@pytest.mark.asyncio
async def test_needs_faststart_non_mp4():
    """Test faststart check on non-MP4 file doesn't crash."""
    with tempfile.NamedTemporaryFile(mode="wb", suffix=".bin", delete=False) as tmp:
        tmp.write(b"some random binary data" * 1000)
        tmp.flush()
        target = Path(tmp.name)

    try:
        outcome = await _needs_faststart(target)
        assert isinstance(outcome, bool), "Function should return a boolean"
    finally:
        target.unlink(missing_ok=True)
+
+
@pytest.mark.asyncio
async def test_needs_faststart_empty_file():
    """Test faststart check on empty file."""
    # Create a zero-byte .mp4 file (nothing is written before the block exits).
    with tempfile.NamedTemporaryFile(mode="wb", suffix=".mp4", delete=False) as tmp:
        target = Path(tmp.name)

    try:
        assert await _needs_faststart(target) is True, "Empty file should need faststart (moov not found)"
    finally:
        target.unlink(missing_ok=True)
+
+
@pytest.mark.asyncio
async def test_needs_faststart_moov_before_mdat():
    """Test that file with moov before mdat doesn't need faststart."""
    payload = b"ftyp" + b"\x00" * 100 + b"moov" + b"\x00" * 1000 + b"mdat" + b"\x00" * 5000
    with tempfile.NamedTemporaryFile(mode="wb", suffix=".mp4", delete=False) as tmp:
        tmp.write(payload)
        tmp.flush()
        target = Path(tmp.name)

    try:
        assert await _needs_faststart(target) is False, "File with moov before mdat should not need faststart"
    finally:
        target.unlink(missing_ok=True)
+
+
@pytest.mark.asyncio
async def test_needs_faststart_mdat_before_moov():
    """Test that file with mdat before moov needs faststart."""
    payload = b"ftyp" + b"\x00" * 100 + b"mdat" + b"\x00" * 5000 + b"moov" + b"\x00" * 1000
    with tempfile.NamedTemporaryFile(mode="wb", suffix=".mp4", delete=False) as tmp:
        tmp.write(payload)
        tmp.flush()
        target = Path(tmp.name)

    try:
        assert await _needs_faststart(target) is True, "File with mdat before moov should need faststart"
    finally:
        target.unlink(missing_ok=True)
+
+
@pytest.mark.asyncio
async def test_ensure_faststart_file_not_found():
    """Test that missing file raises FileNotFoundError."""
    missing = Path("/tmp/does_not_exist_12345.mp4")
    # The helper must refuse to proceed when the source path is absent.
    with pytest.raises(FileNotFoundError):
        await ensure_faststart_mp4(missing, "video/mp4")
diff --git a/backend/tests/test_mimetype_validation.py b/backend/tests/test_mimetype_validation.py
index 54bd983..6223cd3 100644
--- a/backend/tests/test_mimetype_validation.py
+++ b/backend/tests/test_mimetype_validation.py
@@ -5,7 +5,6 @@
from fastapi import status
import pytest
-from httpx import ASGITransport, AsyncClient
from sqlalchemy import select
from backend.app import models
@@ -15,270 +14,269 @@
@pytest.mark.asyncio
-async def test_mimetype_spoofing_rejected():
+async def test_mimetype_spoofing_rejected(client):
"""Test that files with fake mimetypes are rejected after upload."""
- async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
- resp = await client.post(
- app.url_path_for("create_token"),
- json={
- "label": "Video Only",
- "max_size_bytes": 1_000_000,
- "expires_in_days": 1,
- "allowed_mime": ["video/*"],
- },
- headers={"Authorization": f"Bearer {settings.admin_api_key}"},
- )
- assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
- token_data = resp.json()
- token_value = token_data["token"]
-
- fake_video = b"This is actually a text file, not a video!"
-
- init_resp = await client.post(
- app.url_path_for("initiate_upload"),
- json={
- "filename": "test.mp4",
- "filetype": "video/mp4",
- "size_bytes": len(fake_video),
- "meta_data": {},
- },
- params={"token": token_value},
- )
- assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
- upload_data = init_resp.json()
- upload_id = upload_data["upload_id"]
-
- patch_resp = await client.patch(
- app.url_path_for("tus_patch", upload_id=upload_id),
- content=fake_video,
- headers={
- "Content-Type": "application/offset+octet-stream",
- "Upload-Offset": "0",
- "Content-Length": str(len(fake_video)),
- },
- )
-
- assert patch_resp.status_code == status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, "Fake video file should be rejected with 415"
- assert "does not match allowed types" in patch_resp.json()["detail"], "Error should indicate type mismatch"
-
- head_resp = await client.head(app.url_path_for("tus_head", upload_id=upload_id))
- assert head_resp.status_code == status.HTTP_404_NOT_FOUND, "Rejected upload should be removed"
+ resp = await client.post(
+ app.url_path_for("create_token"),
+ json={
+ "label": "Video Only",
+ "max_size_bytes": 1_000_000,
+ "expires_in_days": 1,
+ "allowed_mime": ["video/*"],
+ },
+ headers={"Authorization": f"Bearer {settings.admin_api_key}"},
+ )
+ assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
+ token_data = resp.json()
+ token_value = token_data["token"]
+
+ fake_video = b"This is actually a text file, not a video!"
+
+ init_resp = await client.post(
+ app.url_path_for("initiate_upload"),
+ json={
+ "filename": "test.mp4",
+ "filetype": "video/mp4",
+ "size_bytes": len(fake_video),
+ "meta_data": {},
+ },
+ params={"token": token_value},
+ )
+ assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
+ upload_data = init_resp.json()
+ upload_id = upload_data["upload_id"]
+
+ patch_resp = await client.patch(
+ app.url_path_for("tus_patch", upload_id=upload_id),
+ content=fake_video,
+ headers={
+ "Content-Type": "application/offset+octet-stream",
+ "Upload-Offset": "0",
+ "Content-Length": str(len(fake_video)),
+ },
+ )
+
+ assert patch_resp.status_code == status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, "Fake video file should be rejected with 415"
+ assert "does not match allowed types" in patch_resp.json()["detail"], "Error should indicate type mismatch"
+
+ head_resp = await client.head(app.url_path_for("tus_head", upload_id=upload_id))
+ assert head_resp.status_code == status.HTTP_404_NOT_FOUND, "Rejected upload should be removed"
@pytest.mark.asyncio
-async def test_valid_mimetype_accepted():
+async def test_valid_mimetype_accepted(client):
"""Test that files with correct mimetypes are accepted."""
- async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
- resp = await client.post(
- app.url_path_for("create_token"),
- json={
- "label": "Text Only",
- "max_size_bytes": 1_000_000,
- "expires_in_days": 1,
- "allowed_mime": ["text/*"],
- },
- headers={"Authorization": f"Bearer {settings.admin_api_key}"},
- )
- assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
- token_data = resp.json()
- token_value = token_data["token"]
-
- init_resp = await client.post(
- app.url_path_for("initiate_upload"),
- json={
- "filename": "test.txt",
- "filetype": "text/plain",
- "size_bytes": 20,
- "meta_data": {},
- },
- params={"token": token_value},
- )
- assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
- upload_data = init_resp.json()
- upload_id = upload_data["upload_id"]
-
- text_content = b"This is a text file."
-
- head_resp = await client.head(app.url_path_for("tus_head", upload_id=upload_id))
- assert head_resp.status_code == status.HTTP_200_OK, "TUS HEAD should return 200"
-
- patch_resp = await client.patch(
- app.url_path_for("tus_patch", upload_id=upload_id),
- content=text_content,
- headers={
- "Content-Type": "application/offset+octet-stream",
- "Upload-Offset": "0",
- "Content-Length": str(len(text_content)),
- },
- )
-
- assert patch_resp.status_code == status.HTTP_204_NO_CONTENT, "Valid text file should be accepted"
-
- head_resp = await client.head(app.url_path_for("tus_head", upload_id=upload_id))
- assert head_resp.status_code == status.HTTP_200_OK, "Upload should still exist after completion"
+ resp = await client.post(
+ app.url_path_for("create_token"),
+ json={
+ "label": "Text Only",
+ "max_size_bytes": 1_000_000,
+ "expires_in_days": 1,
+ "allowed_mime": ["text/*"],
+ },
+ headers={"Authorization": f"Bearer {settings.admin_api_key}"},
+ )
+ assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
+ token_data = resp.json()
+ token_value = token_data["token"]
+
+ init_resp = await client.post(
+ app.url_path_for("initiate_upload"),
+ json={
+ "filename": "test.txt",
+ "filetype": "text/plain",
+ "size_bytes": 20,
+ "meta_data": {},
+ },
+ params={"token": token_value},
+ )
+ assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
+ upload_data = init_resp.json()
+ upload_id = upload_data["upload_id"]
+
+ text_content = b"This is a text file."
+
+ head_resp = await client.head(app.url_path_for("tus_head", upload_id=upload_id))
+ assert head_resp.status_code == status.HTTP_200_OK, "TUS HEAD should return 200"
+
+ patch_resp = await client.patch(
+ app.url_path_for("tus_patch", upload_id=upload_id),
+ content=text_content,
+ headers={
+ "Content-Type": "application/offset+octet-stream",
+ "Upload-Offset": "0",
+ "Content-Length": str(len(text_content)),
+ },
+ )
+
+ assert patch_resp.status_code == status.HTTP_204_NO_CONTENT, "Valid text file should be accepted"
+
+ head_resp = await client.head(app.url_path_for("tus_head", upload_id=upload_id))
+ assert head_resp.status_code == status.HTTP_200_OK, "Upload should still exist after completion"
@pytest.mark.asyncio
-async def test_mimetype_updated_on_completion():
+async def test_mimetype_updated_on_completion(client):
"""Test that mimetype is updated with detected value on completion."""
- async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
- resp = await client.post(
- app.url_path_for("create_token"),
- json={
- "label": "Unrestricted",
- "max_size_bytes": 1_000_000,
- "expires_in_days": 1,
- },
- headers={"Authorization": f"Bearer {settings.admin_api_key}"},
- )
- assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
- token_data = resp.json()
- token_value = token_data["token"]
-
- init_resp = await client.post(
- app.url_path_for("initiate_upload"),
- json={
- "filename": "test.txt",
- "filetype": "application/octet-stream",
- "size_bytes": 20,
- "meta_data": {},
- },
- params={"token": token_value},
- )
- assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
- upload_data = init_resp.json()
- upload_id = upload_data["upload_id"]
-
- text_content = b"This is a text file."
-
- patch_resp = await client.patch(
- app.url_path_for("tus_patch", upload_id=upload_id),
- content=text_content,
- headers={
- "Content-Type": "application/offset+octet-stream",
- "Upload-Offset": "0",
- "Content-Length": str(len(text_content)),
- },
- )
- assert patch_resp.status_code == status.HTTP_204_NO_CONTENT, "Upload completion should return 204"
-
- async with SessionLocal() as session:
- stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
- res = await session.execute(stmt)
- upload = res.scalar_one_or_none()
- assert upload is not None, "Upload record should exist"
- assert upload.mimetype.startswith("text/"), "Mimetype should be detected as text"
+ resp = await client.post(
+ app.url_path_for("create_token"),
+ json={
+ "label": "Unrestricted",
+ "max_size_bytes": 1_000_000,
+ "expires_in_days": 1,
+ },
+ headers={"Authorization": f"Bearer {settings.admin_api_key}"},
+ )
+ assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
+ token_data = resp.json()
+ token_value = token_data["token"]
+
+ init_resp = await client.post(
+ app.url_path_for("initiate_upload"),
+ json={
+ "filename": "test.txt",
+ "filetype": "application/octet-stream",
+ "size_bytes": 20,
+ "meta_data": {},
+ },
+ params={"token": token_value},
+ )
+ assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
+ upload_data = init_resp.json()
+ upload_id = upload_data["upload_id"]
+
+ text_content = b"This is a text file."
+
+ patch_resp = await client.patch(
+ app.url_path_for("tus_patch", upload_id=upload_id),
+ content=text_content,
+ headers={
+ "Content-Type": "application/offset+octet-stream",
+ "Upload-Offset": "0",
+ "Content-Length": str(len(text_content)),
+ },
+ )
+ assert patch_resp.status_code == status.HTTP_204_NO_CONTENT, "Upload completion should return 204"
+
+ async with SessionLocal() as session:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
+ res = await session.execute(stmt)
+ upload = res.scalar_one_or_none()
+ assert upload is not None, "Upload record should exist"
+ assert upload.mimetype.startswith("text/"), "Mimetype should be detected as text"
@pytest.mark.asyncio
@pytest.mark.skipif(shutil.which("ffprobe") is None, reason="ffprobe not available")
-async def test_ffprobe_extracts_metadata_for_video():
+async def test_ffprobe_extracts_metadata_for_video(client):
"""Test that ffprobe metadata is extracted for video files."""
- async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
- resp = await client.post(
- app.url_path_for("create_token"),
- json={
- "label": "Video Upload",
- "max_size_bytes": 50_000_000,
- "expires_in_days": 1,
- "allowed_mime": ["video/*"],
- },
- headers={"Authorization": f"Bearer {settings.admin_api_key}"},
- )
- assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
- token_data = resp.json()
- token_value = token_data["token"]
-
- file = Path(__file__).parent / "fixtures" / "sample.mp4"
-
- init_resp = await client.post(
- app.url_path_for("initiate_upload"),
- json={
- "filename": "sample.mp4",
- "filetype": "video/mp4",
- "size_bytes": file.stat().st_size,
- "meta_data": {},
- },
- params={"token": token_value},
- )
- assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
- upload_data = init_resp.json()
- upload_id = upload_data["upload_id"]
-
- patch_resp = await client.patch(
- app.url_path_for("tus_patch", upload_id=upload_id),
- content=file.read_bytes(),
- headers={
- "Content-Type": "application/offset+octet-stream",
- "Upload-Offset": "0",
- "Content-Length": str(file.stat().st_size),
- },
- )
- assert patch_resp.status_code == status.HTTP_204_NO_CONTENT, "Video upload should complete successfully"
-
- async with SessionLocal() as session:
- stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
- res = await session.execute(stmt)
- upload = res.scalar_one_or_none()
- assert upload is not None, "Upload record should exist"
- assert upload.mimetype == "video/mp4", "Mimetype should be video/mp4"
- assert upload.meta_data is not None, "Metadata should be extracted"
- if "ffprobe" in upload.meta_data:
- assert isinstance(upload.meta_data["ffprobe"], dict), "ffprobe data should be a dict"
- assert "format" in upload.meta_data["ffprobe"] or "streams" in upload.meta_data["ffprobe"], (
- "ffprobe should contain format or streams info"
- )
+ resp = await client.post(
+ app.url_path_for("create_token"),
+ json={
+ "label": "Video Upload",
+ "max_size_bytes": 50_000_000,
+ "expires_in_days": 1,
+ "allowed_mime": ["video/*"],
+ },
+ headers={"Authorization": f"Bearer {settings.admin_api_key}"},
+ )
+ assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
+ token_data = resp.json()
+ token_value = token_data["token"]
+
+ file = Path(__file__).parent / "fixtures" / "sample.mp4"
+
+ init_resp = await client.post(
+ app.url_path_for("initiate_upload"),
+ json={
+ "filename": "sample.mp4",
+ "filetype": "video/mp4",
+ "size_bytes": file.stat().st_size,
+ "meta_data": {},
+ },
+ params={"token": token_value},
+ )
+ assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
+ upload_data = init_resp.json()
+ upload_id = upload_data["upload_id"]
+
+ patch_resp = await client.patch(
+ app.url_path_for("tus_patch", upload_id=upload_id),
+ content=file.read_bytes(),
+ headers={
+ "Content-Type": "application/offset+octet-stream",
+ "Upload-Offset": "0",
+ "Content-Length": str(file.stat().st_size),
+ },
+ )
+ assert patch_resp.status_code == status.HTTP_204_NO_CONTENT, "Video upload should complete successfully"
+
+ from backend.tests.test_postprocessing import wait_for_processing
+
+ await wait_for_processing([upload_id], timeout=10.0)
+
+ async with SessionLocal() as session:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
+ res = await session.execute(stmt)
+ upload = res.scalar_one_or_none()
+ assert upload is not None, "Upload record should exist"
+ assert upload.mimetype == "video/mp4", "Mimetype should be video/mp4"
+ assert upload.meta_data is not None, "Metadata should be extracted"
+ if "ffprobe" in upload.meta_data:
+ assert isinstance(upload.meta_data["ffprobe"], dict), "ffprobe data should be a dict"
+ assert "format" in upload.meta_data["ffprobe"] or "streams" in upload.meta_data["ffprobe"], (
+ "ffprobe should contain format or streams info"
+ )
@pytest.mark.asyncio
@pytest.mark.skipif(shutil.which("ffprobe") is None, reason="ffprobe not available")
-async def test_ffprobe_not_run_for_non_multimedia():
+async def test_ffprobe_not_run_for_non_multimedia(client):
"""Test that ffprobe is not run for non-multimedia files."""
- async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
- resp = await client.post(
- app.url_path_for("create_token"),
- json={
- "label": "Text Upload",
- "max_size_bytes": 1_000_000,
- "expires_in_days": 1,
- },
- headers={"Authorization": f"Bearer {settings.admin_api_key}"},
- )
- assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
- token_data = resp.json()
- token_value = token_data["token"]
-
- text_content = b"This is just text content, not multimedia."
- init_resp = await client.post(
- app.url_path_for("initiate_upload"),
- json={
- "filename": "test.txt",
- "filetype": "text/plain",
- "size_bytes": len(text_content),
- "meta_data": {},
- },
- params={"token": token_value},
- )
- assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
- upload_data = init_resp.json()
- upload_id = upload_data["upload_id"]
-
- patch_resp = await client.patch(
- app.url_path_for("tus_patch", upload_id=upload_id),
- content=text_content,
- headers={
- "Content-Type": "application/offset+octet-stream",
- "Upload-Offset": "0",
- "Content-Length": str(len(text_content)),
- },
- )
- assert patch_resp.status_code == status.HTTP_204_NO_CONTENT, "Text upload should complete successfully"
-
- async with SessionLocal() as session:
- stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
- res = await session.execute(stmt)
- upload = res.scalar_one_or_none()
- assert upload is not None, "Upload record should exist"
- assert upload.mimetype.startswith("text/"), "Mimetype should be text"
- assert upload.meta_data is not None, "Metadata should exist"
- assert "ffprobe" not in upload.meta_data, "ffprobe should not run for text files"
+ resp = await client.post(
+ app.url_path_for("create_token"),
+ json={
+ "label": "Text Upload",
+ "max_size_bytes": 1_000_000,
+ "expires_in_days": 1,
+ },
+ headers={"Authorization": f"Bearer {settings.admin_api_key}"},
+ )
+ assert resp.status_code == status.HTTP_201_CREATED, "Token creation should return 201"
+ token_data = resp.json()
+ token_value = token_data["token"]
+
+ text_content = b"This is just text content, not multimedia."
+ init_resp = await client.post(
+ app.url_path_for("initiate_upload"),
+ json={
+ "filename": "test.txt",
+ "filetype": "text/plain",
+ "size_bytes": len(text_content),
+ "meta_data": {},
+ },
+ params={"token": token_value},
+ )
+ assert init_resp.status_code == status.HTTP_201_CREATED, "Upload initiation should return 201"
+ upload_data = init_resp.json()
+ upload_id = upload_data["upload_id"]
+
+ patch_resp = await client.patch(
+ app.url_path_for("tus_patch", upload_id=upload_id),
+ content=text_content,
+ headers={
+ "Content-Type": "application/offset+octet-stream",
+ "Upload-Offset": "0",
+ "Content-Length": str(len(text_content)),
+ },
+ )
+ assert patch_resp.status_code == status.HTTP_204_NO_CONTENT, "Text upload should complete successfully"
+
+ async with SessionLocal() as session:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
+ res = await session.execute(stmt)
+ upload = res.scalar_one_or_none()
+ assert upload is not None, "Upload record should exist"
+ assert upload.mimetype.startswith("text/"), "Mimetype should be text"
+ assert upload.meta_data is not None, "Metadata should exist"
+ assert "ffprobe" not in upload.meta_data, "ffprobe should not run for text files"
diff --git a/backend/tests/test_postprocessing.py b/backend/tests/test_postprocessing.py
new file mode 100644
index 0000000..0ab61ad
--- /dev/null
+++ b/backend/tests/test_postprocessing.py
@@ -0,0 +1,168 @@
+"""Tests for post-processing worker."""
+
+import asyncio
+import tempfile
+from datetime import UTC, datetime, timedelta
+from pathlib import Path
+
+import pytest
+from httpx import AsyncClient
+from sqlalchemy import select
+
+from backend.app import models
+from backend.app.db import SessionLocal
+from backend.app.postprocessing import process_upload
+from backend.tests.utils import create_token, initiate_upload, upload_file_via_tus
+
+
+async def wait_for_processing(upload_ids: list[str], timeout: float = 5.0) -> bool:
+ """
+ Wait for uploads to complete processing.
+
+ Args:
+ upload_ids: List of upload public IDs to wait for
+ timeout: Maximum time to wait in seconds
+
+ Returns:
+ True if all uploads completed, False if timeout
+
+ """
+ start = asyncio.get_event_loop().time()
+ while (asyncio.get_event_loop().time() - start) < timeout:
+ async with SessionLocal() as session:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id.in_(upload_ids))
+ result = await session.execute(stmt)
+ records = result.scalars().all()
+
+ if all(r.status in ("completed", "failed") for r in records):
+ return True
+
+ await asyncio.sleep(0.1)
+
+ return False
+
+
+@pytest.mark.asyncio
+async def test_multimedia_upload_enters_postprocessing(client):
+ """Test that multimedia uploads enter postprocessing status."""
+ token_data = await create_token(client, max_uploads=1)
+ token_value = token_data["token"]
+
+ video_file = Path(__file__).parent / "fixtures" / "sample.mp4"
+ video_content = video_file.read_bytes()
+
+ upload_data = await initiate_upload(
+ client, token_value, filename="test.mp4", size_bytes=len(video_content), filetype="video/mp4", meta_data={"title": "Test Video"}
+ )
+ upload_id = upload_data["upload_id"]
+
+ await upload_file_via_tus(client, upload_id, video_content)
+
+ async with SessionLocal() as session:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
+ result = await session.execute(stmt)
+ record = result.scalar_one()
+
+ assert record.status == "postprocessing", "Multimedia upload should enter postprocessing status"
+ assert record.completed_at is None, "Upload should not be marked complete yet"
+
+
+@pytest.mark.asyncio
+async def test_non_multimedia_upload_completes_immediately(client):
+ """Test that non-multimedia uploads complete immediately without post-processing."""
+ token_data = await create_token(client, max_uploads=1)
+ token_value = token_data["token"]
+
+ pdf_content = b"%PDF-1.4 fake pdf content"
+ upload_data = await initiate_upload(
+ client, token_value, filename="document.pdf", size_bytes=len(pdf_content), meta_data={"title": "Test Doc"}
+ )
+ upload_id = upload_data["upload_id"]
+
+ await upload_file_via_tus(client, upload_id, pdf_content)
+
+ async with SessionLocal() as session:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id == upload_id)
+ result = await session.execute(stmt)
+ record = result.scalar_one()
+
+ assert record.status == "completed", "Non-multimedia upload should complete immediately"
+ assert record.completed_at is not None, "Upload should be marked complete"
+
+
+@pytest.mark.asyncio
+async def test_postprocessing_worker_processes_queue(client):
+ """Test that the post-processing worker processes pending uploads."""
+ token_data = await create_token(client, max_uploads=2)
+ token_value = token_data["token"]
+
+ video_file = Path(__file__).parent / "fixtures" / "sample.mp4"
+ video_content = video_file.read_bytes()
+
+ upload1_data = await initiate_upload(
+ client, token_value, filename="video1.mp4", size_bytes=len(video_content), filetype="video/mp4", meta_data={"title": "Video 1"}
+ )
+ upload1_id = upload1_data["upload_id"]
+
+ upload2_data = await initiate_upload(
+ client, token_value, filename="video2.mp4", size_bytes=len(video_content), filetype="video/mp4", meta_data={"title": "Video 2"}
+ )
+ upload2_id = upload2_data["upload_id"]
+
+ await upload_file_via_tus(client, upload1_id, video_content)
+ await upload_file_via_tus(client, upload2_id, video_content)
+
+ async with SessionLocal() as session:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id.in_([upload1_id, upload2_id]))
+ result = await session.execute(stmt)
+ records = result.scalars().all()
+
+ for record in records:
+ assert record.status in ("postprocessing", "completed"), "Upload should be in postprocessing or already completed"
+
+ completed = await wait_for_processing([upload1_id, upload2_id])
+ assert completed, "Processing should complete within timeout"
+
+ async with SessionLocal() as session:
+ stmt = select(models.UploadRecord).where(models.UploadRecord.public_id.in_([upload1_id, upload2_id]))
+ result = await session.execute(stmt)
+ records = result.scalars().all()
+
+ for record in records:
+ assert record.status == "completed", "Both uploads should be completed after processing"
+ assert record.completed_at is not None, "Both uploads should have completion time"
+
+
+@pytest.mark.asyncio
+async def test_postprocessing_handles_missing_file():
+ """Test that post-processing handles missing files gracefully."""
+ async with SessionLocal() as session:
+ expires_at = datetime.now(UTC) + timedelta(days=1)
+
+ token = models.UploadToken(
+ token="test_token_missing",
+ download_token="test_download_missing",
+ max_uploads=1,
+ max_size_bytes=1000000,
+ expires_at=expires_at,
+ )
+ session.add(token)
+ await session.flush()
+
+ record = models.UploadRecord(
+ public_id="missing_file_test",
+ token_id=token.id,
+ filename="nonexistent.mp4",
+ mimetype="video/mp4",
+ status="postprocessing",
+ storage_path="/tmp/nonexistent_file_12345.mp4",
+ )
+ session.add(record)
+ await session.commit()
+
+ success = await process_upload(session, record)
+ assert success is False, "Processing should fail for missing file"
+
+ await session.refresh(record)
+ assert record.status == "failed", "Upload should be marked as failed"
+ assert "error" in record.meta_data, "Error should be recorded in metadata"
diff --git a/backend/tests/test_share_view.py b/backend/tests/test_share_view.py
index 5f6cb86..b7bb4ea 100644
--- a/backend/tests/test_share_view.py
+++ b/backend/tests/test_share_view.py
@@ -1,11 +1,15 @@
"""Test share view endpoint returns appropriate data based on token type."""
+import asyncio
import pytest
+from pathlib import Path
from httpx import ASGITransport, AsyncClient
from fastapi import status
+from unittest.mock import patch
from backend.app.main import app
from backend.tests.utils import create_token
+from backend.tests.test_postprocessing import wait_for_processing
@pytest.mark.asyncio
@@ -57,3 +61,71 @@ async def test_get_token_invalid_token_returns_404():
data = response.json()
assert "detail" in data, "Should include error detail"
assert "not found" in data["detail"].lower(), "Error should mention token not found"
+
+
+@pytest.mark.asyncio
+async def test_share_page_route_exists_and_responds():
+ """Test that /f/{token} route exists and responds appropriately."""
+ transport = ASGITransport(app=app)
+ async with AsyncClient(transport=transport, base_url="http://testserver") as client:
+ token_data = await create_token(client, max_uploads=1)
+ upload_token = token_data["token"]
+
+ response_bot = await client.get(f"/f/{upload_token}", headers={"User-Agent": "Mozilla/5.0 (compatible; Discordbot/2.0)"})
+ assert response_bot.status_code == status.HTTP_200_OK, "Should return 200 for bot accessing share page"
+ html_content = response_bot.text
+        const offsetSign = offsetMinutes >= 0 ? '+' : '-';
+ const isoWithTz = `${state.expiry}:00${offsetSign}${offsetHours}:${offsetMins}`;
+ payload.expiry_datetime = isoWithTz;
}
if (props.mode === "edit") {
diff --git a/frontend/app/components/AdminTokensTable.vue b/frontend/app/components/AdminTokensTable.vue
index 30cdf96..870050c 100644
--- a/frontend/app/components/AdminTokensTable.vue
+++ b/frontend/app/components/AdminTokensTable.vue
@@ -26,15 +26,10 @@
-
- {{ token.token }}
-
-
-
-
-
- {{ token.token }}
-
+ {{ token.token }}
+
+
+
|
diff --git a/frontend/app/components/AdminUploadsTable.vue b/frontend/app/components/AdminUploadsTable.vue
index baf7bef..da73ad8 100644
--- a/frontend/app/components/AdminUploadsTable.vue
+++ b/frontend/app/components/AdminUploadsTable.vue
@@ -21,7 +21,7 @@
No uploads found
-
+
|
diff --git a/frontend/app/components/UploadsTable.vue b/frontend/app/components/UploadsTable.vue
index d92bb4b..213005f 100644
--- a/frontend/app/components/UploadsTable.vue
+++ b/frontend/app/components/UploadsTable.vue
@@ -11,8 +11,8 @@
|
-
- | {{ row.id }} |
+
+ | {{ index+1 }} |
- {{ row.status }}
+ Processing
+ {{ row.status }}
|
-
+
{{ formatBytes(row.size_bytes ?? row.upload_length ?? 0) }}
@@ -74,7 +75,7 @@
Resume
-
Cancel
@@ -107,6 +108,7 @@ function getStatusColor(status: string): 'success' | 'error' | 'warning' | 'prim
case 'paused': return 'warning';
case 'uploading':
case 'in_progress':
+ case 'postprocessing':
case 'initiating': return 'primary';
default: return 'neutral';
}
@@ -120,6 +122,7 @@ function getStatusIcon(status: string): string {
case 'paused': return 'i-heroicons-pause-circle-20-solid';
case 'uploading':
case 'in_progress': return 'i-heroicons-arrow-path-20-solid';
+ case 'postprocessing': return 'i-heroicons-cog-6-tooth-20-solid';
case 'initiating': return 'i-heroicons-arrow-up-tray-20-solid';
case 'pending': return 'i-heroicons-clock-20-solid';
default: return 'i-heroicons-question-mark-circle-20-solid';
diff --git a/frontend/app/composables/useTokenInfo.ts b/frontend/app/composables/useTokenInfo.ts
index ac38c80..11f23af 100644
--- a/frontend/app/composables/useTokenInfo.ts
+++ b/frontend/app/composables/useTokenInfo.ts
@@ -1,10 +1,13 @@
import { ref, computed } from 'vue'
import type { TokenInfo } from '~/types/token'
+import type { ApiError } from '~/types/uploads'
export function useTokenInfo(tokenValue: Ref) {
const tokenInfo = ref(null)
const notFound = ref(false)
const tokenError = ref('')
+ const isExpired = ref(false)
+ const isDisabled = ref(false)
const shareLinkText = computed(() => {
if (!tokenInfo.value) return ''
@@ -17,16 +20,31 @@ export function useTokenInfo(tokenValue: Ref) {
return
}
tokenError.value = ''
+ isExpired.value = false
+ isDisabled.value = false
try {
- const data = await $fetch('/api/tokens/' + tokenValue.value)
- tokenInfo.value = data as any
+ const { $apiFetch } = useNuxtApp()
+ const data = await $apiFetch('/api/tokens/' + tokenValue.value)
+ tokenInfo.value = data
notFound.value = false
- } catch (err: any) {
+
+ // Check token status based on returned data
+ if (tokenInfo.value) {
+ const now = new Date()
+ if (tokenInfo.value.expires_at) {
+ const expiresAt = new Date(tokenInfo.value.expires_at)
+ isExpired.value = expiresAt < now
+ }
+ isDisabled.value = tokenInfo.value.disabled || false
+ }
+ } catch (err) {
+ const error = err as ApiError
tokenInfo.value = null
notFound.value = true
- tokenError.value = err?.data?.detail || err?.message || 'Failed to load token info.'
+ tokenError.value = error?.data?.detail || error?.message || 'Failed to load token info.'
}
}
- return { tokenInfo, notFound, tokenError, shareLinkText, fetchTokenInfo }
+ return { tokenInfo, notFound, tokenError, isExpired, isDisabled, shareLinkText, fetchTokenInfo }
}
+
diff --git a/frontend/app/composables/useTusUpload.ts b/frontend/app/composables/useTusUpload.ts
index 616118e..cd2cd18 100644
--- a/frontend/app/composables/useTusUpload.ts
+++ b/frontend/app/composables/useTusUpload.ts
@@ -3,7 +3,13 @@ import type { Slot } from '~/types/uploads'
import type { TokenInfo } from '~/types/token'
export function useTusUpload() {
- async function startTusUpload(slot: Slot, uploadUrl: string, file: File, tokenInfo: TokenInfo | null) {
+ async function startTusUpload(
+ slot: Slot,
+ uploadUrl: string,
+ file: File,
+ tokenInfo: TokenInfo | null,
+ onUploadComplete?: (slot: Slot) => void
+ ) {
slot.status = 'uploading'
slot.paused = false
return new Promise((resolve, reject) => {
@@ -17,7 +23,7 @@ export function useTusUpload() {
filename: file.name,
filetype: file.type,
},
- onError(error: any) {
+ onError(error: Error) {
slot.error = error.message
slot.status = 'error'
slot.tusUpload = undefined
@@ -29,9 +35,14 @@ export function useTusUpload() {
slot.status = 'uploading'
},
onSuccess() {
- slot.status = 'completed'
+ slot.status = 'postprocessing'
slot.progress = 100
slot.tusUpload = undefined
+
+ if (onUploadComplete) {
+ onUploadComplete(slot)
+ }
+
resolve()
},
})
diff --git a/frontend/app/composables/useUploadPolling.ts b/frontend/app/composables/useUploadPolling.ts
new file mode 100644
index 0000000..223e4ea
--- /dev/null
+++ b/frontend/app/composables/useUploadPolling.ts
@@ -0,0 +1,62 @@
+import { ref } from 'vue'
+import type { Slot } from '~/types/uploads'
+import type { TokenInfo } from '~/types/token'
+
+export function useUploadPolling() {
+ const pollingIntervals = ref |
-
+
@@ -106,7 +114,7 @@
ID:
- {{ upload.id }}
+ {{ upload.public_id }}
Type:
@@ -165,7 +173,7 @@ const route = useRoute()
const toast = useToast()
const token = ref((route.params.token as string) || '')
-const { tokenInfo, notFound, tokenError, fetchTokenInfo } = useTokenInfo(token)
+const { tokenInfo, notFound, tokenError, isExpired, isDisabled, fetchTokenInfo } = useTokenInfo(token)
const loading = ref(true)
const notice = ref('')
const showNotice = useStorage('show_notice', true)
@@ -194,6 +202,7 @@ function getStatusColor(status: string): 'success' | 'error' | 'warning' | 'neut
case 'error':
case 'validation_failed': return 'error'
case 'in_progress':
+ case 'postprocessing':
case 'uploading': return 'warning'
default: return 'neutral'
}
diff --git a/frontend/app/pages/index.vue b/frontend/app/pages/index.vue
index 3f8b2bf..f076180 100644
--- a/frontend/app/pages/index.vue
+++ b/frontend/app/pages/index.vue
@@ -30,6 +30,7 @@
diff --git a/frontend/app/tests/useTokenInfo.test.ts b/frontend/app/tests/useTokenInfo.test.ts
index 7f0db66..dfffed8 100644
--- a/frontend/app/tests/useTokenInfo.test.ts
+++ b/frontend/app/tests/useTokenInfo.test.ts
@@ -4,7 +4,7 @@ import { useTokenInfo } from '~/composables/useTokenInfo'
afterEach(() => {
vi.restoreAllMocks()
- ; (vi as any).unstubAllGlobals?.()
+ vi.unstubAllGlobals()
})
describe('useTokenInfo', () => {
@@ -15,8 +15,12 @@ describe('useTokenInfo', () => {
remaining_uploads: 2,
max_uploads: 5,
expires_at: '2024-12-01T00:00:00Z',
+ disabled: false,
})
- vi.stubGlobal('$fetch', fetchMock)
+
+ vi.stubGlobal('useNuxtApp', () => ({
+ $apiFetch: fetchMock
+ }))
const { tokenInfo, notFound, shareLinkText, fetchTokenInfo } = useTokenInfo(tokenValue)
@@ -31,7 +35,10 @@ describe('useTokenInfo', () => {
it('sets error state when fetch fails', async () => {
const tokenValue = ref('missing')
const fetchMock = vi.fn().mockRejectedValue({ data: { detail: 'No token' } })
- vi.stubGlobal('$fetch', fetchMock)
+
+ vi.stubGlobal('useNuxtApp', () => ({
+ $apiFetch: fetchMock
+ }))
const { tokenInfo, notFound, tokenError, fetchTokenInfo } = useTokenInfo(tokenValue)
diff --git a/frontend/app/tests/useTusUpload.test.ts b/frontend/app/tests/useTusUpload.test.ts
index 445ec92..ebbc30a 100644
--- a/frontend/app/tests/useTusUpload.test.ts
+++ b/frontend/app/tests/useTusUpload.test.ts
@@ -11,7 +11,7 @@ const makeSlot = (): Slot => ({
errors: [],
paused: false,
initiated: false,
- uploadId: 1,
+ uploadId: 'test-upload-id',
})
// Create a configurable mock Upload class
@@ -58,7 +58,7 @@ describe('useTusUpload', () => {
await startTusUpload(slot, 'http://upload', file, { max_chunk_bytes: 50 } as any)
- expect(slot.status).toBe('completed')
+ expect(slot.status).toBe('postprocessing')
expect(slot.progress).toBe(100)
expect(slot.tusUpload).toBeUndefined()
})
diff --git a/frontend/app/tests/useUploadPolling.test.ts b/frontend/app/tests/useUploadPolling.test.ts
new file mode 100644
index 0000000..b383845
--- /dev/null
+++ b/frontend/app/tests/useUploadPolling.test.ts
@@ -0,0 +1,59 @@
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
+import { useUploadPolling } from '../composables/useUploadPolling'
+import { reactive } from 'vue'
+
+describe('useUploadPolling', () => {
+ beforeEach(() => {
+ vi.useFakeTimers()
+ })
+
+ afterEach(() => {
+ vi.restoreAllMocks()
+ vi.useRealTimers()
+ })
+
+ it('should provide polling functions', () => {
+ const { pollUploadStatus, stopPolling, stopAllPolling } = useUploadPolling()
+
+ expect(pollUploadStatus).toBeTypeOf('function')
+ expect(stopPolling).toBeTypeOf('function')
+ expect(stopAllPolling).toBeTypeOf('function')
+ })
+
+ it('should stop all polling on cleanup', () => {
+ const { pollUploadStatus, stopAllPolling } = useUploadPolling()
+
+ const mockSlot1 = reactive({
+ file: null,
+ values: {},
+ error: '',
+ working: false,
+ progress: 0,
+ status: 'postprocessing',
+ errors: [],
+ paused: false,
+ initiated: true,
+ uploadId: 'abc123xyz',
+ })
+
+ const mockSlot2 = reactive({
+ file: null,
+ values: {},
+ error: '',
+ working: false,
+ progress: 0,
+ status: 'postprocessing',
+ errors: [],
+ paused: false,
+ initiated: true,
+ uploadId: 'def456uvw',
+ })
+
+ pollUploadStatus('abc123xyz', 'token1', mockSlot1)
+ pollUploadStatus('def456uvw', 'token2', mockSlot2)
+
+ stopAllPolling()
+
+ expect(true).toBe(true)
+ })
+})
diff --git a/frontend/app/types/token.ts b/frontend/app/types/token.ts
index 2ae35f8..23e32f9 100644
--- a/frontend/app/types/token.ts
+++ b/frontend/app/types/token.ts
@@ -10,6 +10,7 @@ export type TokenInfo = {
allowed_mime?: string[];
download_token: string;
expires_at?: string;
+ disabled?: boolean;
allow_public_downloads?: boolean;
uploads: UploadRow[];
};
diff --git a/frontend/app/types/uploads.ts b/frontend/app/types/uploads.ts
index 63e0665..56ca59c 100644
--- a/frontend/app/types/uploads.ts
+++ b/frontend/app/types/uploads.ts
@@ -1,11 +1,20 @@
+export interface TusUpload {
+ start(): void;
+ abort(): Promise | void;
+}
+
+export interface UploadMetadata {
+ [key: string]: string | number | boolean | Date | string[] | undefined;
+}
+
export type UploadRow = {
- id: number;
+ public_id: string;
title?: string;
filename?: string;
ext?: string;
mimetype?: string;
source?: string;
- meta_data?: Record;
+ meta_data?: UploadMetadata;
broadcast_date?: string;
size_bytes?: number;
created_at?: string;
@@ -19,15 +28,44 @@ export type UploadRow = {
export type Slot = {
file: File | null;
- values: Record;
+ values: UploadMetadata;
error: string;
working: boolean;
progress: number;
bytesUploaded?: number;
status: string;
errors: string[];
- tusUpload?: any;
+ tusUpload?: TusUpload;
paused: boolean;
- uploadId?: number;
+ uploadId?: string;
initiated: boolean;
};
+
+export interface UploadRowWithSlot extends UploadRow {
+ slot?: Slot;
+ _reactiveKey?: string;
+}
+
+export interface InitiateUploadResponse {
+ upload_id: string;
+ upload_url: string;
+ download_url: string;
+ meta_data: UploadMetadata;
+ allowed_mime: string[] | null;
+ remaining_uploads: number;
+}
+
+export interface CancelUploadResponse {
+ remaining_uploads: number;
+}
+
+export interface ApiError {
+ data?: {
+ detail?: string;
+ };
+ message?: string;
+ response?: {
+ status?: number;
+ };
+ status?: number;
+}
diff --git a/frontend/app/utils/index.ts b/frontend/app/utils/index.ts
index 289e9a2..a8b8996 100644
--- a/frontend/app/utils/index.ts
+++ b/frontend/app/utils/index.ts
@@ -35,13 +35,20 @@ function formatBytes(size: number): string {
}
/**
- * Format date to locale string
+ * Format date to locale string with timezone
*/
function formatDate(d?: string) {
if (!d) return "";
try {
const date = new Date(d);
- return date.toLocaleString();
+ return date.toLocaleString(undefined, {
+ year: 'numeric',
+ month: 'short',
+ day: 'numeric',
+ hour: '2-digit',
+ minute: '2-digit',
+ timeZoneName: 'short'
+ });
} catch {
return d;
}
@@ -66,7 +73,7 @@ function formatKey(key: string): string {
/**
* Format metadata value for display
*/
-function formatValue(val: any): string {
+function formatValue(val: unknown): string {
if (val === null || val === undefined) return '—';
if (Array.isArray(val)) return val.join(', ');
if (typeof val === 'object') return JSON.stringify(val);
diff --git a/frontend/app/utils/validation.ts b/frontend/app/utils/validation.ts
index ecb0e52..c907640 100644
--- a/frontend/app/utils/validation.ts
+++ b/frontend/app/utils/validation.ts
@@ -14,8 +14,10 @@ export function validateSlot(slot: Slot, schema: Field[], tokenInfo: TokenInfo |
return
}
if (f.type === 'date' && val) {
- const d = new Date(val)
- if (Number.isNaN(d.getTime())) errs.push(`${f.label} must be a valid date`)
+ if (typeof val === 'string' || typeof val === 'number' || val instanceof Date) {
+ const d = new Date(val)
+ if (Number.isNaN(d.getTime())) errs.push(`${f.label} must be a valid date`)
+ }
}
if (f.type === 'number' || f.type === 'integer') {
if (val !== null && val !== undefined && val !== '') {
@@ -29,7 +31,7 @@ export function validateSlot(slot: Slot, schema: Field[], tokenInfo: TokenInfo |
if (f.minLength && val.length < f.minLength) errs.push(`${f.label} must be at least ${f.minLength} chars`)
if (f.maxLength && val.length > f.maxLength) errs.push(`${f.label} must be at most ${f.maxLength} chars`)
}
- if (f.type === 'select' && f.options && val) {
+ if (f.type === 'select' && f.options && val && typeof val === 'string') {
if (!f.allowCustom) {
const opts = f.options.map((o) => (typeof o === 'string' ? o : o.value))
if (!opts.includes(val)) errs.push(`${f.label} has invalid option`)
@@ -38,8 +40,8 @@ export function validateSlot(slot: Slot, schema: Field[], tokenInfo: TokenInfo |
if (f.type === 'multiselect' && Array.isArray(val) && f.options) {
if (!f.allowCustom) {
const opts = f.options.map((o) => (typeof o === 'string' ? o : o.value))
- val.forEach((v: any) => {
- if (!opts.includes(v)) errs.push(`${f.label} has invalid option: ${v}`)
+ val.forEach((v: unknown) => {
+ if (typeof v === 'string' && !opts.includes(v)) errs.push(`${f.label} has invalid option: ${v}`)
})
}
}
diff --git a/pyproject.toml b/pyproject.toml
index b561800..96302cc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,6 +17,7 @@ dependencies = [
"aiofiles>=24.1.0",
"alembic>=1.13.3",
"python-magic>=0.4.27",
+ "jinja2>=3.1.6",
]
[dependency-groups]
diff --git a/tools/fbc_extractor.py b/tools/fbc_extractor.py
index 17d5073..063a0c9 100644
--- a/tools/fbc_extractor.py
+++ b/tools/fbc_extractor.py
@@ -32,7 +32,7 @@ def _match_id(cls, url) -> None | str:
return None
if mat.group("fid"):
- return f"{mat.group('id')}-{mat.group('fid')}"
+ return mat.group("fid")
return mat.group("id")
@@ -64,6 +64,10 @@ def _real_extract(self, url):
video_id: str = self._match_id(url)
headers: dict[str, str] = {}
+ parsed: ParseResult = urlparse(url)
+ path_prefix = re.sub(r"/(api/tokens|f)/.*$", "", parsed.path)
+ base_url: str = f"{parsed.scheme}://{parsed.netloc}{path_prefix}"
+
if apikey := (FBCIE._APIKEY or os.environ.get("FBC_API_KEY")):
headers["Authorization"] = f"Bearer {apikey!s}"
@@ -88,6 +92,7 @@ def _real_extract(self, url):
video_data,
"video" if is_single or len(items_info) < 2 else "url",
headers=headers,
+ base_url=base_url,
)
for video_data in items_info
if "completed" == video_data.get("status")
@@ -97,7 +102,7 @@ def _real_extract(self, url):
self.report_warning("Token contains no uploaded files.")
return None
- if len(playlist) == 1 or self.get_param("noplaylist"):
+ if is_single or self.get_param("noplaylist"):
if self.get_param("noplaylist") and len(playlist) > 1:
self.to_screen(f"Downloading 1 video out of '{len(playlist)}' because of --no-playlist option")
playlist[0]["_type"] = "video"
@@ -236,9 +241,13 @@ def _expand_format(self, format_dict: dict, ffprobe_data: dict) -> dict:
return format_dict
- def _format_item(self, video_data: dict, _type: str, headers: dict | None = None) -> dict:
+ def _format_item(self, video_data: dict, _type: str, headers: dict | None = None, base_url: str | None = None) -> dict:
+ download_url = video_data.get("download_url")
+ if download_url and base_url and not download_url.startswith(("http://", "https://")):
+ download_url = f"{base_url}{download_url}"
+
base_format = {
- "url": video_data.get("download_url"),
+ "url": download_url,
"ext": video_data.get("ext"),
"filesize": int_or_none(video_data.get("size_bytes")),
}
@@ -247,13 +256,17 @@ def _format_item(self, video_data: dict, _type: str, headers: dict | None = None
if ffprobe_data := meta_data.get("ffprobe"):
base_format = self._expand_format(base_format, ffprobe_data)
+ info_url = video_data.get("info_url")
+ if info_url and base_url and not info_url.startswith(("http://", "https://")):
+ info_url = f"{base_url}{info_url}"
+
dct = {
"id": video_data.get("public_id", video_data.get("id")),
"_type": _type,
"ext": video_data.get("ext"),
"mimetype": video_data.get("mimetype"),
- "url": video_data.get("info_url"),
- "webpage_url": video_data.get("info_url"),
+ "url": info_url,
+ "webpage_url": info_url,
"formats": [base_format],
"title": meta_data.get("title") or video_data.get("filename", f"file_{video_data['id']}"),
"filename": video_data.get("filename"),
diff --git a/uv.lock b/uv.lock
index 91fc713..eb9b88a 100644
--- a/uv.lock
+++ b/uv.lock
@@ -119,6 +119,7 @@ dependencies = [
{ name = "alembic" },
{ name = "fastapi" },
{ name = "httpx" },
+ { name = "jinja2" },
{ name = "pydantic" },
{ name = "pydantic-settings" },
{ name = "python-magic" },
@@ -142,6 +143,7 @@ requires-dist = [
{ name = "alembic", specifier = ">=1.13.3" },
{ name = "fastapi", specifier = ">=0.115.5" },
{ name = "httpx", specifier = ">=0.28.1" },
+ { name = "jinja2", specifier = ">=3.1.6" },
{ name = "pydantic", specifier = ">=2.9.2" },
{ name = "pydantic-settings", specifier = ">=2.4.0" },
{ name = "python-magic", specifier = ">=0.4.27" },
@@ -266,6 +268,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484 },
]
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 },
+]
+
[[package]]
name = "mako"
version = "1.3.10"
|