From a6445b961efd765c24ae598d2d26ac66ce9fa141 Mon Sep 17 00:00:00 2001 From: harshit Date: Sun, 14 Dec 2025 04:23:42 +0530 Subject: [PATCH 01/22] implemented the memory feature backend --- backend/app/database/images.py | 396 +++++++++++++++- backend/app/routes/memories.py | 411 +++++++++++++++++ .../app/utils/extract_location_metadata.py | 390 ++++++++++++++++ backend/app/utils/memory_clustering.py | 432 ++++++++++++++++++ backend/app/utils/verify_memories_setup.py | 268 +++++++++++ backend/extract_metadata_simple.py | 111 +++++ backend/main.py | 8 +- backend/migrate_add_memories_columns.py | 217 +++++++++ backend/requirements.txt | 4 +- backend/test_memories_api.py | 167 +++++++ 10 files changed, 2398 insertions(+), 6 deletions(-) create mode 100644 backend/app/routes/memories.py create mode 100644 backend/app/utils/extract_location_metadata.py create mode 100644 backend/app/utils/memory_clustering.py create mode 100644 backend/app/utils/verify_memories_setup.py create mode 100644 backend/extract_metadata_simple.py create mode 100644 backend/migrate_add_memories_columns.py create mode 100644 backend/test_memories_api.py diff --git a/backend/app/database/images.py b/backend/app/database/images.py index ec9541a56..25de965e5 100644 --- a/backend/app/database/images.py +++ b/backend/app/database/images.py @@ -1,6 +1,7 @@ # Standard library imports import sqlite3 -from typing import Any, List, Mapping, Tuple, TypedDict, Union +from typing import Any, List, Mapping, Tuple, TypedDict, Union, Optional +from datetime import datetime # App-specific imports from app.config.settings import ( @@ -18,7 +19,7 @@ ClassId = int -class ImageRecord(TypedDict): +class ImageRecord(TypedDict, total=False): """Represents the full images table structure""" id: ImageId @@ -27,6 +28,11 @@ class ImageRecord(TypedDict): thumbnailPath: str metadata: Union[Mapping[str, Any], str] isTagged: bool + isFavourite: bool + # New fields for Memories feature + latitude: Optional[float] + longitude: Optional[float] + captured_at: Optional[datetime] class UntaggedImageRecord(TypedDict): @@ -53,7 +59,7 @@ def db_create_images_table() -> None: conn = _connect() cursor = conn.cursor() - # Create new images table with merged fields + # Create new images table with merged fields including Memories feature columns cursor.execute( """ CREATE TABLE IF NOT EXISTS images ( @@ -64,11 +70,28 @@ def db_create_images_table() -> None: metadata TEXT, isTagged BOOLEAN DEFAULT 0, isFavourite BOOLEAN DEFAULT 0, + latitude REAL, + longitude REAL, + captured_at DATETIME, FOREIGN KEY (folder_id) REFERENCES folders(folder_id) ON DELETE CASCADE ) """ ) + # Create indexes for Memories feature queries + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)" + ) + # Create new image_classes junction table cursor.execute( """ @@ -86,6 +109,62 @@ def db_create_images_table() -> None: conn.close() +def db_migrate_add_memories_columns() -> None: + """ + Add Memories feature columns to existing images table if they don't exist. + This function handles backward compatibility for existing databases. 
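+    Safe to call on every startup: columns are added only when they are missing
+    and index creation uses IF NOT EXISTS, so repeated runs are no-ops.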
+ """ + conn = _connect() + cursor = conn.cursor() + + try: + # Check if images table exists + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") + if not cursor.fetchone(): + logger.info("Images table does not exist yet, will be created by db_create_images_table()") + conn.close() + return + + # Get existing columns + cursor.execute("PRAGMA table_info(images)") + columns = {row[1] for row in cursor.fetchall()} + + # Add missing columns + changes_made = False + + if 'latitude' not in columns: + cursor.execute("ALTER TABLE images ADD COLUMN latitude REAL") + logger.info("Added column: latitude") + changes_made = True + + if 'longitude' not in columns: + cursor.execute("ALTER TABLE images ADD COLUMN longitude REAL") + logger.info("Added column: longitude") + changes_made = True + + if 'captured_at' not in columns: + cursor.execute("ALTER TABLE images ADD COLUMN captured_at DATETIME") + logger.info("Added column: captured_at") + changes_made = True + + # Create indexes + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)") + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)") + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)") + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)") + + if changes_made: + logger.info("Memories feature columns migration completed") + + conn.commit() + + except Exception as e: + logger.error(f"Error during Memories columns migration: {e}") + conn.rollback() + finally: + conn.close() + + def db_bulk_insert_images(image_records: List[ImageRecord]) -> bool: """Insert multiple image records in a single transaction.""" if not image_records: @@ -145,6 +224,9 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: i.metadata, i.isTagged, i.isFavourite, + i.latitude, + i.longitude, + i.captured_at, m.name as tag_name FROM images i LEFT JOIN image_classes ic ON i.id = ic.image_id @@ -172,6 +254,9 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: metadata, is_tagged, is_favourite, + latitude, + longitude, + captured_at, tag_name, ) in results: if image_id not in images_dict: @@ -188,6 +273,9 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: "metadata": metadata_dict, "isTagged": bool(is_tagged), "isFavourite": bool(is_favourite), + "latitude": latitude, + "longitude": longitude, + "captured_at": captured_at if captured_at else None, # SQLite returns string "tags": [], } @@ -419,3 +507,305 @@ def db_toggle_image_favourite_status(image_id: str) -> bool: return False finally: conn.close() + + +# ============================================================================ +# MEMORIES FEATURE - Location and Time-based Queries +# ============================================================================ + + +def db_get_images_by_date_range( + start_date: datetime, + end_date: datetime, + include_favorites_only: bool = False +) -> List[dict]: + """ + Get images captured within a date range for Memories timeline. 
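+    Tags are aggregated per image via GROUP_CONCAT and results are ordered by
+    capture time, newest first.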
+ + Args: + start_date: Start of date range (inclusive) + end_date: End of date range (inclusive) + include_favorites_only: If True, only return favorite images + + Returns: + List of image dictionaries with location and time data + """ + conn = _connect() + cursor = conn.cursor() + + try: + query = """ + SELECT + i.id, + i.path, + i.folder_id, + i.thumbnailPath, + i.metadata, + i.isTagged, + i.isFavourite, + i.latitude, + i.longitude, + i.captured_at, + GROUP_CONCAT(m.name, ',') as tags + FROM images i + LEFT JOIN image_classes ic ON i.id = ic.image_id + LEFT JOIN mappings m ON ic.class_id = m.class_id + WHERE i.captured_at BETWEEN ? AND ? + """ + + params = [start_date, end_date] + + if include_favorites_only: + query += " AND i.isFavourite = 1" + + query += """ + GROUP BY i.id + ORDER BY i.captured_at DESC + """ + + cursor.execute(query, params) + results = cursor.fetchall() + + images = [] + for row in results: + from app.utils.images import image_util_parse_metadata + + images.append({ + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None , + "tags": row[10].split(',') if row[10] else None + }) + + return images + + except Exception as e: + logger.error(f"Error getting images by date range: {e}") + return [] + finally: + conn.close() + + +def db_get_images_near_location( + latitude: float, + longitude: float, + radius_km: float = 5.0 +) -> List[dict]: + """ + Get images near a location within radius_km using bounding box approximation. + + Args: + latitude: Center latitude (-90 to 90) + longitude: Center longitude (-180 to 180) + radius_km: Search radius in kilometers (default: 5km) + + Returns: + List of image dictionaries with location data + + Note: + Uses simple bounding box (not precise Haversine distance). + 1 degree latitude ≈ 111 km + 1 degree longitude ≈ 111 km * cos(latitude) + """ + conn = _connect() + cursor = conn.cursor() + + try: + import math + + # Calculate bounding box offsets + lat_offset = radius_km / 111.0 + lon_offset = radius_km / (111.0 * abs(math.cos(math.radians(latitude)))) + + cursor.execute(""" + SELECT + i.id, + i.path, + i.folder_id, + i.thumbnailPath, + i.metadata, + i.isTagged, + i.isFavourite, + i.latitude, + i.longitude, + i.captured_at, + GROUP_CONCAT(m.name, ',') as tags + FROM images i + LEFT JOIN image_classes ic ON i.id = ic.image_id + LEFT JOIN mappings m ON ic.class_id = m.class_id + WHERE i.latitude BETWEEN ? AND ? + AND i.longitude BETWEEN ? AND ? 
+ AND i.latitude IS NOT NULL + AND i.longitude IS NOT NULL + GROUP BY i.id + ORDER BY i.captured_at DESC + """, ( + latitude - lat_offset, + latitude + lat_offset, + longitude - lon_offset, + longitude + lon_offset + )) + + results = cursor.fetchall() + + images = [] + for row in results: + from app.utils.images import image_util_parse_metadata + + images.append({ + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None , # SQLite returns string, + "tags": row[10].split(',') if row[10] else None + }) + + return images + + except Exception as e: + logger.error(f"Error getting images near location: {e}") + return [] + finally: + conn.close() + + +def db_get_images_by_year_month(year: int, month: int) -> List[dict]: + """ + Get all images captured in a specific year and month. + + Args: + year: Year (e.g., 2024) + month: Month (1-12) + + Returns: + List of image dictionaries captured in the specified month + """ + conn = _connect() + cursor = conn.cursor() + + try: + cursor.execute(""" + SELECT + i.id, + i.path, + i.folder_id, + i.thumbnailPath, + i.metadata, + i.isTagged, + i.isFavourite, + i.latitude, + i.longitude, + i.captured_at, + GROUP_CONCAT(m.name, ',') as tags + FROM images i + LEFT JOIN image_classes ic ON i.id = ic.image_id + LEFT JOIN mappings m ON ic.class_id = m.class_id + WHERE strftime('%Y', i.captured_at) = ? + AND strftime('%m', i.captured_at) = ? + GROUP BY i.id + ORDER BY i.captured_at DESC + """, (str(year).zfill(4), str(month).zfill(2))) + + results = cursor.fetchall() + + images = [] + for row in results: + from app.utils.images import image_util_parse_metadata + + images.append({ + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None , # SQLite returns string, + "tags": row[10].split(',') if row[10] else None + }) + + return images + + except Exception as e: + logger.error(f"Error getting images by year/month: {e}") + return [] + finally: + conn.close() + + +def db_get_images_with_location() -> List[dict]: + """ + Get all images that have valid GPS coordinates. + Useful for displaying all photos on a map. 
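+    Both the /api/memories/generate and /api/memories/locations endpoints start
+    from this query.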
+ + Returns: + List of image dictionaries that have latitude and longitude + """ + conn = _connect() + cursor = conn.cursor() + + try: + cursor.execute(""" + SELECT + i.id, + i.path, + i.folder_id, + i.thumbnailPath, + i.metadata, + i.isTagged, + i.isFavourite, + i.latitude, + i.longitude, + i.captured_at, + GROUP_CONCAT(m.name, ',') as tags + FROM images i + LEFT JOIN image_classes ic ON i.id = ic.image_id + LEFT JOIN mappings m ON ic.class_id = m.class_id + WHERE i.latitude IS NOT NULL + AND i.longitude IS NOT NULL + GROUP BY i.id + ORDER BY i.captured_at DESC + """) + + results = cursor.fetchall() + + images = [] + for row in results: + from app.utils.images import image_util_parse_metadata + + images.append({ + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None , # SQLite returns string, + "tags": row[10].split(',') if row[10] else None + }) + + return images + + except Exception as e: + logger.error(f"Error getting images with location: {e}") + return [] + finally: + conn.close() diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py new file mode 100644 index 000000000..24a218828 --- /dev/null +++ b/backend/app/routes/memories.py @@ -0,0 +1,411 @@ +""" +Memories API Routes + +This module provides REST API endpoints for the Memories feature, which groups +photos by location and time into meaningful collections. + +Endpoints: +- POST /api/memories/generate - Generate memories from all images with location data +- GET /api/memories/timeline - Get memories from past N days +- GET /api/memories/on-this-day - Get photos from this date in previous years +- GET /api/memories/locations - Get all unique locations where photos were taken + +Author: PictoPy Team +Date: 2025-12-14 +""" + +from datetime import datetime, timedelta +from typing import List, Dict, Any, Optional +from collections import defaultdict + +from fastapi import APIRouter, HTTPException, Query +from pydantic import BaseModel, Field + +from app.database.images import ( + db_get_images_with_location, + db_get_images_by_date_range, + db_get_images_by_year_month +) +from app.utils.memory_clustering import MemoryClustering +from app.logging.setup_logging import get_logger + +# Initialize router and logger +router = APIRouter(prefix="/api/memories", tags=["memories"]) +logger = get_logger(__name__) + + +# ============================================================================ +# Response Models +# ============================================================================ + +class MemoryImage(BaseModel): + """Image within a memory.""" + id: str + path: str + thumbnailPath: str + latitude: Optional[float] + longitude: Optional[float] + captured_at: Optional[str] + + +class Memory(BaseModel): + """Memory object containing grouped images.""" + memory_id: str + title: str + description: str + location_name: str + date_start: Optional[str] + date_end: Optional[str] + image_count: int + images: List[MemoryImage] + thumbnail_image_id: str + center_lat: float + center_lon: float + + +class GenerateMemoriesResponse(BaseModel): + """Response for generate memories endpoint.""" + success: bool + message: str + memory_count: int + image_count: int + memories: List[Memory] + + +class TimelineResponse(BaseModel): + """Response for timeline endpoint.""" + success: bool + 
date_range: Dict[str, str] + memory_count: int + memories: List[Memory] + + +class OnThisDayResponse(BaseModel): + """Response for on-this-day endpoint.""" + success: bool + today: str + years: List[int] + image_count: int + images: List[MemoryImage] + + +class LocationCluster(BaseModel): + """Location cluster with photo count.""" + location_name: str + center_lat: float + center_lon: float + image_count: int + sample_images: List[MemoryImage] + + +class LocationsResponse(BaseModel): + """Response for locations endpoint.""" + success: bool + location_count: int + locations: List[LocationCluster] + + +# ============================================================================ +# API Endpoints +# ============================================================================ + +@router.post("/generate", response_model=GenerateMemoriesResponse) +async def generate_memories( + location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), + date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days"), + min_images: int = Query(2, ge=1, le=10, description="Minimum images per memory") +): + """ + Generate memories from all images with location data. + + This endpoint: + 1. Fetches all images that have GPS coordinates + 2. Clusters them by location using DBSCAN + 3. Within each location, clusters by date + 4. Returns memory objects with metadata + + Args: + location_radius_km: Maximum distance between photos in same location (default: 5km) + date_tolerance_days: Maximum days between photos in same memory (default: 3) + min_images: Minimum images required to form a memory (default: 2) + + Returns: + GenerateMemoriesResponse with list of generated memories + + Raises: + HTTPException: If database query fails or clustering fails + """ + try: + logger.info("Generating memories with params: " + f"radius={location_radius_km}km, " + f"date_tolerance={date_tolerance_days}days, " + f"min_images={min_images}") + + # Fetch all images with location data + images = db_get_images_with_location() + + if not images: + return GenerateMemoriesResponse( + success=True, + message="No images with location data found", + memory_count=0, + image_count=0, + memories=[] + ) + + logger.info(f"Found {len(images)} images with location data") + + # Cluster images into memories + clustering = MemoryClustering( + location_radius_km=location_radius_km, + date_tolerance_days=date_tolerance_days, + min_images_per_memory=min_images + ) + + memories = clustering.cluster_memories(images) + + logger.info(f"Generated {len(memories)} memories") + + return GenerateMemoriesResponse( + success=True, + message=f"Successfully generated {len(memories)} memories from {len(images)} images", + memory_count=len(memories), + image_count=len(images), + memories=memories + ) + + except Exception as e: + logger.error(f"Error generating memories: {e}") + raise HTTPException(status_code=500, detail=f"Failed to generate memories: {str(e)}") + + +@router.get("/timeline", response_model=TimelineResponse) +async def get_timeline( + days: int = Query(365, ge=1, le=3650, description="Number of days to look back"), + location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), + date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days") +): + """ + Get memories from the past N days as a timeline. + + This endpoint: + 1. Calculates date range (today - N days to today) + 2. Fetches images within that date range + 3. 
Clusters them into memories + 4. Returns timeline of memories + + Args: + days: Number of days to look back (default: 365 = 1 year) + location_radius_km: Location clustering radius (default: 5km) + date_tolerance_days: Date tolerance for temporal clustering (default: 3) + + Returns: + TimelineResponse with memories ordered by date + + Raises: + HTTPException: If database query fails + """ + try: + # Calculate date range + end_date = datetime.now() + start_date = end_date - timedelta(days=days) + + logger.info(f"Getting timeline from {start_date.date()} to {end_date.date()}") + + # Fetch images within date range + images = db_get_images_by_date_range(start_date, end_date) + + if not images: + return TimelineResponse( + success=True, + date_range={ + "start": start_date.isoformat(), + "end": end_date.isoformat() + }, + memory_count=0, + memories=[] + ) + + logger.info(f"Found {len(images)} images in date range") + + # Cluster into memories + clustering = MemoryClustering( + location_radius_km=location_radius_km, + date_tolerance_days=date_tolerance_days, + min_images_per_memory=1 # Allow single images in timeline + ) + + memories = clustering.cluster_memories(images) + + return TimelineResponse( + success=True, + date_range={ + "start": start_date.isoformat(), + "end": end_date.isoformat() + }, + memory_count=len(memories), + memories=memories + ) + + except Exception as e: + logger.error(f"Error getting timeline: {e}") + raise HTTPException(status_code=500, detail=f"Failed to get timeline: {str(e)}") + + +@router.get("/on-this-day", response_model=OnThisDayResponse) +async def get_on_this_day(): + """ + Get photos taken on this date in previous years. + + This endpoint: + 1. Gets current month and day + 2. Searches for images from this month-day in all previous years + 3. Groups by year + 4. 
Returns images sorted by year (most recent first) + + Returns: + OnThisDayResponse with images from this date in previous years + + Raises: + HTTPException: If database query fails + """ + try: + today = datetime.now() + current_month = today.month + current_day = today.day + + logger.info(f"Getting 'On This Day' for {today.strftime('%B %d')}") + + # Search for images from this month-day in past years + # Go back 10 years maximum + all_images = [] + years_found = [] + + for year_offset in range(1, 11): # 1-10 years ago + target_year = today.year - year_offset + + try: + images = db_get_images_by_year_month(target_year, current_month) + + # Filter to specific day + day_images = [ + img for img in images + if img.get('captured_at') and + datetime.fromisoformat(img['captured_at']).day == current_day + ] + + if day_images: + all_images.extend(day_images) + years_found.append(target_year) + logger.info(f"Found {len(day_images)} images from {target_year}") + + except Exception as e: + logger.warning(f"Error querying year {target_year}: {e}") + continue + + # Sort by year (most recent first) + all_images.sort( + key=lambda x: datetime.fromisoformat(x['captured_at']) if x.get('captured_at') else datetime.min, + reverse=True + ) + + return OnThisDayResponse( + success=True, + today=today.strftime("%B %d"), + years=sorted(years_found, reverse=True), + image_count=len(all_images), + images=all_images + ) + + except Exception as e: + logger.error(f"Error getting 'On This Day': {e}") + raise HTTPException(status_code=500, detail=f"Failed to get 'On This Day': {str(e)}") + + +@router.get("/locations", response_model=LocationsResponse) +async def get_locations( + location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), + max_sample_images: int = Query(5, ge=1, le=20, description="Max sample images per location") +): + """ + Get all unique locations where photos were taken. + + This endpoint: + 1. Fetches all images with GPS coordinates + 2. Clusters them by location + 3. Returns location clusters with photo counts + 4. 
Includes sample images for each location + + Args: + location_radius_km: Location clustering radius (default: 5km) + max_sample_images: Maximum sample images per location (default: 5) + + Returns: + LocationsResponse with list of location clusters + + Raises: + HTTPException: If database query fails + """ + try: + logger.info(f"Getting locations with radius={location_radius_km}km") + + # Fetch all images with location data + images = db_get_images_with_location() + + if not images: + return LocationsResponse( + success=True, + location_count=0, + locations=[] + ) + + logger.info(f"Found {len(images)} images with location data") + + # Cluster by location only (no date clustering) + clustering = MemoryClustering( + location_radius_km=location_radius_km, + date_tolerance_days=999999, # Large number to group all dates together + min_images_per_memory=1 + ) + + # Use internal method to get location clusters + location_clusters = clustering._cluster_by_location( + clustering._filter_valid_images(images) + ) + + # Create location cluster objects + locations = [] + for cluster_images in location_clusters: + if not cluster_images: + continue + + # Calculate center + center_lat = sum(img['latitude'] for img in cluster_images) / len(cluster_images) + center_lon = sum(img['longitude'] for img in cluster_images) / len(cluster_images) + + # Get location name + location_name = clustering._reverse_geocode(center_lat, center_lon) + + # Get sample images (up to max_sample_images) + sample_images = cluster_images[:max_sample_images] + + locations.append(LocationCluster( + location_name=location_name, + center_lat=center_lat, + center_lon=center_lon, + image_count=len(cluster_images), + sample_images=sample_images + )) + + # Sort by image count (most photos first) + locations.sort(key=lambda loc: loc.image_count, reverse=True) + + return LocationsResponse( + success=True, + location_count=len(locations), + locations=locations + ) + + except Exception as e: + logger.error(f"Error getting locations: {e}") + raise HTTPException(status_code=500, detail=f"Failed to get locations: {str(e)}") diff --git a/backend/app/utils/extract_location_metadata.py b/backend/app/utils/extract_location_metadata.py new file mode 100644 index 000000000..ccd8290a4 --- /dev/null +++ b/backend/app/utils/extract_location_metadata.py @@ -0,0 +1,390 @@ +""" +Location and Datetime Metadata Extraction Utility + +This module extracts GPS coordinates and capture datetime from image metadata JSON +and populates the dedicated latitude, longitude, and captured_at columns in the database. + +Usage: + python -m app.utils.extract_location_metadata + +Author: PictoPy Team +Date: 2025-12-14 +""" + +import json +import sqlite3 +from datetime import datetime +from typing import Optional, Tuple, Dict, Any +from pathlib import Path + +from app.config.settings import DATABASE_PATH +from app.logging.setup_logging import get_logger + +# Initialize logger +logger = get_logger(__name__) + + +class MetadataExtractor: + """ + Extracts location and datetime information from image metadata JSON. 
+ + This class provides utilities to safely parse metadata and extract: + - GPS coordinates (latitude, longitude) + - Capture datetime + """ + + def __init__(self): + """Initialize the metadata extractor.""" + self.stats = { + 'total': 0, + 'updated': 0, + 'with_location': 0, + 'with_datetime': 0, + 'with_both': 0, + 'skipped': 0, + 'errors': 0 + } + + def extract_gps_coordinates(self, metadata: Dict[str, Any]) -> Tuple[Optional[float], Optional[float]]: + """ + Extract GPS coordinates from metadata dictionary. + + Supports multiple metadata structures: + - Top-level: {"latitude": 28.6, "longitude": 77.2} + - Nested EXIF: {"exif": {"gps": {"latitude": 28.6, "longitude": 77.2}}} + - Alternative names: lat, lon, Latitude, Longitude + + Args: + metadata: Parsed metadata dictionary + + Returns: + Tuple of (latitude, longitude) or (None, None) if not found + + Validates: + - Latitude: -90 to 90 + - Longitude: -180 to 180 + """ + latitude = None + longitude = None + + try: + if not isinstance(metadata, dict): + return None, None + + # Method 1: Direct top-level fields + lat = metadata.get('latitude') + lon = metadata.get('longitude') + + # Method 2: Check nested 'exif' -> 'gps' structure + if not lat or not lon: + exif = metadata.get('exif', {}) + if isinstance(exif, dict): + gps = exif.get('gps', {}) + if isinstance(gps, dict): + lat = lat or gps.get('latitude') + lon = lon or gps.get('longitude') + + # Method 3: Check alternative field names + if not lat or not lon: + lat = lat or metadata.get('lat') or metadata.get('Latitude') + lon = lon or metadata.get('lon') or metadata.get('Longitude') + + # Validate and convert coordinates + if lat is not None and lon is not None: + try: + lat = float(lat) + lon = float(lon) + + # Sanity check: valid coordinate ranges + if -90 <= lat <= 90 and -180 <= lon <= 180: + latitude = lat + longitude = lon + else: + logger.warning(f"Invalid coordinate range: lat={lat}, lon={lon}") + except (ValueError, TypeError) as e: + logger.warning(f"Could not convert coordinates to float: {e}") + + except Exception as e: + logger.error(f"Unexpected error extracting GPS coordinates: {e}") + + return latitude, longitude + + def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: + """ + Extract capture datetime from metadata dictionary. + + Supports multiple datetime formats and field names: + - date_created, datetime, date_taken, timestamp, DateTime + - Nested: exif.datetime, exif.DateTimeOriginal + - Formats: ISO 8601, EXIF format (YYYY:MM:DD HH:MM:SS), etc. 
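+        For example, {"exif": {"DateTimeOriginal": "2024:01:15 14:30:45"}}
+        parses to datetime(2024, 1, 15, 14, 30, 45).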
+ + Args: + metadata: Parsed metadata dictionary + + Returns: + datetime object or None if not found/parseable + """ + captured_at = None + + try: + if not isinstance(metadata, dict): + return None + + # Method 1: Check common top-level field names + date_str = None + for field in ['date_created', 'datetime', 'date_taken', 'timestamp', 'DateTime']: + if field in metadata: + date_str = metadata[field] + break + + # Method 2: Check nested 'exif' structure + if not date_str: + exif = metadata.get('exif', {}) + if isinstance(exif, dict): + date_str = ( + exif.get('datetime') or + exif.get('DateTime') or + exif.get('DateTimeOriginal') or + exif.get('DateTimeDigitized') + ) + + # Parse datetime string + if date_str: + date_str = str(date_str).strip() + + # Try multiple datetime formats + datetime_formats = [ + '%Y-%m-%d %H:%M:%S', # 2024-01-15 14:30:45 + '%Y:%m:%d %H:%M:%S', # 2024:01:15 14:30:45 (EXIF format) + '%Y-%m-%dT%H:%M:%S', # 2024-01-15T14:30:45 (ISO) + '%Y-%m-%dT%H:%M:%S.%f', # 2024-01-15T14:30:45.123456 + '%Y-%m-%d', # 2024-01-15 + '%d/%m/%Y %H:%M:%S', # 15/01/2024 14:30:45 + '%d/%m/%Y', # 15/01/2024 + '%m/%d/%Y %H:%M:%S', # 01/15/2024 14:30:45 + '%m/%d/%Y', # 01/15/2024 + ] + + # Try ISO format first (handles timezone) + if 'T' in date_str: + try: + # Remove timezone suffix for simpler parsing + date_str_clean = date_str.replace('Z', '').split('+')[0].split('-') + # Rejoin only date-time parts (not timezone) + if len(date_str_clean) >= 3: + date_str_clean = '-'.join(date_str_clean[:3]) + captured_at = datetime.fromisoformat(date_str_clean) + except Exception: + pass + + # Try other formats + if not captured_at: + for fmt in datetime_formats: + try: + captured_at = datetime.strptime(date_str, fmt) + break + except (ValueError, TypeError): + continue + + if not captured_at: + logger.warning(f"Could not parse datetime: {date_str}") + + except Exception as e: + logger.error(f"Unexpected error extracting datetime: {e}") + + return captured_at + + def extract_all(self, metadata_json: str) -> Tuple[Optional[float], Optional[float], Optional[datetime]]: + """ + Extract GPS coordinates and datetime from metadata JSON string. + + Args: + metadata_json: JSON string from images.metadata column + + Returns: + Tuple of (latitude, longitude, captured_at) + """ + latitude = None + longitude = None + captured_at = None + + # Handle null/empty metadata + if not metadata_json or metadata_json == 'null': + return None, None, None + + try: + # Parse JSON + if isinstance(metadata_json, bytes): + metadata_json = metadata_json.decode('utf-8') + + metadata = json.loads(metadata_json) + + # Extract GPS coordinates + latitude, longitude = self.extract_gps_coordinates(metadata) + + # Extract datetime + captured_at = self.extract_datetime(metadata) + + except json.JSONDecodeError as e: + logger.warning(f"Invalid JSON in metadata: {e}") + except Exception as e: + logger.error(f"Unexpected error parsing metadata: {e}") + + return latitude, longitude, captured_at + + def migrate_metadata(self) -> Dict[str, int]: + """ + Main migration function to populate latitude, longitude, and captured_at + columns for all images with metadata. + + This function: + 1. Connects to the database + 2. Retrieves all images with metadata + 3. Extracts GPS coordinates and datetime + 4. Updates the database with extracted values + 5. 
Reports statistics + + Returns: + Dictionary with migration statistics + """ + logger.info("=" * 70) + logger.info("Starting metadata extraction migration...") + logger.info("=" * 70) + + # Connect to database + conn = sqlite3.connect(DATABASE_PATH) + cursor = conn.cursor() + + try: + # Fetch all images with metadata + logger.info("Fetching images from database...") + cursor.execute("SELECT id, metadata FROM images WHERE metadata IS NOT NULL") + images = cursor.fetchall() + + self.stats['total'] = len(images) + logger.info(f"Found {self.stats['total']} images with metadata") + + if self.stats['total'] == 0: + logger.warning("No images found with metadata") + return self.stats + + # Process each image + updates = [] + for image_id, metadata_json in images: + try: + lat, lon, dt = self.extract_all(metadata_json) + + # Only update if we extracted something + if lat is not None or lon is not None or dt is not None: + updates.append({ + 'id': image_id, + 'latitude': lat, + 'longitude': lon, + 'captured_at': dt + }) + + # Track statistics + has_location = lat is not None and lon is not None + has_datetime = dt is not None + + if has_location: + self.stats['with_location'] += 1 + if has_datetime: + self.stats['with_datetime'] += 1 + if has_location and has_datetime: + self.stats['with_both'] += 1 + else: + self.stats['skipped'] += 1 + + except Exception as e: + self.stats['errors'] += 1 + logger.error(f"Error processing image {image_id}: {e}") + + # Batch update database + if updates: + logger.info(f"Updating {len(updates)} images...") + + for update_data in updates: + cursor.execute(""" + UPDATE images + SET latitude = ?, + longitude = ?, + captured_at = ? + WHERE id = ? + """, ( + update_data['latitude'], + update_data['longitude'], + update_data['captured_at'], + update_data['id'] + )) + + conn.commit() + self.stats['updated'] = len(updates) + logger.info(f"Successfully updated {self.stats['updated']} images") + + # Print summary + self._print_summary() + + except Exception as e: + logger.error(f"Migration failed: {e}") + conn.rollback() + raise + + finally: + conn.close() + + return self.stats + + def _print_summary(self): + """Print migration summary statistics.""" + logger.info("\n" + "=" * 70) + logger.info("METADATA EXTRACTION SUMMARY") + logger.info("=" * 70) + logger.info(f"Total images processed: {self.stats['total']}") + logger.info(f"Images updated: {self.stats['updated']}") + logger.info(f"Images with location data: {self.stats['with_location']} ({self._percentage('with_location')}%)") + logger.info(f"Images with datetime: {self.stats['with_datetime']} ({self._percentage('with_datetime')}%)") + logger.info(f"Images with both: {self.stats['with_both']} ({self._percentage('with_both')}%)") + logger.info(f"Images skipped (no data): {self.stats['skipped']}") + logger.info(f"Errors encountered: {self.stats['errors']}") + logger.info("=" * 70) + + def _percentage(self, key: str) -> str: + """Calculate percentage for a statistic.""" + if self.stats['total'] == 0: + return "0.0" + return f"{(self.stats[key] / self.stats['total'] * 100):.1f}" + + +def main(): + """ + Main entry point for the metadata extraction script. 
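+    Exits with status 1 if any image failed to process, 0 otherwise.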
+ + Usage: + python -m app.utils.extract_location_metadata + """ + try: + # Check if database exists + if not Path(DATABASE_PATH).exists(): + logger.error(f"Database not found at: {DATABASE_PATH}") + return + + # Create extractor and run migration + extractor = MetadataExtractor() + stats = extractor.migrate_metadata() + + # Exit with appropriate code + if stats['errors'] > 0: + logger.warning("Migration completed with errors") + exit(1) + else: + logger.info("✅ Migration completed successfully!") + exit(0) + + except Exception as e: + logger.error(f"❌ Migration failed: {e}") + exit(1) + + +if __name__ == "__main__": + main() diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py new file mode 100644 index 000000000..42ac0e4d5 --- /dev/null +++ b/backend/app/utils/memory_clustering.py @@ -0,0 +1,432 @@ +""" +Memory Clustering Algorithm + +This module groups images into "memories" based on spatial proximity (location) +and temporal proximity (date/time). Uses DBSCAN for spatial clustering and +date-based grouping for temporal clustering. + +A "memory" is a collection of photos taken at the same place around the same time. + +Author: PictoPy Team +Date: 2025-12-14 +""" + +import math +from datetime import datetime, timedelta +from typing import List, Dict, Any, Optional, Tuple +from collections import defaultdict + +import numpy as np +from sklearn.cluster import DBSCAN + +from app.logging.setup_logging import get_logger + +# Initialize logger +logger = get_logger(__name__) + + +class MemoryClustering: + """ + Clusters images into memories based on location and time proximity. + + Algorithm: + 1. Spatial clustering: Group images by GPS coordinates using DBSCAN + 2. Temporal clustering: Within each location cluster, group by date + 3. Memory creation: Generate memory objects with metadata + + Parameters: + location_radius_km: Maximum distance between photos in the same location (default: 5km) + date_tolerance_days: Maximum days between photos in the same memory (default: 3) + min_images_per_memory: Minimum images required to form a memory (default: 2) + """ + + def __init__( + self, + location_radius_km: float = 5.0, + date_tolerance_days: int = 3, + min_images_per_memory: int = 2 + ): + """Initialize the memory clustering algorithm.""" + self.location_radius_km = location_radius_km + self.date_tolerance_days = date_tolerance_days + self.min_images_per_memory = min_images_per_memory + + # Convert km to degrees for DBSCAN + # Approximate: 1 degree latitude ≈ 111 km + self.location_eps_degrees = location_radius_km / 111.0 + + logger.info(f"MemoryClustering initialized: radius={location_radius_km}km, " + f"date_tolerance={date_tolerance_days}days, " + f"min_images={min_images_per_memory}") + + def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + Main entry point: Cluster images into memories. 
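+        Spatial clustering (DBSCAN over GPS coordinates) runs first; each
+        location cluster is then split by capture date, and only groups with at
+        least min_images_per_memory photos become memories.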
+ + Args: + images: List of image dictionaries with fields: + - id: Image ID + - path: File path + - thumbnailPath: Thumbnail path + - latitude: GPS latitude (required) + - longitude: GPS longitude (required) + - captured_at: Capture datetime (ISO string or datetime object) + - metadata: Additional metadata dict + + Returns: + List of memory dictionaries with fields: + - memory_id: Unique memory identifier + - title: Memory title (e.g., "Trip to Paris") + - description: Memory description + - location_name: Human-readable location + - date_start: Start date (ISO string) + - date_end: End date (ISO string) + - image_count: Number of images in memory + - images: List of image objects + - thumbnail_image_id: ID of representative image + - center_lat: Center latitude of cluster + - center_lon: Center longitude of cluster + """ + logger.info(f"Starting memory clustering for {len(images)} images") + + if not images: + logger.warning("No images provided for clustering") + return [] + + # Filter images with valid location data + valid_images = self._filter_valid_images(images) + + if not valid_images: + logger.warning("No images with valid location data") + return [] + + logger.info(f"Found {len(valid_images)} images with valid location data") + + # Step 1: Cluster by location (spatial) + location_clusters = self._cluster_by_location(valid_images) + logger.info(f"Created {len(location_clusters)} location clusters") + + # Step 2: Within each location cluster, cluster by date (temporal) + memories = [] + for location_cluster in location_clusters: + temporal_clusters = self._cluster_by_date(location_cluster) + + # Step 3: Create memory objects + for temporal_cluster in temporal_clusters: + if len(temporal_cluster) >= self.min_images_per_memory: + memory = self._create_memory(temporal_cluster) + memories.append(memory) + + logger.info(f"Generated {len(memories)} memories") + + # Sort memories by date (most recent first) + memories.sort(key=lambda m: m['date_start'] if m['date_start'] else '', reverse=True) + + return memories + + def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + Filter images that have valid location and datetime data. 
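+        Location is required; captured_at is parsed when present, but images
+        whose date cannot be parsed are still kept.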
+ + Args: + images: List of image dictionaries + + Returns: + List of valid images with parsed datetime objects + """ + valid_images = [] + + for img in images: + try: + # Check for required fields + if not img.get('latitude') or not img.get('longitude'): + continue + + # Parse captured_at if it's a string + captured_at = img.get('captured_at') + img_copy = img.copy() + + if captured_at: + if isinstance(captured_at, str): + try: + # SQLite returns ISO format: "YYYY-MM-DDTHH:MM:SS" + captured_at = datetime.fromisoformat(captured_at.replace('Z', '')) + img_copy['captured_at'] = captured_at + except Exception as e: + # Try alternative formats + for fmt in ['%Y-%m-%d %H:%M:%S', '%Y:%m:%d %H:%M:%S', '%Y-%m-%d']: + try: + captured_at = datetime.strptime(captured_at, fmt) + img_copy['captured_at'] = captured_at + break + except Exception: + continue + else: + # Could not parse date, but location is still valid + logger.debug(f"Could not parse date for image {img.get('id')}: {captured_at}") + elif isinstance(captured_at, datetime): + img_copy['captured_at'] = captured_at + + valid_images.append(img_copy) + + except Exception as e: + logger.warning(f"Error filtering image {img.get('id')}: {e}") + continue + + return valid_images + + def _cluster_by_location(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: + """ + Cluster images by geographic location using DBSCAN. + + Args: + images: List of image dictionaries with latitude/longitude + + Returns: + List of location clusters (each cluster is a list of images) + """ + if not images: + return [] + + # Extract coordinates + coordinates = np.array([ + [img['latitude'], img['longitude']] + for img in images + ]) + + # Apply DBSCAN clustering + # eps: maximum distance between two samples (in degrees) + # min_samples: minimum number of samples to form a cluster + clustering = DBSCAN( + eps=self.location_eps_degrees, + min_samples=1, # Even single photos can form a cluster + metric='haversine', # Use haversine distance for lat/lon + algorithm='ball_tree' + ) + + # Convert to radians for haversine + coordinates_rad = np.radians(coordinates) + labels = clustering.fit_predict(coordinates_rad) + + # Group images by cluster label + clusters = defaultdict(list) + for idx, label in enumerate(labels): + if label != -1: # -1 is noise in DBSCAN + clusters[label].append(images[idx]) + + # Noise points (label -1) each become their own cluster + for idx, label in enumerate(labels): + if label == -1: + clusters[f"noise_{idx}"].append(images[idx]) + + return list(clusters.values()) + + def _cluster_by_date(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: + """ + Cluster images by date within a location cluster. + + Groups images that were taken within date_tolerance_days of each other. 
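+        Images are sorted by capture time and a new cluster starts whenever the
+        gap to the previous photo exceeds the tolerance; images without a
+        parseable date are collected into one extra cluster.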
+ + Args: + images: List of image dictionaries with captured_at datetime + + Returns: + List of temporal clusters (each cluster is a list of images) + """ + if not images: + return [] + + # Sort by date + sorted_images = sorted( + [img for img in images if img.get('captured_at')], + key=lambda x: x['captured_at'] + ) + + # Images without dates go into a separate cluster + no_date_images = [img for img in images if not img.get('captured_at')] + + if not sorted_images: + return [no_date_images] if no_date_images else [] + + # Group by date tolerance + clusters = [] + current_cluster = [sorted_images[0]] + + for i in range(1, len(sorted_images)): + prev_date = sorted_images[i-1]['captured_at'] + curr_date = sorted_images[i]['captured_at'] + + # Check if within tolerance + date_diff = abs((curr_date - prev_date).days) + + if date_diff <= self.date_tolerance_days: + current_cluster.append(sorted_images[i]) + else: + # Start new cluster + clusters.append(current_cluster) + current_cluster = [sorted_images[i]] + + # Add last cluster + if current_cluster: + clusters.append(current_cluster) + + # Add no-date images as separate cluster if exists + if no_date_images: + clusters.append(no_date_images) + + return clusters + + def _create_memory(self, images: List[Dict[str, Any]]) -> Dict[str, Any]: + """ + Create a memory object from a cluster of images. + + Args: + images: List of image dictionaries in the cluster + + Returns: + Memory dictionary with metadata + """ + # Calculate center coordinates + center_lat = np.mean([img['latitude'] for img in images]) + center_lon = np.mean([img['longitude'] for img in images]) + + # Get date range + dates = [img['captured_at'] for img in images if img.get('captured_at')] + if dates: + date_start = min(dates) + date_end = max(dates) + else: + date_start = None + date_end = None + + # Get location name + location_name = self._reverse_geocode(center_lat, center_lon) + + # Generate title + title = self._generate_title(location_name, date_start, len(images)) + + # Generate description + description = self._generate_description(len(images), date_start, date_end) + + # Select thumbnail (first image or middle image) + thumbnail_idx = len(images) // 2 + thumbnail_image_id = images[thumbnail_idx]['id'] + + # Create memory ID (use timestamp + location hash) + memory_id = self._generate_memory_id(center_lat, center_lon, date_start) + + # Convert captured_at datetime objects to ISO strings for all images + serialized_images = [] + for img in images: + img_copy = img.copy() + if img_copy.get('captured_at') and isinstance(img_copy['captured_at'], datetime): + img_copy['captured_at'] = img_copy['captured_at'].isoformat() + serialized_images.append(img_copy) + + return { + 'memory_id': memory_id, + 'title': title, + 'description': description, + 'location_name': location_name, + 'date_start': date_start.isoformat() if date_start else None, + 'date_end': date_end.isoformat() if date_end else None, + 'image_count': len(images), + 'images': serialized_images, + 'thumbnail_image_id': thumbnail_image_id, + 'center_lat': float(center_lat), + 'center_lon': float(center_lon) + } + + def _reverse_geocode(self, latitude: float, longitude: float) -> str: + """ + Convert GPS coordinates to a human-readable location name. + + This is a simple implementation. 
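+        It formats the raw coordinates as the location name and makes no
+        network calls.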
For production, consider using: + - Geopy with Nominatim + - Google Maps Geocoding API + - Mapbox Geocoding API + + Args: + latitude: GPS latitude + longitude: GPS longitude + + Returns: + Location string (e.g., "Paris, France") + """ + # Simple placeholder implementation + # Returns coordinates formatted as location + return f"{latitude:.4f}°, {longitude:.4f}°" + + def _generate_title( + self, + location_name: str, + date: Optional[datetime], + image_count: int + ) -> str: + """ + Generate a title for the memory. + + Args: + location_name: Human-readable location + date: Date of the memory + image_count: Number of images + + Returns: + Title string + """ + if date: + month_year = date.strftime("%B %Y") + return f"{location_name} - {month_year}" + else: + return f"{location_name} - {image_count} photos" + + def _generate_description( + self, + image_count: int, + date_start: Optional[datetime], + date_end: Optional[datetime] + ) -> str: + """ + Generate a description for the memory. + + Args: + image_count: Number of images + date_start: Start date + date_end: End date + + Returns: + Description string + """ + if date_start and date_end: + if date_start.date() == date_end.date(): + return f"{image_count} photos from {date_start.strftime('%B %d, %Y')}" + else: + days = (date_end - date_start).days + 1 + return f"{image_count} photos over {days} days ({date_start.strftime('%b %d')} - {date_end.strftime('%b %d, %Y')})" + else: + return f"{image_count} photos" + + def _generate_memory_id( + self, + latitude: float, + longitude: float, + date: Optional[datetime] + ) -> str: + """ + Generate a unique ID for the memory. + + Args: + latitude: Center latitude + longitude: Center longitude + date: Date of memory + + Returns: + Unique memory ID + """ + # Create hash from location and date + location_hash = hash((round(latitude, 2), round(longitude, 2))) + if date: + date_str = date.strftime('%Y%m%d') + return f"mem_{date_str}_{abs(location_hash)}" + else: + return f"mem_nodate_{abs(location_hash)}" diff --git a/backend/app/utils/verify_memories_setup.py b/backend/app/utils/verify_memories_setup.py new file mode 100644 index 000000000..e38b2d6ee --- /dev/null +++ b/backend/app/utils/verify_memories_setup.py @@ -0,0 +1,268 @@ +""" +Verification script for Memories feature setup. +Checks all dependencies, database schema, file structure, and API routes. + +Usage: + python -m app.utils.verify_memories_setup +""" + +import sys +import os +import sqlite3 +import importlib +from pathlib import Path + +# ANSI color codes for terminal output +class Colors: + GREEN = '\033[92m' + RED = '\033[91m' + YELLOW = '\033[93m' + BLUE = '\033[94m' + BOLD = '\033[1m' + RESET = '\033[0m' + +def print_header(text): + """Print section header""" + print(f"\n{Colors.BOLD}{Colors.BLUE}{'='*60}{Colors.RESET}") + print(f"{Colors.BOLD}{Colors.BLUE}{text}{Colors.RESET}") + print(f"{Colors.BOLD}{Colors.BLUE}{'='*60}{Colors.RESET}\n") + +def print_success(text): + """Print success message""" + print(f"{Colors.GREEN}✓ {text}{Colors.RESET}") + +def print_error(text): + """Print error message""" + print(f"{Colors.RED}✗ {text}{Colors.RESET}") + +def print_warning(text): + """Print warning message""" + print(f"{Colors.YELLOW}⚠ {text}{Colors.RESET}") + +def print_info(text): + """Print info message""" + print(f" {text}") + +def check_dependencies(): + """Check if all required packages are installed""" + print_header("1. 
Checking Python Dependencies") + + required_packages = { + 'numpy': '1.26.4', + 'sklearn': '1.5.1', # scikit-learn imports as sklearn + 'fastapi': '0.111.0', + 'sqlalchemy': None, + 'pydantic': None, + } + + all_installed = True + + for package, expected_version in required_packages.items(): + try: + module = importlib.import_module(package) + version = getattr(module, '__version__', 'Unknown') + + if expected_version and version != expected_version: + print_warning(f"{package} installed (v{version}), expected v{expected_version}") + else: + print_success(f"{package} v{version}") + except ImportError: + print_error(f"{package} is NOT installed") + all_installed = False + + return all_installed + +def check_file_structure(): + """Check if all required files exist""" + print_header("2. Checking File Structure") + + backend_path = Path(__file__).parent.parent.parent + + required_files = [ + 'app/utils/extract_location_metadata.py', + 'app/utils/memory_clustering.py', + 'app/routes/memories.py', + 'app/database/images.py', + 'main.py', + ] + + all_exist = True + + for file_path in required_files: + full_path = backend_path / file_path + if full_path.exists(): + print_success(f"{file_path}") + print_info(f" → {full_path}") + else: + print_error(f"{file_path} NOT FOUND") + all_exist = False + + return all_exist + +def check_database_schema(): + """Check if database has required columns and indexes""" + print_header("3. Checking Database Schema") + + backend_path = Path(__file__).parent.parent.parent + db_path = backend_path / 'app' / 'database' / 'PictoPy.db' + + if not db_path.exists(): + print_warning("Database file 'gallery.db' not found") + print_info(" → Database will be created on first run") + return None # Not an error, just not initialized yet + + try: + conn = sqlite3.connect(str(db_path)) + cursor = conn.cursor() + + # Check if images table exists + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") + if not cursor.fetchone(): + print_error("Table 'images' does not exist") + conn.close() + return False + + print_success("Table 'images' exists") + + # Check for required columns + cursor.execute("PRAGMA table_info(images)") + columns = {row[1]: row[2] for row in cursor.fetchall()} + + required_columns = { + 'latitude': 'FLOAT', + 'longitude': 'FLOAT', + 'captured_at': 'DATETIME', + } + + all_columns_exist = True + for col_name, col_type in required_columns.items(): + if col_name in columns: + print_success(f"Column '{col_name}' ({columns[col_name]})") + else: + print_error(f"Column '{col_name}' NOT FOUND") + print_info(" → Run migration: python migrate_add_memories_columns.py") + print_info(" → Or restart the app (auto-migration enabled)") + all_columns_exist = False + + # Check for indexes + cursor.execute("SELECT name FROM sqlite_master WHERE type='index'") + indexes = [row[0] for row in cursor.fetchall()] + + required_indexes = [ + 'ix_images_latitude', + 'ix_images_longitude', + 'ix_images_captured_at', + ] + + print() + for index_name in required_indexes: + if index_name in indexes: + print_success(f"Index '{index_name}'") + else: + print_warning(f"Index '{index_name}' not found (recommended for performance)") + + conn.close() + return all_columns_exist + + except Exception as e: + print_error(f"Database check failed: {e}") + return False + +def check_imports(): + """Check if all modules can be imported""" + print_header("4. 
Checking Module Imports") + + modules_to_check = [ + 'app.utils.extract_location_metadata', + 'app.utils.memory_clustering', + 'app.routes.memories', + 'app.database.images', + ] + + all_imported = True + + for module_name in modules_to_check: + try: + importlib.import_module(module_name) + print_success(f"{module_name}") + except Exception as e: + print_error(f"{module_name} - {str(e)}") + all_imported = False + + return all_imported + +def check_api_routes(): + """Check if Memories API routes are registered""" + print_header("5. Checking API Routes") + + try: + # Import main app + sys.path.insert(0, str(Path(__file__).parent.parent.parent)) + from main import app + + # Get all routes + routes = [route.path for route in app.routes] + + required_routes = [ + '/api/memories/generate', + '/api/memories/timeline', + '/api/memories/on-this-day', + '/api/memories/locations', + ] + + all_routes_exist = True + for route_path in required_routes: + if route_path in routes: + print_success(f"{route_path}") + else: + print_error(f"{route_path} NOT FOUND") + print_info(" → Check if memories router is included in main.py") + all_routes_exist = False + + return all_routes_exist + + except Exception as e: + print_error(f"Failed to check routes: {e}") + return False + +def print_summary(results): + """Print final summary""" + print_header("Verification Summary") + + all_passed = all(result is not False for result in results.values()) + + for check_name, result in results.items(): + status = "✓ PASS" if result else ("⚠ WARNING" if result is None else "✗ FAIL") + color = Colors.GREEN if result else (Colors.YELLOW if result is None else Colors.RED) + print(f"{color}{status}{Colors.RESET} - {check_name}") + + print() + if all_passed: + print(f"{Colors.BOLD}{Colors.GREEN}🎉 All checks passed! Memories feature is ready to use.{Colors.RESET}") + print_info("Next steps:") + print_info("1. Start the backend: cd backend && ./run.sh") + print_info("2. Run metadata extraction: python -m app.utils.extract_location_metadata") + print_info("3. Test API endpoints: see MEMORIES_TESTING_GUIDE.md") + else: + print(f"{Colors.BOLD}{Colors.RED}❌ Some checks failed. Please fix the issues above.{Colors.RESET}") + print_info("See MEMORIES_README.md for setup instructions") + + print() + +def main(): + """Run all verification checks""" + print(f"\n{Colors.BOLD}PictoPy Memories Feature Verification{Colors.RESET}") + print(f"{Colors.BOLD}====================================={Colors.RESET}") + + results = { + 'Dependencies': check_dependencies(), + 'File Structure': check_file_structure(), + 'Database Schema': check_database_schema(), + 'Module Imports': check_imports(), + 'API Routes': check_api_routes(), + } + + print_summary(results) + +if __name__ == '__main__': + main() diff --git a/backend/extract_metadata_simple.py b/backend/extract_metadata_simple.py new file mode 100644 index 000000000..2cbccda94 --- /dev/null +++ b/backend/extract_metadata_simple.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 +""" +Standalone script to extract location data from metadata and update the database. 
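+It has no dependencies on the app package: it opens the SQLite database directly
+and only reads the top-level latitude, longitude, and date_created keys from the
+metadata JSON.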
+""" + +import json +import sqlite3 +from pathlib import Path + +# Database path +DB_PATH = Path(__file__).parent / 'app' / 'database' / 'PictoPy.db' + +def extract_and_update(): + """Extract location and datetime from metadata JSON and update database columns.""" + + print("=" * 70) + print("Starting metadata extraction...") + print("=" * 70) + + conn = sqlite3.connect(DB_PATH) + cursor = conn.cursor() + + # Get all images with metadata + cursor.execute("SELECT id, metadata FROM images WHERE metadata IS NOT NULL AND metadata != ''") + images = cursor.fetchall() + + print(f"\nFound {len(images)} images with metadata") + + updated_count = 0 + location_count = 0 + datetime_count = 0 + both_count = 0 + + for image_id, metadata_str in images: + try: + # Parse JSON metadata + metadata = json.loads(metadata_str) + + # Extract values + latitude = metadata.get('latitude') + longitude = metadata.get('longitude') + date_created = metadata.get('date_created') + + has_location = latitude is not None and longitude is not None + has_datetime = date_created is not None + + if has_location or has_datetime: + # Update the database + if has_location and has_datetime: + cursor.execute( + "UPDATE images SET latitude = ?, longitude = ?, captured_at = ? WHERE id = ?", + (latitude, longitude, date_created, image_id) + ) + both_count += 1 + elif has_location: + cursor.execute( + "UPDATE images SET latitude = ?, longitude = ? WHERE id = ?", + (latitude, longitude, image_id) + ) + location_count += 1 + elif has_datetime: + cursor.execute( + "UPDATE images SET captured_at = ? WHERE id = ?", + (date_created, image_id) + ) + datetime_count += 1 + + updated_count += 1 + + # Show progress every 50 images + if updated_count % 50 == 0: + print(f" Processed {updated_count} images...") + + except Exception as e: + print(f" Error processing image {image_id}: {e}") + continue + + # Commit changes + conn.commit() + + # Get final statistics + cursor.execute("SELECT COUNT(*) FROM images WHERE latitude IS NOT NULL") + total_with_location = cursor.fetchone()[0] + + cursor.execute("SELECT COUNT(*) FROM images WHERE captured_at IS NOT NULL") + total_with_datetime = cursor.fetchone()[0] + + cursor.execute("SELECT COUNT(*) FROM images WHERE latitude IS NOT NULL AND captured_at IS NOT NULL") + total_with_both = cursor.fetchone()[0] + + conn.close() + + # Print summary + print("\n" + "=" * 70) + print("METADATA EXTRACTION SUMMARY") + print("=" * 70) + print(f"Total images processed: {len(images)}") + print(f"Images updated: {updated_count}") + print(f"Images with location data: {total_with_location} ({100*total_with_location/len(images):.1f}%)") + print(f"Images with datetime: {total_with_datetime} ({100*total_with_datetime/len(images):.1f}%)") + print(f"Images with both: {total_with_both} ({100*total_with_both/len(images):.1f}%)") + print(f"Images skipped (no data): {len(images) - updated_count}") + print("=" * 70) + print("\n✅ Migration completed successfully!") + print("\nNext steps:") + print(" 1. Start the backend: .env/bin/python3.12 main.py") + print(" 2. 
Test API: curl -X POST 'http://localhost:8000/api/memories/generate'") + print() + +if __name__ == '__main__': + extract_and_update() diff --git a/backend/main.py b/backend/main.py index 2c1f39e44..39d63579d 100644 --- a/backend/main.py +++ b/backend/main.py @@ -12,7 +12,7 @@ from contextlib import asynccontextmanager from concurrent.futures import ProcessPoolExecutor from app.database.faces import db_create_faces_table -from app.database.images import db_create_images_table +from app.database.images import db_create_images_table, db_migrate_add_memories_columns from app.database.face_clusters import db_create_clusters_table from app.database.yolo_mapping import db_create_YOLO_classes_table from app.database.albums import db_create_albums_table @@ -26,6 +26,7 @@ from app.routes.images import router as images_router from app.routes.face_clusters import router as face_clusters_router from app.routes.user_preferences import router as user_preferences_router +from app.routes.memories import router as memories_router from fastapi.openapi.utils import get_openapi from app.logging.setup_logging import ( configure_uvicorn_logging, @@ -46,6 +47,7 @@ async def lifespan(app: FastAPI): generate_openapi_json() db_create_folders_table() db_create_images_table() + db_migrate_add_memories_columns() # Add Memories columns to existing database db_create_YOLO_classes_table() db_create_clusters_table() # Create clusters table first since faces references it db_create_faces_table() @@ -132,6 +134,10 @@ async def root(): app.include_router( user_preferences_router, prefix="/user-preferences", tags=["User Preferences"] ) +app.include_router(memories_router) # Memories router (prefix already defined in router) + +logger.info("✅ All routes initialized") +logger.info("✅ Memories feature enabled at /api/memories") # Entry point for running with: python3 main.py diff --git a/backend/migrate_add_memories_columns.py b/backend/migrate_add_memories_columns.py new file mode 100644 index 000000000..bd2c47c2c --- /dev/null +++ b/backend/migrate_add_memories_columns.py @@ -0,0 +1,217 @@ +""" +One-time migration script to add Memories feature columns. +Run this ONCE after pulling the new code. 
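+Re-running it is safe: columns that already exist are detected and skipped, and
+indexes are created with IF NOT EXISTS.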
+ +This script adds: +- latitude (REAL) column +- longitude (REAL) column +- captured_at (DATETIME) column +- Performance indexes for these columns + +Usage: + cd backend + python migrate_add_memories_columns.py +""" + +import sqlite3 +from pathlib import Path +import sys + +# ANSI color codes for terminal output +class Colors: + GREEN = '\033[92m' + RED = '\033[91m' + YELLOW = '\033[93m' + BLUE = '\033[94m' + BOLD = '\033[1m' + RESET = '\033[0m' + +DATABASE_PATH = Path(__file__).parent / 'app' / 'database' / 'PictoPy.db' + +def print_header(text): + """Print section header""" + print(f"\n{Colors.BOLD}{Colors.BLUE}{'='*70}{Colors.RESET}") + print(f"{Colors.BOLD}{Colors.BLUE}{text}{Colors.RESET}") + print(f"{Colors.BOLD}{Colors.BLUE}{'='*70}{Colors.RESET}\n") + +def print_success(text): + """Print success message""" + print(f"{Colors.GREEN}✓ {text}{Colors.RESET}") + +def print_error(text): + """Print error message""" + print(f"{Colors.RED}✗ {text}{Colors.RESET}") + +def print_info(text): + """Print info message""" + print(f" {text}") + +def check_database_exists(): + """Check if database file exists""" + if not DATABASE_PATH.exists(): + print_error(f"Database not found at: {DATABASE_PATH}") + print_info("The database will be created when you first run the app.") + print_info("Run this migration script AFTER the database is created.") + return False + + print_success(f"Database found at: {DATABASE_PATH}") + return True + +def check_images_table(cursor): + """Check if images table exists""" + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") + if not cursor.fetchone(): + print_error("Table 'images' does not exist") + print_info("Run the app first to create the database schema.") + return False + + print_success("Table 'images' exists") + return True + +def get_existing_columns(cursor): + """Get list of existing columns in images table""" + cursor.execute("PRAGMA table_info(images)") + columns = {row[1]: row[2] for row in cursor.fetchall()} + return columns + +def add_columns(cursor): + """Add new columns if they don't exist""" + print_header("Adding Memories Feature Columns") + + columns = get_existing_columns(cursor) + changes_made = False + + # Add latitude column + if 'latitude' not in columns: + print_info("Adding column: latitude (REAL)") + cursor.execute("ALTER TABLE images ADD COLUMN latitude REAL") + print_success("Column 'latitude' added") + changes_made = True + else: + print_success(f"Column 'latitude' already exists ({columns['latitude']})") + + # Add longitude column + if 'longitude' not in columns: + print_info("Adding column: longitude (REAL)") + cursor.execute("ALTER TABLE images ADD COLUMN longitude REAL") + print_success("Column 'longitude' added") + changes_made = True + else: + print_success(f"Column 'longitude' already exists ({columns['longitude']})") + + # Add captured_at column + if 'captured_at' not in columns: + print_info("Adding column: captured_at (DATETIME)") + cursor.execute("ALTER TABLE images ADD COLUMN captured_at DATETIME") + print_success("Column 'captured_at' added") + changes_made = True + else: + print_success(f"Column 'captured_at' already exists ({columns['captured_at']})") + + return changes_made + +def create_indexes(cursor): + """Create indexes for performance""" + print_header("Creating Performance Indexes") + + indexes = [ + ("ix_images_latitude", "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)"), + ("ix_images_longitude", "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)"), 
+ ("ix_images_captured_at", "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)"), + ("ix_images_favourite_captured_at", "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)"), + ] + + for index_name, sql in indexes: + cursor.execute(sql) + print_success(f"Index '{index_name}' created") + +def show_final_schema(cursor): + """Display final table schema""" + print_header("Final 'images' Table Schema") + + cursor.execute("PRAGMA table_info(images)") + print(f"\n{Colors.BOLD}Columns:{Colors.RESET}") + for row in cursor.fetchall(): + col_id, col_name, col_type, not_null, default, pk = row + nullable = "NOT NULL" if not_null else "NULL" + primary = " PRIMARY KEY" if pk else "" + print(f" {col_name:<20} {col_type:<15} {nullable:<10}{primary}") + + cursor.execute("SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='images'") + indexes = cursor.fetchall() + print(f"\n{Colors.BOLD}Indexes:{Colors.RESET}") + for index in indexes: + print(f" - {index[0]}") + print() + +def migrate(): + """Run the migration""" + print_header("PictoPy Memories Feature - Database Migration") + + # Check database exists + if not check_database_exists(): + sys.exit(1) + + conn = None + try: + # Connect to database + print_info("Connecting to database...") + conn = sqlite3.connect(DATABASE_PATH) + cursor = conn.cursor() + print_success("Connected successfully") + + # Check images table exists + if not check_images_table(cursor): + sys.exit(1) + + # Add columns + changes_made = add_columns(cursor) + + # Create indexes + create_indexes(cursor) + + # Commit changes + conn.commit() + + # Show final schema + show_final_schema(cursor) + + # Summary + print_header("Migration Summary") + if changes_made: + print(f"{Colors.BOLD}{Colors.GREEN}✅ Migration completed successfully!{Colors.RESET}\n") + print_info("New columns added to 'images' table:") + print_info(" - latitude (REAL)") + print_info(" - longitude (REAL)") + print_info(" - captured_at (DATETIME)") + print_info("") + print_info("Performance indexes created for fast queries.") + else: + print(f"{Colors.BOLD}{Colors.GREEN}✅ Database is already up to date!{Colors.RESET}\n") + print_info("All required columns and indexes already exist.") + + print(f"\n{Colors.BOLD}Next Steps:{Colors.RESET}") + print_info("1. Run metadata extraction: python -m app.utils.extract_location_metadata") + print_info("2. Verify setup: python -m app.utils.verify_memories_setup") + print_info("3. 
Start the backend: ./run.sh") + print() + + except sqlite3.Error as e: + print_error(f"SQLite error: {e}") + if conn: + conn.rollback() + sys.exit(1) + + except Exception as e: + print_error(f"Unexpected error: {e}") + if conn: + conn.rollback() + sys.exit(1) + + finally: + if conn: + conn.close() + print_info("Database connection closed") + +if __name__ == '__main__': + migrate() diff --git a/backend/requirements.txt b/backend/requirements.txt index b848d7ad6..2dfa77965 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -31,7 +31,7 @@ mkdocs-material==9.6.16 mkdocs-material-extensions==1.3.1 mkdocs-swagger-ui-tag==0.7.1 mpmath==1.3.0 -numpy==1.26.4 +numpy==1.26.4 # Required for Memories feature: GPS calculations and array operations onnxruntime==1.17.1 opencv-python==4.9.0.80 orjson==3.10.3 @@ -46,7 +46,7 @@ python-dotenv==1.0.1 python-multipart==0.0.9 PyYAML==6.0.1 rich==13.7.1 -scikit-learn==1.5.1 +scikit-learn==1.5.1 # Required for Memories feature: DBSCAN spatial clustering algorithm scipy==1.14.0 shellingham==1.5.4 sniffio==1.3.1 diff --git a/backend/test_memories_api.py b/backend/test_memories_api.py new file mode 100644 index 000000000..aca15eeb3 --- /dev/null +++ b/backend/test_memories_api.py @@ -0,0 +1,167 @@ +""" +Test script for Memories API endpoints + +This script tests all the Memories feature endpoints to verify they're working correctly. + +Usage: + python test_memories_api.py +""" + +import requests +import json +from typing import Dict, Any + +BASE_URL = "http://localhost:8000/api/memories" + + +def print_response(endpoint: str, response: requests.Response): + """Pretty print API response.""" + print("\n" + "="*70) + print(f"🔍 Testing: {endpoint}") + print("="*70) + print(f"Status Code: {response.status_code}") + + if response.status_code == 200: + print("✅ SUCCESS") + data = response.json() + print("\nResponse Preview:") + print(json.dumps(data, indent=2)[:500] + "...") + else: + print("❌ FAILED") + print(f"Error: {response.text}") + print("="*70) + + +def test_generate_memories(): + """Test POST /api/memories/generate""" + print("\n🚀 Testing: Generate Memories") + + response = requests.post( + f"{BASE_URL}/generate", + params={ + "location_radius_km": 5.0, + "date_tolerance_days": 3, + "min_images": 2 + } + ) + + print_response("POST /api/memories/generate", response) + + if response.status_code == 200: + data = response.json() + print(f"\n📊 Summary:") + print(f" - Memory Count: {data.get('memory_count', 0)}") + print(f" - Image Count: {data.get('image_count', 0)}") + print(f" - Message: {data.get('message', 'N/A')}") + + +def test_timeline(): + """Test GET /api/memories/timeline""" + print("\n🚀 Testing: Timeline") + + response = requests.get( + f"{BASE_URL}/timeline", + params={ + "days": 30, + "location_radius_km": 5.0, + "date_tolerance_days": 3 + } + ) + + print_response("GET /api/memories/timeline", response) + + if response.status_code == 200: + data = response.json() + print(f"\n📊 Summary:") + print(f" - Memory Count: {data.get('memory_count', 0)}") + print(f" - Date Range: {data.get('date_range', {})}") + + +def test_on_this_day(): + """Test GET /api/memories/on-this-day""" + print("\n🚀 Testing: On This Day") + + response = requests.get(f"{BASE_URL}/on-this-day") + + print_response("GET /api/memories/on-this-day", response) + + if response.status_code == 200: + data = response.json() + print(f"\n📊 Summary:") + print(f" - Today: {data.get('today', 'N/A')}") + print(f" - Years Found: {data.get('years', [])}") + print(f" - Image Count: 
{data.get('image_count', 0)}") + + +def test_locations(): + """Test GET /api/memories/locations""" + print("\n🚀 Testing: Locations") + + response = requests.get( + f"{BASE_URL}/locations", + params={ + "location_radius_km": 5.0, + "max_sample_images": 3 + } + ) + + print_response("GET /api/memories/locations", response) + + if response.status_code == 200: + data = response.json() + print(f"\n📊 Summary:") + print(f" - Location Count: {data.get('location_count', 0)}") + if data.get('locations'): + print(f" - Top Location: {data['locations'][0].get('location_name', 'N/A')}") + print(f" - Photos at Top Location: {data['locations'][0].get('image_count', 0)}") + + +def check_server(): + """Check if the server is running.""" + try: + response = requests.get("http://localhost:8000/health", timeout=2) + if response.status_code == 200: + print("✅ Server is running!") + return True + else: + print("⚠️ Server responded but with unexpected status") + return False + except requests.exceptions.ConnectionError: + print("❌ Server is not running!") + print("\n💡 Start the server with:") + print(" cd /Users/harshit/Code/pictopy/PictoPy/backend") + print(" python main.py") + return False + + +def main(): + """Run all tests.""" + print("\n" + "🎯 " * 20) + print(" MEMORIES API TEST SUITE") + print("🎯 " * 20 + "\n") + + # Check if server is running + if not check_server(): + return + + print("\n⏳ Running all tests...\n") + + try: + # Run all tests + test_generate_memories() + test_timeline() + test_on_this_day() + test_locations() + + print("\n" + "✅ " * 20) + print(" ALL TESTS COMPLETED!") + print("✅ " * 20 + "\n") + + except Exception as e: + print(f"\n❌ Test failed with error: {e}") + import traceback + traceback.print_exc() + + +if __name__ == "__main__": + main() From 109b43895918f662f9df64aefa79935f372cea1d Mon Sep 17 00:00:00 2001 From: harshit Date: Sun, 14 Dec 2025 04:24:50 +0530 Subject: [PATCH 02/22] package json files --- docs/backend/backend_python/openapi.json | 563 ++++++++++++++++++++++- frontend/package-lock.json | 14 - 2 files changed, 560 insertions(+), 17 deletions(-) diff --git a/docs/backend/backend_python/openapi.json b/docs/backend/backend_python/openapi.json index 44eb908b1..5578c3471 100644 --- a/docs/backend/backend_python/openapi.json +++ b/docs/backend/backend_python/openapi.json @@ -1117,9 +1117,14 @@ "in": "query", "required": false, "schema": { - "$ref": "#/components/schemas/InputType", + "allOf": [ + { + "$ref": "#/components/schemas/InputType" + } + ], "description": "Choose input type: 'path' or 'base64'", - "default": "path" + "default": "path", + "title": "Input Type" }, "description": "Choose input type: 'path' or 'base64'" } @@ -1299,6 +1304,242 @@ } } } + }, + "/api/memories/generate": { + "post": { + "tags": [ + "memories" + ], + "summary": "Generate Memories", + "description": "Generate memories from all images with location data.\n\nThis endpoint:\n1. Fetches all images that have GPS coordinates\n2. Clusters them by location using DBSCAN\n3. Within each location, clusters by date\n4. 
Returns memory objects with metadata\n\nArgs:\n location_radius_km: Maximum distance between photos in same location (default: 5km)\n date_tolerance_days: Maximum days between photos in same memory (default: 3)\n min_images: Minimum images required to form a memory (default: 2)\n \nReturns:\n GenerateMemoriesResponse with list of generated memories\n \nRaises:\n HTTPException: If database query fails or clustering fails", + "operationId": "generate_memories_api_memories_generate_post", + "parameters": [ + { + "name": "location_radius_km", + "in": "query", + "required": false, + "schema": { + "type": "number", + "maximum": 100.0, + "minimum": 0.1, + "description": "Location clustering radius in km", + "default": 5.0, + "title": "Location Radius Km" + }, + "description": "Location clustering radius in km" + }, + { + "name": "date_tolerance_days", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 30, + "minimum": 1, + "description": "Date tolerance in days", + "default": 3, + "title": "Date Tolerance Days" + }, + "description": "Date tolerance in days" + }, + { + "name": "min_images", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 10, + "minimum": 1, + "description": "Minimum images per memory", + "default": 2, + "title": "Min Images" + }, + "description": "Minimum images per memory" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GenerateMemoriesResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/memories/timeline": { + "get": { + "tags": [ + "memories" + ], + "summary": "Get Timeline", + "description": "Get memories from the past N days as a timeline.\n\nThis endpoint:\n1. Calculates date range (today - N days to today)\n2. Fetches images within that date range\n3. Clusters them into memories\n4. 
Returns timeline of memories\n\nArgs:\n days: Number of days to look back (default: 365 = 1 year)\n location_radius_km: Location clustering radius (default: 5km)\n date_tolerance_days: Date tolerance for temporal clustering (default: 3)\n \nReturns:\n TimelineResponse with memories ordered by date\n \nRaises:\n HTTPException: If database query fails", + "operationId": "get_timeline_api_memories_timeline_get", + "parameters": [ + { + "name": "days", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 3650, + "minimum": 1, + "description": "Number of days to look back", + "default": 365, + "title": "Days" + }, + "description": "Number of days to look back" + }, + { + "name": "location_radius_km", + "in": "query", + "required": false, + "schema": { + "type": "number", + "maximum": 100.0, + "minimum": 0.1, + "description": "Location clustering radius in km", + "default": 5.0, + "title": "Location Radius Km" + }, + "description": "Location clustering radius in km" + }, + { + "name": "date_tolerance_days", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 30, + "minimum": 1, + "description": "Date tolerance in days", + "default": 3, + "title": "Date Tolerance Days" + }, + "description": "Date tolerance in days" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TimelineResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/memories/on-this-day": { + "get": { + "tags": [ + "memories" + ], + "summary": "Get On This Day", + "description": "Get photos taken on this date in previous years.\n\nThis endpoint:\n1. Gets current month and day\n2. Searches for images from this month-day in all previous years\n3. Groups by year\n4. Returns images sorted by year (most recent first)\n\nReturns:\n OnThisDayResponse with images from this date in previous years\n \nRaises:\n HTTPException: If database query fails", + "operationId": "get_on_this_day_api_memories_on_this_day_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OnThisDayResponse" + } + } + } + } + } + } + }, + "/api/memories/locations": { + "get": { + "tags": [ + "memories" + ], + "summary": "Get Locations", + "description": "Get all unique locations where photos were taken.\n\nThis endpoint:\n1. Fetches all images with GPS coordinates\n2. Clusters them by location\n3. Returns location clusters with photo counts\n4. 
Includes sample images for each location\n\nArgs:\n location_radius_km: Location clustering radius (default: 5km)\n max_sample_images: Maximum sample images per location (default: 5)\n \nReturns:\n LocationsResponse with list of location clusters\n \nRaises:\n HTTPException: If database query fails", + "operationId": "get_locations_api_memories_locations_get", + "parameters": [ + { + "name": "location_radius_km", + "in": "query", + "required": false, + "schema": { + "type": "number", + "maximum": 100.0, + "minimum": 0.1, + "description": "Location clustering radius in km", + "default": 5.0, + "title": "Location Radius Km" + }, + "description": "Location clustering radius in km" + }, + { + "name": "max_sample_images", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 20, + "minimum": 1, + "description": "Max sample images per location", + "default": 5, + "title": "Max Sample Images" + }, + "description": "Max sample images per location" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LocationsResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } } }, "components": { @@ -1692,6 +1933,43 @@ ], "title": "FolderDetails" }, + "GenerateMemoriesResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "message": { + "type": "string", + "title": "Message" + }, + "memory_count": { + "type": "integer", + "title": "Memory Count" + }, + "image_count": { + "type": "integer", + "title": "Image Count" + }, + "memories": { + "items": { + "$ref": "#/components/schemas/Memory" + }, + "type": "array", + "title": "Memories" + } + }, + "type": "object", + "required": [ + "success", + "message", + "memory_count", + "image_count", + "memories" + ], + "title": "GenerateMemoriesResponse", + "description": "Response for generate memories endpoint." + }, "GetAlbumImagesRequest": { "properties": { "password": { @@ -2199,7 +2477,6 @@ "metadata": { "anyOf": [ { - "additionalProperties": true, "type": "object" }, { @@ -2262,6 +2539,211 @@ ], "title": "InputType" }, + "LocationCluster": { + "properties": { + "location_name": { + "type": "string", + "title": "Location Name" + }, + "center_lat": { + "type": "number", + "title": "Center Lat" + }, + "center_lon": { + "type": "number", + "title": "Center Lon" + }, + "image_count": { + "type": "integer", + "title": "Image Count" + }, + "sample_images": { + "items": { + "$ref": "#/components/schemas/MemoryImage" + }, + "type": "array", + "title": "Sample Images" + } + }, + "type": "object", + "required": [ + "location_name", + "center_lat", + "center_lon", + "image_count", + "sample_images" + ], + "title": "LocationCluster", + "description": "Location cluster with photo count." + }, + "LocationsResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "location_count": { + "type": "integer", + "title": "Location Count" + }, + "locations": { + "items": { + "$ref": "#/components/schemas/LocationCluster" + }, + "type": "array", + "title": "Locations" + } + }, + "type": "object", + "required": [ + "success", + "location_count", + "locations" + ], + "title": "LocationsResponse", + "description": "Response for locations endpoint." 
+ }, + "Memory": { + "properties": { + "memory_id": { + "type": "string", + "title": "Memory Id" + }, + "title": { + "type": "string", + "title": "Title" + }, + "description": { + "type": "string", + "title": "Description" + }, + "location_name": { + "type": "string", + "title": "Location Name" + }, + "date_start": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Date Start" + }, + "date_end": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Date End" + }, + "image_count": { + "type": "integer", + "title": "Image Count" + }, + "images": { + "items": { + "$ref": "#/components/schemas/MemoryImage" + }, + "type": "array", + "title": "Images" + }, + "thumbnail_image_id": { + "type": "string", + "title": "Thumbnail Image Id" + }, + "center_lat": { + "type": "number", + "title": "Center Lat" + }, + "center_lon": { + "type": "number", + "title": "Center Lon" + } + }, + "type": "object", + "required": [ + "memory_id", + "title", + "description", + "location_name", + "date_start", + "date_end", + "image_count", + "images", + "thumbnail_image_id", + "center_lat", + "center_lon" + ], + "title": "Memory", + "description": "Memory object containing grouped images." + }, + "MemoryImage": { + "properties": { + "id": { + "type": "string", + "title": "Id" + }, + "path": { + "type": "string", + "title": "Path" + }, + "thumbnailPath": { + "type": "string", + "title": "Thumbnailpath" + }, + "latitude": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "title": "Latitude" + }, + "longitude": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "title": "Longitude" + }, + "captured_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Captured At" + } + }, + "type": "object", + "required": [ + "id", + "path", + "thumbnailPath", + "latitude", + "longitude", + "captured_at" + ], + "title": "MemoryImage", + "description": "Image within a memory." + }, "MetadataModel": { "properties": { "name": { @@ -2345,6 +2827,46 @@ ], "title": "MetadataModel" }, + "OnThisDayResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "today": { + "type": "string", + "title": "Today" + }, + "years": { + "items": { + "type": "integer" + }, + "type": "array", + "title": "Years" + }, + "image_count": { + "type": "integer", + "title": "Image Count" + }, + "images": { + "items": { + "$ref": "#/components/schemas/MemoryImage" + }, + "type": "array", + "title": "Images" + } + }, + "type": "object", + "required": [ + "success", + "today", + "years", + "image_count", + "images" + ], + "title": "OnThisDayResponse", + "description": "Response for on-this-day endpoint." + }, "RenameClusterData": { "properties": { "cluster_id": { @@ -2546,6 +3068,41 @@ ], "title": "SyncFolderResponse" }, + "TimelineResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "date_range": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Date Range" + }, + "memory_count": { + "type": "integer", + "title": "Memory Count" + }, + "memories": { + "items": { + "$ref": "#/components/schemas/Memory" + }, + "type": "array", + "title": "Memories" + } + }, + "type": "object", + "required": [ + "success", + "date_range", + "memory_count", + "memories" + ], + "title": "TimelineResponse", + "description": "Response for timeline endpoint." 
+ }, "ToggleFavouriteRequest": { "properties": { "image_id": { diff --git a/frontend/package-lock.json b/frontend/package-lock.json index e1e1ddd5f..ab218ecaf 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -14476,20 +14476,6 @@ "dev": true, "license": "ISC" }, - "node_modules/yaml": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", - "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", - "license": "ISC", - "optional": true, - "peer": true, - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14.6" - } - }, "node_modules/yargs": { "version": "17.7.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", From 7f7012f6ea01bca9da8fd9782677599c642cd206 Mon Sep 17 00:00:00 2001 From: harshit Date: Sun, 14 Dec 2025 09:49:03 +0530 Subject: [PATCH 03/22] feat: enhance Memories UI with improved titles and event bubbling fix MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✨ Features: - Display 'On this day last year' for memories from exactly 1 year ago - Format location-based memories as 'Trip to [Location], [Year]' (e.g., 'Trip to Jaipur, 2025') - Fix MediaView slideshow and info buttons not working in memory albums 🐛 Bug Fixes: - Fixed event bubbling issue where MediaView control clicks closed the entire viewer - Conditionally render MemoryViewer backdrop only when MediaView is closed - Prevent click handlers from interfering with MediaView controls 🎨 UI Improvements: - Enhanced FeaturedMemoryCard with contextual year display - Updated MemoryCard title formatting for better location context - Improved memory viewing experience with proper z-index layering 📦 Technical Changes: - Backend: Added reverse geocoding for location names in memory clustering - Backend: Fixed latitude/longitude handling for images without GPS data - Frontend: Refactored MemoryViewer JSX structure for proper conditional rendering - Frontend: Integrated MediaView component with full zoom/slideshow/info functionality This commit completes the Memories feature implementation with Google Photos-style presentation and fixes critical UX issues with the image viewer controls. 
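
A quick end-to-end check for the updated generate flow (a minimal sketch that
mirrors test_memories_api.py; assumes the backend is running on localhost:8000):

    import requests

    resp = requests.post(
        "http://localhost:8000/api/memories/generate",
        params={"location_radius_km": 5.0, "date_tolerance_days": 3, "min_images": 2},
    )
    print(resp.json()["message"])  # e.g. "12 memories (9 location, 3 date)"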
--- backend/app/database/images.py | 70 ++- backend/app/routes/memories.py | 51 +- backend/app/utils/images.py | 50 +- backend/app/utils/memory_clustering.py | 484 ++++++++++++++++-- backend/test_auto_gps_extraction.py | 85 +++ docs/backend/backend_python/openapi.json | 2 +- frontend/src/app/store.ts | 2 + .../Memories/FeaturedMemoryCard.tsx | 154 ++++++ .../src/components/Memories/MemoriesPage.tsx | 434 ++++++++++++++++ .../src/components/Memories/MemoryCard.tsx | 157 ++++++ .../src/components/Memories/MemoryViewer.tsx | 299 +++++++++++ frontend/src/components/Memories/index.ts | 14 + frontend/src/routes/AppRoutes.tsx | 3 +- frontend/src/services/memoriesApi.ts | 470 +++++++++++++++++ frontend/src/store/hooks.ts | 14 + frontend/src/store/slices/memoriesSlice.ts | 355 +++++++++++++ frontend/tsconfig.json | 1 + 17 files changed, 2565 insertions(+), 80 deletions(-) create mode 100644 backend/test_auto_gps_extraction.py create mode 100644 frontend/src/components/Memories/FeaturedMemoryCard.tsx create mode 100644 frontend/src/components/Memories/MemoriesPage.tsx create mode 100644 frontend/src/components/Memories/MemoryCard.tsx create mode 100644 frontend/src/components/Memories/MemoryViewer.tsx create mode 100644 frontend/src/components/Memories/index.ts create mode 100644 frontend/src/services/memoriesApi.ts create mode 100644 frontend/src/store/hooks.ts create mode 100644 frontend/src/store/slices/memoriesSlice.ts diff --git a/backend/app/database/images.py b/backend/app/database/images.py index 25de965e5..11f7eee0f 100644 --- a/backend/app/database/images.py +++ b/backend/app/database/images.py @@ -176,8 +176,8 @@ def db_bulk_insert_images(image_records: List[ImageRecord]) -> bool: try: cursor.executemany( """ - INSERT INTO images (id, path, folder_id, thumbnailPath, metadata, isTagged) - VALUES (:id, :path, :folder_id, :thumbnailPath, :metadata, :isTagged) + INSERT INTO images (id, path, folder_id, thumbnailPath, metadata, isTagged, latitude, longitude, captured_at) + VALUES (:id, :path, :folder_id, :thumbnailPath, :metadata, :isTagged, :latitude, :longitude, :captured_at) ON CONFLICT(path) DO UPDATE SET folder_id=excluded.folder_id, thumbnailPath=excluded.thumbnailPath, @@ -185,7 +185,10 @@ def db_bulk_insert_images(image_records: List[ImageRecord]) -> bool: isTagged=CASE WHEN excluded.isTagged THEN 1 ELSE images.isTagged - END + END, + latitude=COALESCE(excluded.latitude, images.latitude), + longitude=COALESCE(excluded.longitude, images.longitude), + captured_at=COALESCE(excluded.captured_at, images.captured_at) """, image_records, ) @@ -804,6 +807,67 @@ def db_get_images_with_location() -> List[dict]: return images + except Exception as e: + logger.error(f"Error fetching images with location: {e}") + return [] + finally: + conn.close() + + +def db_get_all_images_for_memories() -> List[dict]: + """ + Get ALL images that can be used for memories (with OR without GPS). + Includes images with timestamps for date-based memories. 
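+    Rows are returned even when latitude/longitude or captured_at are NULL; the
+    clustering step decides how (or whether) each image is used.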
+ + Returns: + List of all image dictionaries (both GPS and non-GPS images) + """ + conn = _connect() + cursor = conn.cursor() + + try: + cursor.execute(""" + SELECT + i.id, + i.path, + i.folder_id, + i.thumbnailPath, + i.metadata, + i.isTagged, + i.isFavourite, + i.latitude, + i.longitude, + i.captured_at, + GROUP_CONCAT(m.name, ',') as tags + FROM images i + LEFT JOIN image_classes ic ON i.id = ic.image_id + LEFT JOIN mappings m ON ic.class_id = m.class_id + GROUP BY i.id + ORDER BY i.captured_at DESC + """) + + results = cursor.fetchall() + + images = [] + for row in results: + from app.utils.images import image_util_parse_metadata + + images.append({ + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7] if row[7] else None, # Can be None + "longitude": row[8] if row[8] else None, # Can be None + "captured_at": row[9] if row[9] else None, + "tags": row[10].split(',') if row[10] else None + }) + + return images + except Exception as e: logger.error(f"Error getting images with location: {e}") return [] diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index 24a218828..dd29447ac 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -116,46 +116,32 @@ async def generate_memories( min_images: int = Query(2, ge=1, le=10, description="Minimum images per memory") ): """ - Generate memories from all images with location data. + SIMPLIFIED: Generate memories from ALL images. + - GPS images → location-based memories + - Non-GPS images → monthly date-based memories - This endpoint: - 1. Fetches all images that have GPS coordinates - 2. Clusters them by location using DBSCAN - 3. Within each location, clusters by date - 4. 
Returns memory objects with metadata - - Args: - location_radius_km: Maximum distance between photos in same location (default: 5km) - date_tolerance_days: Maximum days between photos in same memory (default: 3) - min_images: Minimum images required to form a memory (default: 2) - - Returns: - GenerateMemoriesResponse with list of generated memories - - Raises: - HTTPException: If database query fails or clustering fails + Returns simple breakdown: {location_count, date_count, total} """ try: - logger.info("Generating memories with params: " - f"radius={location_radius_km}km, " - f"date_tolerance={date_tolerance_days}days, " - f"min_images={min_images}") + logger.info(f"Generating memories: radius={location_radius_km}km, " + f"date_tolerance={date_tolerance_days}days, min_images={min_images}") - # Fetch all images with location data - images = db_get_images_with_location() + # Fetch ALL images + from app.database.images import db_get_all_images_for_memories + images = db_get_all_images_for_memories() if not images: return GenerateMemoriesResponse( success=True, - message="No images with location data found", + message="No images found", memory_count=0, image_count=0, memories=[] ) - logger.info(f"Found {len(images)} images with location data") + logger.info(f"Processing {len(images)} images") - # Cluster images into memories + # Cluster into memories clustering = MemoryClustering( location_radius_km=location_radius_km, date_tolerance_days=date_tolerance_days, @@ -164,19 +150,24 @@ async def generate_memories( memories = clustering.cluster_memories(images) - logger.info(f"Generated {len(memories)} memories") + # Calculate breakdown + location_count = sum(1 for m in memories if m.get('type') == 'location') + date_count = sum(1 for m in memories if m.get('type') == 'date') + + logger.info(f"Generated {len(memories)} memories " + f"(location: {location_count}, date: {date_count})") return GenerateMemoriesResponse( success=True, - message=f"Successfully generated {len(memories)} memories from {len(images)} images", + message=f"{len(memories)} memories ({location_count} location, {date_count} date)", memory_count=len(memories), image_count=len(images), memories=memories ) except Exception as e: - logger.error(f"Error generating memories: {e}") - raise HTTPException(status_code=500, detail=f"Failed to generate memories: {str(e)}") + logger.error(f"Error generating memories: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) @router.get("/timeline", response_model=TimelineResponse) diff --git a/backend/app/utils/images.py b/backend/app/utils/images.py index c3b202205..c7b91a8c6 100644 --- a/backend/app/utils/images.py +++ b/backend/app/utils/images.py @@ -3,7 +3,7 @@ import datetime import json import logging -from typing import List, Tuple, Dict, Any, Mapping +from typing import List, Tuple, Dict, Any, Mapping, Optional from PIL import Image, ExifTags from pathlib import Path @@ -19,6 +19,7 @@ from app.models.FaceDetector import FaceDetector from app.models.ObjectClassifier import ObjectClassifier from app.logging.setup_logging import get_logger +from app.utils.extract_location_metadata import MetadataExtractor logger = get_logger(__name__) @@ -141,6 +142,7 @@ def image_util_prepare_image_records( ) -> List[Dict]: """ Prepare image records with thumbnails for database insertion. + Automatically extracts GPS coordinates and capture datetime from metadata. 
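+    Extraction is best-effort: if it fails for a file, the image is still recorded,
+    just with latitude, longitude and captured_at left as None.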
Args: image_files: List of image file paths @@ -150,6 +152,8 @@ def image_util_prepare_image_records( List of image record dictionaries ready for database insertion """ image_records = [] + extractor = MetadataExtractor() + for image_path in image_files: folder_id = image_util_find_folder_id_for_image(image_path, folder_path_to_id) @@ -166,16 +170,40 @@ def image_util_prepare_image_records( if image_util_generate_thumbnail(image_path, thumbnail_path): metadata = image_util_extract_metadata(image_path) logger.debug(f"Extracted metadata for {image_path}: {metadata}") - image_records.append( - { - "id": image_id, - "path": image_path, - "folder_id": folder_id, - "thumbnailPath": thumbnail_path, - "metadata": json.dumps(metadata), - "isTagged": False, - } - ) + + # Automatically extract GPS coordinates and datetime from metadata + # Don't fail upload if extraction fails + metadata_json = json.dumps(metadata) + latitude, longitude, captured_at = None, None, None + + try: + latitude, longitude, captured_at = extractor.extract_all(metadata_json) + + # Log GPS extraction results + if latitude and longitude: + logger.info(f"GPS extracted for {os.path.basename(image_path)}: ({latitude}, {longitude})") + if captured_at: + logger.debug(f"Date extracted for {os.path.basename(image_path)}: {captured_at}") + except Exception as e: + logger.warning(f"GPS extraction failed for {os.path.basename(image_path)}: {e}") + # Continue without GPS - don't fail the upload + + # Build image record with GPS data + # ALWAYS include latitude, longitude, captured_at (even if None) + # to satisfy SQL INSERT statement named parameters + image_record = { + "id": image_id, + "path": image_path, + "folder_id": folder_id, + "thumbnailPath": thumbnail_path, + "metadata": metadata_json, + "isTagged": False, + "latitude": latitude, # Can be None + "longitude": longitude, # Can be None + "captured_at": captured_at.isoformat() if isinstance(captured_at, datetime.datetime) and captured_at else captured_at, # Can be None + } + + image_records.append(image_record) return image_records diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py index 42ac0e4d5..cfa7cdc31 100644 --- a/backend/app/utils/memory_clustering.py +++ b/backend/app/utils/memory_clustering.py @@ -25,6 +25,84 @@ logger = get_logger(__name__) +# ============================================================================ +# City Coordinate Mapping for Reverse Geocoding +# ============================================================================ + +# Major city coordinates for approximate reverse geocoding +CITY_COORDINATES = { + # India - Major Cities + "Jaipur, Rajasthan": (26.9124, 75.7873), + "Delhi, India": (28.7041, 77.1025), + "Mumbai, Maharashtra": (19.0760, 72.8777), + "Bangalore, Karnataka": (12.9716, 77.5946), + "Kolkata, West Bengal": (22.5726, 88.3639), + "Chennai, Tamil Nadu": (13.0827, 80.2707), + "Hyderabad, Telangana": (17.3850, 78.4867), + "Pune, Maharashtra": (18.5204, 73.8567), + "Ahmedabad, Gujarat": (23.0225, 72.5714), + "Goa, India": (15.2993, 74.1240), + "Agra, Uttar Pradesh": (27.1767, 78.0081), + "Udaipur, Rajasthan": (24.5854, 73.7125), + "Jaisalmer, Rajasthan": (26.9157, 70.9083), + "Varanasi, Uttar Pradesh": (25.3176, 82.9739), + "Rishikesh, Uttarakhand": (30.0869, 78.2676), + "Shimla, Himachal Pradesh": (31.1048, 77.1734), + "Manali, Himachal Pradesh": (32.2432, 77.1892), + "Darjeeling, West Bengal": (27.0410, 88.2663), + "Ooty, Tamil Nadu": (11.4102, 76.6950), + "Coorg, Karnataka": (12.3375, 
75.8069), + + # International - Major Tourist Destinations + "Paris, France": (48.8566, 2.3522), + "London, UK": (51.5074, -0.1278), + "New York, USA": (40.7128, -74.0060), + "Tokyo, Japan": (35.6762, 139.6503), + "Dubai, UAE": (25.2048, 55.2708), + "Singapore": (1.3521, 103.8198), + "Bangkok, Thailand": (13.7563, 100.5018), + "Bali, Indonesia": (-8.4095, 115.1889), + "Sydney, Australia": (-33.8688, 151.2093), + "Rome, Italy": (41.9028, 12.4964), +} + + +def find_nearest_city(latitude: float, longitude: float, max_distance_km: float = 50.0) -> Optional[str]: + """ + Find the nearest known city to given coordinates. + + Args: + latitude: GPS latitude + longitude: GPS longitude + max_distance_km: Maximum distance to consider (default: 50km) + + Returns: + City name if within range, None otherwise + """ + from math import radians, cos, sin, asin, sqrt + + def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float: + """Calculate distance between two points in km using Haversine formula.""" + lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2]) + dlat = lat2 - lat1 + dlon = lon2 - lon1 + a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2 + c = 2 * asin(sqrt(a)) + km = 6371 * c # Radius of Earth in km + return km + + nearest_city = None + min_distance = float('inf') + + for city_name, (city_lat, city_lon) in CITY_COORDINATES.items(): + distance = haversine_distance(latitude, longitude, city_lat, city_lon) + if distance < min_distance and distance <= max_distance_km: + min_distance = distance + nearest_city = city_name + + return nearest_city + + class MemoryClustering: """ Clusters images into memories based on location and time proximity. @@ -61,38 +139,232 @@ def __init__( def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ - Main entry point: Cluster images into memories. + FLEXIBLE: Cluster ALL images into memories. + - Has GPS + Date: Cluster by location using DBSCAN, then by date within each location + - Has GPS only: Cluster by location using DBSCAN + - Has Date only: Group by month (if ≥5 photos per month) + - Has neither: Skip (can't create meaningful memory) + + Images work with EITHER date OR location - not both required! 
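+
+        A rough usage sketch (keyword names as passed in by the /api/memories
+        routes; any constructor options not shown are assumed to keep their
+        defaults):
+            clustering = MemoryClustering(location_radius_km=5.0, date_tolerance_days=3)
+            memories = clustering.cluster_memories(images)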
Args: - images: List of image dictionaries with fields: - - id: Image ID - - path: File path - - thumbnailPath: Thumbnail path - - latitude: GPS latitude (required) - - longitude: GPS longitude (required) - - captured_at: Capture datetime (ISO string or datetime object) - - metadata: Additional metadata dict + images: List of image dicts with id, path, thumbnailPath, + latitude, longitude, captured_at Returns: - List of memory dictionaries with fields: - - memory_id: Unique memory identifier - - title: Memory title (e.g., "Trip to Paris") - - description: Memory description - - location_name: Human-readable location - - date_start: Start date (ISO string) - - date_end: End date (ISO string) - - image_count: Number of images in memory - - images: List of image objects - - thumbnail_image_id: ID of representative image - - center_lat: Center latitude of cluster - - center_lon: Center longitude of cluster + List of memories with type='location' or type='date' """ - logger.info(f"Starting memory clustering for {len(images)} images") + logger.info(f"Starting flexible clustering for {len(images)} images") if not images: - logger.warning("No images provided for clustering") return [] + try: + # Separate images by what data they have + gps_images = [] + date_only_images = [] + skipped_count = 0 + + for img in images: + has_gps = img.get('latitude') and img.get('longitude') + has_date = img.get('captured_at') + + if has_gps: + # Has GPS (with or without date) → location-based clustering + gps_images.append(img) + elif has_date: + # Has date but no GPS → date-based grouping + date_only_images.append(img) + else: + # Has neither GPS nor date → skip + skipped_count += 1 + + logger.info(f"GPS-based: {len(gps_images)}, Date-only: {len(date_only_images)}, Skipped: {skipped_count}") + + memories = [] + + # Process location-based memories (these may also have dates) + if gps_images: + location_memories = self._cluster_location_images(gps_images) + memories.extend(location_memories) + + # Process date-only memories (no GPS) + if date_only_images: + date_memories = self._cluster_date_images(date_only_images) + memories.extend(date_memories) + + # Sort by date descending + memories.sort(key=lambda m: m.get('date_start', ''), reverse=True) + + logger.info(f"Generated {len(memories)} total memories") + return memories + + except Exception as e: + logger.error(f"Clustering failed: {e}", exc_info=True) + return [] + + def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + SIMPLIFIED: Use existing DBSCAN clustering for GPS images. + """ + try: + valid_images = self._filter_valid_images(images) + if not valid_images: + return [] + + location_clusters = self._cluster_by_location(valid_images) + memories = [] + + for cluster in location_clusters: + temporal_clusters = self._cluster_by_date(cluster) + for temp_cluster in temporal_clusters: + if len(temp_cluster) >= self.min_images_per_memory: + memory = self._create_simple_memory(temp_cluster, memory_type='location') + memories.append(memory) + + return memories + except Exception as e: + logger.error(f"Location clustering failed: {e}") + return [] + + def _cluster_date_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + FLEXIBLE: Group date-only images by year-month. + Uses min_images_per_memory (default: 2) as threshold. 
+ """ + try: + # Group by year-month + monthly_groups = defaultdict(list) + + for img in images: + captured_at = img.get('captured_at') + if not captured_at: + continue + + # Parse date + if isinstance(captured_at, str): + try: + dt = datetime.fromisoformat(captured_at.replace('Z', '')) + except: + continue + elif isinstance(captured_at, datetime): + dt = captured_at + else: + continue + + # Group by year-month + month_key = dt.strftime('%Y-%m') + monthly_groups[month_key].append(img) + + # Create memories for months with enough photos (uses min_images_per_memory) + memories = [] + for month_key, month_images in monthly_groups.items(): + if len(month_images) >= self.min_images_per_memory: + memory = self._create_simple_memory(month_images, memory_type='date') + if memory: + memories.append(memory) + + return memories + except Exception as e: + logger.error(f"Date clustering failed: {e}") + return [] + + def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = 'location') -> Dict[str, Any]: + """ + SIMPLIFIED: Create a memory object with minimal fields. + Ensures all datetime objects are converted to ISO strings. + """ + try: + # Convert datetime objects to ISO strings in images + cleaned_images = [] + for img in images: + img_copy = img.copy() + if img_copy.get('captured_at') and isinstance(img_copy['captured_at'], datetime): + img_copy['captured_at'] = img_copy['captured_at'].isoformat() + cleaned_images.append(img_copy) + + # Sort by date + sorted_images = sorted(cleaned_images, key=lambda x: x.get('captured_at', '')) + + # Get date range + dates = [img.get('captured_at') for img in sorted_images if img.get('captured_at')] + if dates: + if isinstance(dates[0], str): + dates = [datetime.fromisoformat(d.replace('Z', '')) for d in dates] + date_start = min(dates).isoformat() + date_end = max(dates).isoformat() + date_obj = min(dates) + else: + date_start = date_end = None + date_obj = datetime.now() + + # Simple titles + if memory_type == 'location': + # Calculate center first + lats = [img['latitude'] for img in images if img.get('latitude')] + lons = [img['longitude'] for img in images if img.get('longitude')] + center_lat = np.mean(lats) if lats else 0 + center_lon = np.mean(lons) if lons else 0 + + # Get actual location name using reverse geocoding + location_name = self._reverse_geocode(center_lat, center_lon) + + # Create title based on date range + if len(dates) > 1: + # Multiple dates: show date range + start_date = min(dates) + end_date = max(dates) + if start_date.strftime('%B %Y') == end_date.strftime('%B %Y'): + # Same month: "Jaipur in Nov 2025" + title = f"{location_name} in {start_date.strftime('%b %Y')}" + else: + # Different months: "Jaipur - Nov-Dec 2025" or "Jaipur - Nov 2025 to Jan 2026" + if start_date.year == end_date.year: + title = f"{location_name} - {start_date.strftime('%b')}-{end_date.strftime('%b %Y')}" + else: + title = f"{location_name} - {start_date.strftime('%b %Y')} to {end_date.strftime('%b %Y')}" + else: + # Single date or no dates: just the location name + title = location_name + else: + # Date-based: "Month Year" + title = date_obj.strftime('%B %Y') + location_name = "" + center_lat = 0 + center_lon = 0 + + # Create memory + memory_id = f"{memory_type}_{int(date_obj.timestamp())}_{len(images)}" + + return { + 'memory_id': memory_id, + 'title': title, + 'description': f"{len(images)} photos", + 'location_name': location_name, + 'date_start': date_start, + 'date_end': date_end, + 'image_count': len(images), + 'images': 
sorted_images, + 'thumbnail_image_id': sorted_images[0].get('id', ''), + 'center_lat': center_lat, + 'center_lon': center_lon, + 'type': memory_type # Add type field + } + except Exception as e: + logger.error(f"Memory creation failed: {e}") + return None + + def _cluster_gps_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + Cluster images with GPS data into location-based memories. + This is the original clustering logic. + + Args: + images: List of images with GPS coordinates + + Returns: + List of location-based memories + """ # Filter images with valid location data valid_images = self._filter_valid_images(images) @@ -100,7 +372,7 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] logger.warning("No images with valid location data") return [] - logger.info(f"Found {len(valid_images)} images with valid location data") + logger.info(f"Processing {len(valid_images)} GPS images") # Step 1: Cluster by location (spatial) location_clusters = self._cluster_by_location(valid_images) @@ -117,13 +389,154 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] memory = self._create_memory(temporal_cluster) memories.append(memory) - logger.info(f"Generated {len(memories)} memories") + return memories + + def _cluster_date_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + Cluster images WITHOUT GPS data into date-based memories. + Groups photos by capture date/time only (screenshots, downloads, edits, etc.) - # Sort memories by date (most recent first) - memories.sort(key=lambda m: m['date_start'] if m['date_start'] else '', reverse=True) + Args: + images: List of images without GPS coordinates + + Returns: + List of date-based memories + """ + logger.info(f"Clustering {len(images)} non-GPS images by date") + + # Parse and filter images with valid dates + valid_images = [] + for img in images: + img_copy = img.copy() + captured_at = img_copy.get('captured_at') + + if captured_at: + if isinstance(captured_at, str): + try: + captured_at = datetime.fromisoformat(captured_at.replace('Z', '')) + img_copy['captured_at'] = captured_at + except Exception: + # Try alternative formats + for fmt in ['%Y-%m-%d %H:%M:%S', '%Y:%m:%d %H:%M:%S', '%Y-%m-%d']: + try: + captured_at = datetime.strptime(captured_at, fmt) + img_copy['captured_at'] = captured_at + break + except Exception: + continue + else: + logger.debug(f"Could not parse date for image {img.get('id')}") + continue + elif isinstance(captured_at, datetime): + img_copy['captured_at'] = captured_at + + valid_images.append(img_copy) + + if not valid_images: + logger.warning("No non-GPS images with valid dates") + return [] + + logger.info(f"Found {len(valid_images)} non-GPS images with valid dates") + + # Sort by date + valid_images.sort(key=lambda x: x['captured_at']) + + # Group by date tolerance + clusters = [] + current_cluster = [valid_images[0]] + + for i in range(1, len(valid_images)): + prev_date = valid_images[i-1]['captured_at'] + curr_date = valid_images[i]['captured_at'] + + # Check if within tolerance + date_diff = abs((curr_date - prev_date).days) + + if date_diff <= self.date_tolerance_days: + current_cluster.append(valid_images[i]) + else: + # Create memory from current cluster if it meets min size + if len(current_cluster) >= self.min_images_per_memory: + clusters.append(current_cluster) + # Start new cluster + current_cluster = [valid_images[i]] + + # Add last cluster if it meets min size + if current_cluster and 
len(current_cluster) >= self.min_images_per_memory: + clusters.append(current_cluster) + + logger.info(f"Created {len(clusters)} date-based clusters") + + # Create memory objects + memories = [] + for cluster in clusters: + memory = self._create_date_based_memory(cluster) + memories.append(memory) return memories + def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, Any]: + """ + Create a date-based memory object for images without GPS. + + Args: + images: List of image dictionaries in the cluster (no GPS) + + Returns: + Memory dictionary with metadata + """ + # Get date range + dates = [img['captured_at'] for img in images if img.get('captured_at')] + date_start = min(dates) if dates else None + date_end = max(dates) if dates else None + + # Generate title for date-based memory + if date_start: + if date_start.date() == date_end.date(): + title = date_start.strftime("%B %d, %Y") + else: + days = (date_end - date_start).days + 1 + if days <= 7: + title = date_start.strftime("%B %d, %Y") + elif days <= 31: + title = date_start.strftime("%B %Y") + else: + title = date_start.strftime("%B - %B %Y") if date_start.month != date_end.month else date_start.strftime("%B %Y") + else: + title = "Memories Collection" + + # Generate description + description = self._generate_description(len(images), date_start, date_end) + + # Select thumbnail (middle image) + thumbnail_idx = len(images) // 2 + thumbnail_image_id = images[thumbnail_idx]['id'] + + # Create memory ID (use timestamp only) + memory_id = f"mem_date_{date_start.strftime('%Y%m%d')}" if date_start else f"mem_date_unknown_{hash(tuple(img['id'] for img in images[:5]))}" + + # Convert captured_at datetime objects to ISO strings + serialized_images = [] + for img in images: + img_copy = img.copy() + if img_copy.get('captured_at') and isinstance(img_copy['captured_at'], datetime): + img_copy['captured_at'] = img_copy['captured_at'].isoformat() + serialized_images.append(img_copy) + + return { + 'memory_id': memory_id, + 'title': title, + 'description': description, + 'location_name': 'Date-Based Memory', # Identifier for non-GPS memories + 'date_start': date_start.isoformat() if date_start else None, + 'date_end': date_end.isoformat() if date_end else None, + 'image_count': len(images), + 'images': serialized_images, + 'thumbnail_image_id': thumbnail_image_id, + 'center_lat': 0.0, # No GPS data + 'center_lon': 0.0 # No GPS data + } + def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ Filter images that have valid location and datetime data. @@ -341,20 +754,23 @@ def _reverse_geocode(self, latitude: float, longitude: float) -> str: """ Convert GPS coordinates to a human-readable location name. - This is a simple implementation. For production, consider using: - - Geopy with Nominatim - - Google Maps Geocoding API - - Mapbox Geocoding API + Uses city coordinate mapping for major cities, falls back to coordinates. 
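+    For example, (26.91, 75.79) resolves to "Jaipur, Rajasthan", while coordinates
+    with no known city within 50 km come back as a "26.9100°, 75.7900°"-style string.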
Args: latitude: GPS latitude longitude: GPS longitude Returns: - Location string (e.g., "Paris, France") + Location string (e.g., "Jaipur, Rajasthan" or formatted coordinates) """ - # Simple placeholder implementation - # Returns coordinates formatted as location + # Try to find nearest known city + city_name = find_nearest_city(latitude, longitude, max_distance_km=50.0) + + if city_name: + logger.debug(f"Mapped coordinates ({latitude:.4f}, {longitude:.4f}) to {city_name}") + return city_name + + # Fallback: Return formatted coordinates return f"{latitude:.4f}°, {longitude:.4f}°" def _generate_title( diff --git a/backend/test_auto_gps_extraction.py b/backend/test_auto_gps_extraction.py new file mode 100644 index 000000000..08f43db0c --- /dev/null +++ b/backend/test_auto_gps_extraction.py @@ -0,0 +1,85 @@ +""" +Test script to verify automatic GPS extraction on image import. + +This script simulates adding a new image and verifies that: +1. GPS coordinates are automatically extracted +2. Capture datetime is automatically extracted +3. Data is properly saved to the database + +Usage: + python test_auto_gps_extraction.py +""" + +import sys +import os +import json + +# Add backend to path +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +from app.utils.extract_location_metadata import MetadataExtractor + +def test_gps_extraction(): + """Test the GPS extraction functionality.""" + print("=" * 70) + print("Testing Automatic GPS Extraction") + print("=" * 70) + + extractor = MetadataExtractor() + + # Test case 1: Sample metadata with GPS + sample_metadata = { + "latitude": 28.6139, + "longitude": 77.2090, + "CreateDate": "2024:11:15 14:30:00" + } + + metadata_json = json.dumps(sample_metadata) + lat, lon, captured_at = extractor.extract_all(metadata_json) + + print("\nTest Case 1: Metadata with GPS") + print(f"Input: {sample_metadata}") + print(f"Extracted:") + print(f" - Latitude: {lat}") + print(f" - Longitude: {lon}") + print(f" - Captured At: {captured_at}") + + if lat and lon: + print("✅ GPS extraction working!") + else: + print("❌ GPS extraction failed") + + # Test case 2: Metadata without GPS + sample_metadata_no_gps = { + "CreateDate": "2024:11:15 14:30:00" + } + + metadata_json_no_gps = json.dumps(sample_metadata_no_gps) + lat2, lon2, captured_at2 = extractor.extract_all(metadata_json_no_gps) + + print("\nTest Case 2: Metadata without GPS") + print(f"Input: {sample_metadata_no_gps}") + print(f"Extracted:") + print(f" - Latitude: {lat2}") + print(f" - Longitude: {lon2}") + print(f" - Captured At: {captured_at2}") + + if lat2 is None and lon2 is None and captured_at2: + print("✅ Correctly handles images without GPS") + else: + print("❌ Unexpected behavior for images without GPS") + + print("\n" + "=" * 70) + print("INTEGRATION STATUS:") + print("=" * 70) + print("✅ MetadataExtractor imported successfully") + print("✅ extract_all() function working") + print("✅ Ready for automatic extraction on image import") + print("\nNEXT STEPS:") + print("1. Add a new folder with images that have GPS data") + print("2. Check the database to verify GPS fields are populated") + print("3. 
View Memories page to see the new images appear") + print("=" * 70) + +if __name__ == "__main__": + test_gps_extraction() diff --git a/docs/backend/backend_python/openapi.json b/docs/backend/backend_python/openapi.json index 5578c3471..12e811104 100644 --- a/docs/backend/backend_python/openapi.json +++ b/docs/backend/backend_python/openapi.json @@ -1311,7 +1311,7 @@ "memories" ], "summary": "Generate Memories", - "description": "Generate memories from all images with location data.\n\nThis endpoint:\n1. Fetches all images that have GPS coordinates\n2. Clusters them by location using DBSCAN\n3. Within each location, clusters by date\n4. Returns memory objects with metadata\n\nArgs:\n location_radius_km: Maximum distance between photos in same location (default: 5km)\n date_tolerance_days: Maximum days between photos in same memory (default: 3)\n min_images: Minimum images required to form a memory (default: 2)\n \nReturns:\n GenerateMemoriesResponse with list of generated memories\n \nRaises:\n HTTPException: If database query fails or clustering fails", + "description": "SIMPLIFIED: Generate memories from ALL images.\n- GPS images \u2192 location-based memories\n- Non-GPS images \u2192 monthly date-based memories\n\nReturns simple breakdown: {location_count, date_count, total}", "operationId": "generate_memories_api_memories_generate_post", "parameters": [ { diff --git a/frontend/src/app/store.ts b/frontend/src/app/store.ts index 7252274a6..7fb99d6a3 100644 --- a/frontend/src/app/store.ts +++ b/frontend/src/app/store.ts @@ -6,6 +6,7 @@ import imageReducer from '@/features/imageSlice'; import faceClustersReducer from '@/features/faceClustersSlice'; import infoDialogReducer from '@/features/infoDialogSlice'; import folderReducer from '@/features/folderSlice'; +import memoriesReducer from '@/store/slices/memoriesSlice'; export const store = configureStore({ reducer: { @@ -16,6 +17,7 @@ export const store = configureStore({ infoDialog: infoDialogReducer, folders: folderReducer, search: searchReducer, + memories: memoriesReducer, }, }); // Infer the `RootState` and `AppDispatch` types from the store itself diff --git a/frontend/src/components/Memories/FeaturedMemoryCard.tsx b/frontend/src/components/Memories/FeaturedMemoryCard.tsx new file mode 100644 index 000000000..2cbd7d14e --- /dev/null +++ b/frontend/src/components/Memories/FeaturedMemoryCard.tsx @@ -0,0 +1,154 @@ +/** + * FeaturedMemoryCard Component + * + * Large, prominent card for "On This Day" section. + * Shows hero image with "X years ago today" text overlay. + */ + +import React from 'react'; +import { MemoryImage } from '@/services/memoriesApi'; +import { calculateYearsAgo, formatPhotoCount, getThumbnailUrl } from '@/services/memoriesApi'; + +interface FeaturedMemoryCardProps { + images: MemoryImage[]; + today: string; + years: number[]; + onClick: () => void; +} + +/** + * Featured memory card for "On This Day" section + * Shows larger hero image with prominent styling + */ +export const FeaturedMemoryCard = React.memo(({ images, years, onClick }) => { + // Get the first image as hero + const heroImage = images[0]; + + if (!heroImage) return null; + + const thumbnailUrl = getThumbnailUrl(heroImage); + + // Calculate years ago from the captured date + const yearsAgo = heroImage.captured_at ? calculateYearsAgo(heroImage.captured_at) : 0; + + // Handle image load error + const handleImageError = (e: React.SyntheticEvent) => { + e.currentTarget.src = '/placeholder-image.png'; + }; + + return ( +
{ + if (e.key === 'Enter' || e.key === ' ') { + e.preventDefault(); + onClick(); + } + }} + aria-label={`View On This Day memory from ${yearsAgo} years ago`} + > +
+ {/* Hero Image */} +
+ On This Day + + {/* Gradient Overlay */} +
+ + {/* Content Overlay */} +
+ {/* "On This Day" Badge */} +
+ + + + On This Day +
+ + {/* Years Ago Text */} +

+ {yearsAgo === 1 ? 'On this day last year' : yearsAgo > 0 ? `${yearsAgo} years ago` : 'Today'} +

+ + {/* Photo Count */} +
+ + + + {formatPhotoCount(images.length)} + {years.length > 1 && ` from ${years.length} ${years.length === 1 ? 'year' : 'years'}`} +
+
+
+ + {/* Additional Images Preview (if more than 1) */} + {images.length > 1 && ( +
+ {images.slice(1, 4).map((img, idx) => ( +
+ +
+ ))} + {images.length > 4 && ( +
+ +{images.length - 4} +
+ )} +
+ )} +
+ + {/* CTA Text */} +
+

+ Click to relive these memories → +

+
+
+ ); +}); + +FeaturedMemoryCard.displayName = 'FeaturedMemoryCard'; + +export default FeaturedMemoryCard; diff --git a/frontend/src/components/Memories/MemoriesPage.tsx b/frontend/src/components/Memories/MemoriesPage.tsx new file mode 100644 index 000000000..c0a7946cd --- /dev/null +++ b/frontend/src/components/Memories/MemoriesPage.tsx @@ -0,0 +1,434 @@ +/** + * MemoriesPage Component + * + * Main page for the Memories feature. + * Displays memories in sections: On This Day, Recent, This Year, All Memories. + * Includes filter tabs for All/Location/Date memories. + * + * Layout mimics Google Photos Memories with smart feed organization. + */ + +import React, { useEffect, useState } from 'react'; +import { useAppDispatch, useAppSelector } from '@/store/hooks'; +import { + fetchAllMemoriesData, + fetchAllMemories, + fetchRecentMemories, + fetchYearMemories, + fetchOnThisDay, + setSelectedMemory, + selectOnThisDayImages, + selectOnThisDayMeta, + selectRecentMemories, + selectYearMemories, + selectAllMemories, + selectMemoriesLoading, + selectMemoriesError, + selectTotalMemoryCount +} from '@/store/slices/memoriesSlice'; +import { MemoryCard } from './MemoryCard'; +import { FeaturedMemoryCard } from './FeaturedMemoryCard'; +import { MemoryViewer } from './MemoryViewer.tsx'; +import type { Memory } from '@/services/memoriesApi'; + +/** + * Loading skeleton for memory cards + */ +const MemoryCardSkeleton: React.FC = () => ( +
+
+
+
+
+
+
+
+); + +/** + * Featured card skeleton for On This Day + */ +const FeaturedSkeleton: React.FC = () => ( +
+
+
+
+
+
+); + +/** + * Section header component + */ +const SectionHeader: React.FC<{ title: string; count?: number }> = ({ title, count }) => ( +

+ {title} + {count !== undefined && count > 0 && ( + ({count}) + )} +

+); + +/** + * Error message component with retry button + */ +const ErrorMessage: React.FC<{ message: string; onRetry: () => void }> = ({ message, onRetry }) => ( +
+
+ + + +

{message}

+ +
+
+); + +/** + * Empty state component + */ +const EmptyState: React.FC<{ message: string }> = ({ message }) => ( +
+ + + +

{message}

+
+); + +/** + * Main Memories Page Component + * SIMPLIFIED: Basic All/Location/Date filter buttons + */ +export const MemoriesPage: React.FC = () => { + const dispatch = useAppDispatch(); + + // Selectors + const onThisDayImages = useAppSelector(selectOnThisDayImages); + const onThisDayMeta = useAppSelector(selectOnThisDayMeta); + const recentMemories = useAppSelector(selectRecentMemories); + const yearMemories = useAppSelector(selectYearMemories); + const allMemories = useAppSelector(selectAllMemories); + const loading = useAppSelector(selectMemoriesLoading); + const error = useAppSelector(selectMemoriesError); + const totalCount = useAppSelector(selectTotalMemoryCount); + + // Simple filter state: 'all' | 'location' | 'date' + const [filter, setFilter] = useState<'all' | 'location' | 'date'>('all'); + + // Calculate counts + const locationCount = allMemories.filter(m => m.center_lat !== 0 || m.center_lon !== 0).length; + const dateCount = allMemories.filter(m => m.center_lat === 0 && m.center_lon === 0).length; + + // Simple filter function + const applyFilter = (memories: Memory[]) => { + if (filter === 'location') { + return memories.filter(m => m.center_lat !== 0 || m.center_lon !== 0); + } + if (filter === 'date') { + return memories.filter(m => m.center_lat === 0 && m.center_lon === 0); + } + return memories; // 'all' + }; + + // Apply filter + const filteredRecentMemories = applyFilter(recentMemories); + const filteredYearMemories = applyFilter(yearMemories); + const filteredAllMemories = applyFilter(allMemories); + + // Fetch all data on mount + useEffect(() => { + dispatch(fetchAllMemoriesData()); + }, [dispatch]); + + // Handle memory card click + const handleMemoryClick = (memory: Memory) => { + dispatch(setSelectedMemory(memory)); + }; + + // Handle On This Day click - create a temporary memory from images + const handleOnThisDayClick = () => { + if (onThisDayImages.length > 0 && onThisDayMeta) { + const tempMemory: Memory = { + memory_id: 'on-this-day', + title: `On This Day - ${onThisDayMeta.today}`, + description: `Photos from ${onThisDayMeta.years.join(', ')}`, + location_name: 'Various locations', + date_start: onThisDayImages[0]?.captured_at || null, + date_end: onThisDayImages[onThisDayImages.length - 1]?.captured_at || null, + image_count: onThisDayImages.length, + images: onThisDayImages, + thumbnail_image_id: onThisDayImages[0]?.id || '', + center_lat: onThisDayImages[0]?.latitude || 0, + center_lon: onThisDayImages[0]?.longitude || 0 + }; + dispatch(setSelectedMemory(tempMemory)); + } + }; + + // Retry handlers + const handleRetryAll = () => dispatch(fetchAllMemories()); + const handleRetryRecent = () => dispatch(fetchRecentMemories(30)); + const handleRetryYear = () => dispatch(fetchYearMemories(365)); + const handleRetryOnThisDay = () => dispatch(fetchOnThisDay()); + + // Check if any data exists + const hasAnyData = onThisDayImages.length > 0 || recentMemories.length > 0 || + yearMemories.length > 0 || allMemories.length > 0; + + return ( +
+ {/* Header */} +
+
+
+
+ + + +

+ Memories + {totalCount > 0 && ( + + ({totalCount}) + + )} +

+
+ + {/* Refresh button */} + +
+
+
+ + {/* Main Content */} +
+ {/* Simple Filter Buttons */} + {hasAnyData && ( +
+ + + +
+ )} + + {/* Global Loading State */} + {!hasAnyData && loading.all && ( +
+ +
+ {[...Array(8)].map((_, i) => ( + + ))} +
+
+ )} + + {/* Global Error State */} + {!hasAnyData && error.all && ( + + )} + + {/* Global Empty State */} + {!hasAnyData && !loading.all && !error.all && ( + + )} + + {/* ==================================================================== + SECTION 1: On This Day + ==================================================================== */} + {onThisDayImages.length > 0 && onThisDayMeta && ( +
+ + {loading.onThisDay ? ( + + ) : error.onThisDay ? ( + + ) : ( + + )} +
+ )} + + {/* ==================================================================== + SECTION 2: Recent Memories (Last 30 days) + ==================================================================== */} + {filteredRecentMemories.length > 0 && ( +
+ + {loading.recent ? ( +
+ {[...Array(4)].map((_, i) => ( + + ))} +
+ ) : error.recent ? ( + + ) : ( +
+ {filteredRecentMemories.map((memory: Memory) => ( + + ))} +
+ )} +
+ )} + + {/* ==================================================================== + SECTION 3: This Year + ==================================================================== */} + {filteredYearMemories.length > 0 && ( +
+ + {loading.year ? ( +
+ {[...Array(4)].map((_, i) => ( + + ))} +
+ ) : error.year ? ( + + ) : ( +
+ {filteredYearMemories.map((memory: Memory) => ( + + ))} +
+ )} +
+ )} + + {/* ==================================================================== + SECTION 4: All Memories + ==================================================================== */} + {filteredAllMemories.length > 0 && ( +
+ + {loading.all ? ( +
+ {[...Array(8)].map((_, i) => ( + + ))} +
+ ) : error.all ? ( + + ) : ( +
+ {filteredAllMemories.map((memory: Memory) => ( + + ))} +
+ )} +
+ )} +
+ + {/* Memory Viewer Modal */} + +
+ ); +}; + +export default MemoriesPage; diff --git a/frontend/src/components/Memories/MemoryCard.tsx b/frontend/src/components/Memories/MemoryCard.tsx new file mode 100644 index 000000000..3f075f457 --- /dev/null +++ b/frontend/src/components/Memories/MemoryCard.tsx @@ -0,0 +1,157 @@ +/** + * MemoryCard Component + * + * Displays a memory card with thumbnail, title, date, location, and photo count. + * Used in grid layouts for Recent Memories, This Year, and All Memories sections. + */ + +import React from 'react'; +import { Memory } from '@/services/memoriesApi'; +import { + formatDateRangeRelative, + formatPhotoCount, + getThumbnailUrl +} from '@/services/memoriesApi'; + +interface MemoryCardProps { + memory: Memory; + onClick: (memory: Memory) => void; +} + +/** + * Memory card component with hover effects and responsive design + * SIMPLIFIED: Just show type badge, handle missing thumbnails, use convertFileSrc + */ +export const MemoryCard = React.memo(({ memory, onClick }) => { + // Get thumbnail image (first image or find by thumbnail_image_id) + const thumbnailImage = memory.images.find(img => img.id === memory.thumbnail_image_id) || memory.images[0]; + + // Handle missing thumbnail gracefully - use path as fallback + const thumbnailUrl = thumbnailImage + ? getThumbnailUrl(thumbnailImage) + : memory.images[0]?.path + ? getThumbnailUrl(memory.images[0]) + : '/photo.png'; // Default placeholder + + // Determine memory type + const isDateBased = memory.center_lat === 0 && memory.center_lon === 0; + + // Format title based on memory type + let displayTitle = memory.title || 'Untitled Memory'; + const displayLocation = memory.location_name || ''; + + // For location-based memories, format as "Trip to [Location], [Year]" + if (!isDateBased && displayLocation) { + // Extract year from date_start + const year = memory.date_start ? new Date(memory.date_start).getFullYear() : ''; + displayTitle = `Trip to ${displayLocation}${year ? `, ${year}` : ''}`; + } + + // Handle image load error + const handleImageError = (e: React.SyntheticEvent) => { + e.currentTarget.src = '/photo.png'; // Fallback to default + }; + + return ( +
onClick(memory)} + className="group cursor-pointer bg-white dark:bg-gray-800 rounded-lg shadow-md hover:shadow-xl transition-all duration-200 overflow-hidden transform hover:scale-[1.02]" + role="button" + tabIndex={0} + onKeyDown={(e) => { + if (e.key === 'Enter' || e.key === ' ') { + e.preventDefault(); + onClick(memory); + } + }} + aria-label={`View memory: ${displayTitle}`} + > + {/* Thumbnail Image */} +
+ {displayTitle} + + {/* Type Badge - Location or Date */} +
+ {isDateBased ? ( + <> + + + + Date + + ) : ( + <> + + + + + Location + + )} +
+ + {/* Photo Count Badge */} +
+ {formatPhotoCount(memory.image_count)} +
+
+ + {/* Card Content */} +
+ {/* Title */} +

+ {displayTitle} +

+ + {/* Date Range - Relative Format */} +

+ {formatDateRangeRelative(memory.date_start, memory.date_end)} +

+ + {/* Location - Only show if not coordinates */} + {displayLocation && ( +
+ + + + + {displayLocation} +
+ )} + + {/* Description (optional, hidden on small screens) */} + {memory.description && ( +

+ {memory.description} +

+ )} +
+
+ ); +}); + +MemoryCard.displayName = 'MemoryCard'; + +export default MemoryCard; diff --git a/frontend/src/components/Memories/MemoryViewer.tsx b/frontend/src/components/Memories/MemoryViewer.tsx new file mode 100644 index 000000000..fc9d8b830 --- /dev/null +++ b/frontend/src/components/Memories/MemoryViewer.tsx @@ -0,0 +1,299 @@ +/** + * MemoryViewer Component + * + * Full-screen modal for viewing a memory's details and all photos. + * Shows title, description, date, location, and a grid of all images. + * When an image is clicked, opens MediaView for full slideshow/zoom experience. + */ + +import React, { useEffect, useCallback, useState } from 'react'; +import { useAppDispatch, useAppSelector } from '@/store/hooks'; +import { setSelectedMemory, selectSelectedMemory } from '@/store/slices/memoriesSlice'; +import { setCurrentViewIndex } from '@/features/imageSlice'; +import { MediaView } from '@/components/Media/MediaView'; +import { + formatDateRangeRelative, + formatPhotoCount, + getThumbnailUrl, + generateMemoryTitle, + formatLocationName +} from '@/services/memoriesApi'; + +/** + * Memory Viewer Modal Component + */ +export const MemoryViewer: React.FC = () => { + const dispatch = useAppDispatch(); + const memory = useAppSelector(selectSelectedMemory); + const [showMediaView, setShowMediaView] = useState(false); + + // Handle close memory viewer + const handleCloseViewer = useCallback(() => { + dispatch(setSelectedMemory(null)); + }, [dispatch]); + + // Handle image click - open MediaView + const handleImageClick = useCallback((index: number) => { + if (!memory) return; + + // Just set the current index - MediaView will use the images prop + dispatch(setCurrentViewIndex(index)); + setShowMediaView(true); + }, [memory, dispatch]); + + // Handle MediaView close - go back to memory grid + const handleMediaViewClose = useCallback(() => { + setShowMediaView(false); + dispatch(setCurrentViewIndex(-1)); // Reset view index + }, [dispatch]); + + // Handle ESC key press + useEffect(() => { + const handleEsc = (e: KeyboardEvent) => { + if (e.key === 'Escape') { + handleCloseViewer(); + } + }; + + if (memory) { + document.addEventListener('keydown', handleEsc); + // Prevent body scroll when modal is open + document.body.style.overflow = 'hidden'; + } + + return () => { + document.removeEventListener('keydown', handleEsc); + document.body.style.overflow = 'unset'; + }; + }, [memory, handleCloseViewer]); + + // Don't render if no memory selected + if (!memory) return null; + + // Generate better title and format location + const displayTitle = generateMemoryTitle(memory); + const displayLocation = formatLocationName(memory.location_name); + + // Handle image load error + const handleImageError = (e: React.SyntheticEvent) => { + e.currentTarget.src = '/placeholder-image.png'; + }; + + return ( + <> + {/* Memory Grid Modal - hide when MediaView is open */} + {!showMediaView && ( +
+ {/* Modal Container */} +
+
e.stopPropagation()} + > + {/* Header */} +
+
+
+ {/* Title */} +

+ {displayTitle} +

+ + {/* Metadata */} +
+ {/* Date Range - Relative */} +
+ + + + {formatDateRangeRelative(memory.date_start, memory.date_end)} +
+ + {/* Location - Only show if not coordinates */} + {displayLocation && ( +
+ + + + + {displayLocation} +
+ )} + + {/* Photo Count */} +
+ + + + {formatPhotoCount(memory.image_count)} +
+
+ + {/* Description */} + {memory.description && ( +

+ {memory.description} +

+ )} +
+ + {/* Close Button */} + +
+
+ + {/* Images Grid */} +
+
+ {memory.images.map((image, index) => ( +
handleImageClick(index)} + > + {`Photo + + {/* Hover Overlay */} +
+ + + +
+
+ ))} +
+
+ + {/* Footer (optional - for future features like share, download, etc.) */} +
+
+

+ Click any photo to view with zoom and slideshow +

+ + {/* Future: Add share, download buttons here */} +
+ {/* Placeholder for future actions */} +
+
+
+
+
+
+ )} + + {/* MediaView for full-screen image viewing with zoom/slideshow */} + {showMediaView && memory && ( + ({ + id: img.id, + path: img.path, + thumbnailPath: img.thumbnailPath, + folder_id: '', // Memory images don't have folder_id + isTagged: false, // Memory images don't track tagging + isFavourite: false, // Can be added later if needed + tags: [], // Can be added later if needed + metadata: { + name: img.path.split('/').pop() || '', + date_created: img.captured_at, + width: 0, + height: 0, + file_location: img.path, + file_size: 0, + item_type: 'image', + latitude: img.latitude || undefined, + longitude: img.longitude || undefined + } + }))} + /> + )} + + ); +}; + +export default MemoryViewer; diff --git a/frontend/src/components/Memories/index.ts b/frontend/src/components/Memories/index.ts new file mode 100644 index 000000000..881210fbb --- /dev/null +++ b/frontend/src/components/Memories/index.ts @@ -0,0 +1,14 @@ +/** + * Memories Component Exports + * + * Barrel file for clean imports across the application. + * Import components like: import { MemoriesPage, MemoryCard } from '@/components/Memories' + */ + +export { default as MemoriesPage } from './MemoriesPage'; +export { default as MemoryCard } from './MemoryCard'; +export { default as FeaturedMemoryCard } from './FeaturedMemoryCard'; +export { default as MemoryViewer } from './MemoryViewer'; + +// Export types if needed +export type { Memory, MemoryImage } from '@/services/memoriesApi'; diff --git a/frontend/src/routes/AppRoutes.tsx b/frontend/src/routes/AppRoutes.tsx index 22153edbb..1dfb8177a 100644 --- a/frontend/src/routes/AppRoutes.tsx +++ b/frontend/src/routes/AppRoutes.tsx @@ -9,6 +9,7 @@ import { MyFav } from '@/pages/Home/MyFav'; import { AITagging } from '@/pages/AITagging/AITagging'; import { PersonImages } from '@/pages/PersonImages/PersonImages'; import { ComingSoon } from '@/pages/ComingSoon/ComingSoon'; +import { MemoriesPage } from '@/components/Memories'; export const AppRoutes: React.FC = () => { return ( @@ -21,7 +22,7 @@ export const AppRoutes: React.FC = () => { } /> } /> } /> - } /> + } /> } /> diff --git a/frontend/src/services/memoriesApi.ts b/frontend/src/services/memoriesApi.ts new file mode 100644 index 000000000..4f0253812 --- /dev/null +++ b/frontend/src/services/memoriesApi.ts @@ -0,0 +1,470 @@ +/** + * Memories API Service + * + * Handles all HTTP requests to the memories backend endpoints. + * Provides type-safe interfaces and error handling. 
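 *
 * Usage sketch (assumes the backend above is running at http://localhost:8000,
 * which is the API_BASE_URL used below; getTimeline and the Memory fields are
 * the ones defined in this file):
 *
 *   const timeline = await getTimeline(30);
 *   timeline.memories.forEach((m) => console.log(m.title, m.image_count));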
+ */ + +import axios, { AxiosError } from 'axios'; +import { convertFileSrc } from '@tauri-apps/api/core'; + +const API_BASE_URL = 'http://localhost:8000/api/memories'; + +// ============================================================================ +// TypeScript Interfaces +// ============================================================================ + +/** + * Individual image within a memory + */ +export interface MemoryImage { + id: string; + path: string; + thumbnailPath: string; + latitude: number | null; + longitude: number | null; + captured_at: string | null; // ISO 8601 format +} + +/** + * Memory object representing a collection of photos + */ +export interface Memory { + memory_id: string; + title: string; + description: string; + location_name: string; + date_start: string | null; // ISO 8601 format + date_end: string | null; // ISO 8601 format + image_count: number; + images: MemoryImage[]; + thumbnail_image_id: string; + center_lat: number; + center_lon: number; +} + +/** + * Response from POST /api/memories/generate + */ +export interface GenerateMemoriesResponse { + success: boolean; + message: string; + memory_count: number; + image_count: number; + memories: Memory[]; +} + +/** + * Response from GET /api/memories/timeline + */ +export interface TimelineResponse { + success: boolean; + date_range: { + start: string; + end: string; + }; + memory_count: number; + memories: Memory[]; +} + +/** + * Response from GET /api/memories/on-this-day + */ +export interface OnThisDayResponse { + success: boolean; + today: string; // e.g., "December 14" + years: number[]; // [2024, 2023, 2022] + image_count: number; + images: MemoryImage[]; +} + +/** + * Location cluster with sample images + */ +export interface LocationCluster { + location_name: string; + center_lat: number; + center_lon: number; + image_count: number; + sample_images: MemoryImage[]; +} + +/** + * Response from GET /api/memories/locations + */ +export interface LocationsResponse { + success: boolean; + location_count: number; + locations: LocationCluster[]; +} + +/** + * API Error structure + */ +export interface ApiError { + message: string; + status?: number; + details?: string; +} + +// ============================================================================ +// API Functions +// ============================================================================ + +/** + * Generate all memories from images with location data + * + * @param options - Clustering parameters + * @returns Generated memories + */ +export const generateMemories = async (options?: { + location_radius_km?: number; + date_tolerance_days?: number; + min_images?: number; +}): Promise => { + try { + const params = new URLSearchParams(); + if (options?.location_radius_km) params.append('location_radius_km', options.location_radius_km.toString()); + if (options?.date_tolerance_days) params.append('date_tolerance_days', options.date_tolerance_days.toString()); + if (options?.min_images) params.append('min_images', options.min_images.toString()); + + const response = await axios.post( + `${API_BASE_URL}/generate${params.toString() ? '?' 
+ params.toString() : ''}` + ); + + return response.data; + } catch (error) { + throw handleApiError(error); + } +}; + +/** + * Get memories from the past N days as a timeline + * + * @param days - Number of days to look back (default: 365) + * @param options - Clustering parameters + * @returns Timeline memories + */ +export const getTimeline = async ( + days: number = 365, + options?: { + location_radius_km?: number; + date_tolerance_days?: number; + } +): Promise => { + try { + const params = new URLSearchParams(); + params.append('days', days.toString()); + if (options?.location_radius_km) params.append('location_radius_km', options.location_radius_km.toString()); + if (options?.date_tolerance_days) params.append('date_tolerance_days', options.date_tolerance_days.toString()); + + const response = await axios.get( + `${API_BASE_URL}/timeline?${params.toString()}` + ); + + return response.data; + } catch (error) { + throw handleApiError(error); + } +}; + +/** + * Get photos taken on this date in previous years + * + * @returns On This Day images + */ +export const getOnThisDay = async (): Promise => { + try { + const response = await axios.get( + `${API_BASE_URL}/on-this-day` + ); + + return response.data; + } catch (error) { + throw handleApiError(error); + } +}; + +/** + * Get all unique locations where photos were taken + * + * @param options - Clustering and sampling parameters + * @returns Location clusters + */ +export const getLocations = async (options?: { + location_radius_km?: number; + max_sample_images?: number; +}): Promise => { + try { + const params = new URLSearchParams(); + if (options?.location_radius_km) params.append('location_radius_km', options.location_radius_km.toString()); + if (options?.max_sample_images) params.append('max_sample_images', options.max_sample_images.toString()); + + const response = await axios.get( + `${API_BASE_URL}/locations${params.toString() ? '?' 
+ params.toString() : ''}` + ); + + return response.data; + } catch (error) { + throw handleApiError(error); + } +}; + +// ============================================================================ +// Error Handling +// ============================================================================ + +/** + * Convert Axios errors to our ApiError format + */ +const handleApiError = (error: unknown): ApiError => { + if (axios.isAxiosError(error)) { + const axiosError = error as AxiosError<{ detail?: string; message?: string }>; + + return { + message: axiosError.response?.data?.message || + axiosError.response?.data?.detail || + axiosError.message || + 'An unknown error occurred', + status: axiosError.response?.status, + details: axiosError.response?.statusText + }; + } + + if (error instanceof Error) { + return { + message: error.message + }; + } + + return { + message: 'An unexpected error occurred' + }; +}; + +// ============================================================================ +// Utility Functions +// ============================================================================ + +/** + * Format a date string to human-readable format + * + * @param isoDate - ISO 8601 date string + * @returns Formatted date (e.g., "November 25, 2025") + */ +export const formatMemoryDate = (isoDate: string | null): string => { + if (!isoDate) return 'Unknown date'; + + try { + const date = new Date(isoDate); + return date.toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric' + }); + } catch { + return 'Invalid date'; + } +}; + +/** + * Format date range for memory display + * + * @param startDate - Start date ISO string + * @param endDate - End date ISO string + * @returns Formatted range (e.g., "Nov 25 - Nov 27, 2025") + */ +export const formatDateRange = (startDate: string | null, endDate: string | null): string => { + if (!startDate || !endDate) return 'Unknown date'; + + try { + const start = new Date(startDate); + const end = new Date(endDate); + + // Same day + if (start.toDateString() === end.toDateString()) { + return start.toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric' + }); + } + + // Same month and year + if (start.getMonth() === end.getMonth() && start.getFullYear() === end.getFullYear()) { + const monthYear = start.toLocaleDateString('en-US', { month: 'long', year: 'numeric' }); + return `${start.getDate()} - ${end.getDate()}, ${monthYear}`; + } + + // Different months or years + const startFormatted = start.toLocaleDateString('en-US', { month: 'short', day: 'numeric' }); + const endFormatted = end.toLocaleDateString('en-US', { month: 'short', day: 'numeric', year: 'numeric' }); + return `${startFormatted} - ${endFormatted}`; + } catch { + return 'Invalid date range'; + } +}; + +/** + * Calculate years ago from a date + * + * @param isoDate - ISO date string + * @returns Number of years ago + */ +export const calculateYearsAgo = (isoDate: string): number => { + try { + const date = new Date(isoDate); + const now = new Date(); + return now.getFullYear() - date.getFullYear(); + } catch { + return 0; + } +}; + +/** + * Format photo count + * + * @param count - Number of photos + * @returns Formatted string (e.g., "1 photo" or "5 photos") + */ +export const formatPhotoCount = (count: number): string => { + return count === 1 ? 
'1 photo' : `${count} photos`; +}; + +/** + * Format date range with relative time for recent dates + * + * @param startDate - Start date ISO string + * @param endDate - End date ISO string + * @returns Formatted range with relative dates like "Yesterday", "Last week", "2 months ago" + */ +export const formatDateRangeRelative = (startDate: string | null, endDate: string | null): string => { + if (!startDate || !endDate) return 'Unknown date'; + + try { + const start = new Date(startDate); + const end = new Date(endDate); + const now = new Date(); + + // Calculate days difference from end date + const daysDiff = Math.floor((now.getTime() - end.getTime()) / (1000 * 60 * 60 * 24)); + + // Today + if (daysDiff === 0) { + return 'Today'; + } + + // Yesterday + if (daysDiff === 1) { + return 'Yesterday'; + } + + // This week (2-6 days ago) + if (daysDiff >= 2 && daysDiff <= 6) { + return `${daysDiff} days ago`; + } + + // Last week + if (daysDiff >= 7 && daysDiff <= 13) { + return 'Last week'; + } + + // This month (2-4 weeks ago) + if (daysDiff >= 14 && daysDiff <= 30) { + const weeks = Math.floor(daysDiff / 7); + return `${weeks} weeks ago`; + } + + // Recent months (1-12 months ago) + const monthsDiff = Math.floor(daysDiff / 30); + if (monthsDiff >= 1 && monthsDiff <= 11) { + return monthsDiff === 1 ? 'Last month' : `${monthsDiff} months ago`; + } + + // Over a year ago - show month and year + return start.toLocaleDateString('en-US', { month: 'short', year: 'numeric' }); + } catch { + return formatDateRange(startDate, endDate); + } +}; + +/** + * Generate a human-readable title from location and date + * Improves ugly coordinate-based titles like "26.9333°, 75.9228° - November 2025" + * + * @param memory - Memory object with location and date info + * @returns Better title like "Weekend in Jaipur", "Jaipur Trip", or "December 2024" + */ +export const generateMemoryTitle = (memory: Memory): string => { + const location = memory.location_name; + const imageCount = memory.image_count; + + // Check if it's a date-based memory (no GPS data) + if (location === 'Date-Based Memory') { + // Use the title from backend which is already well-formatted for date-only memories + return memory.title; + } + + // If location doesn't look like coordinates, use it + if (!location.includes('°') && !location.match(/^-?\d+\.\d+/)) { + // Parse city name from location (e.g., "Jaipur, Rajasthan" -> "Jaipur") + const cityName = location.split(',')[0].trim(); + + // Add descriptive word based on image count + if (imageCount >= 50) { + return `${cityName} Adventure`; + } else if (imageCount >= 20) { + return `${cityName} Trip`; + } else if (imageCount >= 10) { + return `Weekend in ${cityName}`; + } else { + return `${cityName} Memories`; + } + } + + // Fallback: coordinates - try to make it cleaner + if (memory.date_start) { + const date = new Date(memory.date_start); + const monthYear = date.toLocaleDateString('en-US', { month: 'long', year: 'numeric' }); + return `Memories from ${monthYear}`; + } + + // Last resort + return memory.title || 'Photo Collection'; +}; + +/** + * Format location name by removing coordinates if present + * + * @param locationName - Raw location name from API + * @returns Cleaned location name or empty string if only coordinates or date-based + */ +export const formatLocationName = (locationName: string): string => { + // Hide date-based memories indicator (backend sends "Date-Based Memory") + if (locationName === 'Date-Based Memory') { + return ''; + } + + // If it looks like coordinates 
(contains ° or is a number pattern), hide it + if (locationName.includes('°') || locationName.match(/^-?\d+\.\d+.*-?\d+\.\d+/)) { + return ''; // Hide ugly coordinates + } + + return locationName; +}; + +/** + * Get thumbnail URL with fallback + * + * @param image - Memory image object + * @returns Thumbnail URL or placeholder + */ +export const getThumbnailUrl = (image: MemoryImage): string => { + // Use Tauri's convertFileSrc for proper file path handling in desktop app + if (image.thumbnailPath) { + return convertFileSrc(image.thumbnailPath); + } + + // Fallback to placeholder + return '/placeholder-image.png'; +}; diff --git a/frontend/src/store/hooks.ts b/frontend/src/store/hooks.ts new file mode 100644 index 000000000..440bcf3f8 --- /dev/null +++ b/frontend/src/store/hooks.ts @@ -0,0 +1,14 @@ +/** + * Redux Hooks + * + * Typed hooks for use throughout the application. + * These hooks ensure type safety when using Redux with TypeScript. + */ + +import { useDispatch, useSelector } from 'react-redux'; +import type { TypedUseSelectorHook } from 'react-redux'; +import type { RootState, AppDispatch } from '../app/store'; + +// Use throughout the app instead of plain `useDispatch` and `useSelector` +export const useAppDispatch: () => AppDispatch = useDispatch; +export const useAppSelector: TypedUseSelectorHook = useSelector; diff --git a/frontend/src/store/slices/memoriesSlice.ts b/frontend/src/store/slices/memoriesSlice.ts new file mode 100644 index 000000000..859c9c908 --- /dev/null +++ b/frontend/src/store/slices/memoriesSlice.ts @@ -0,0 +1,355 @@ +/** + * Memories Redux Slice + * + * Manages state for the Memories feature including: + * - All memories (generated from all photos) + * - Recent memories (last 30 days) + * - Year memories (current year) + * - On This Day images + * - Selected memory for viewer modal + */ + +import { createSlice, createAsyncThunk, PayloadAction } from '@reduxjs/toolkit'; +import { + generateMemories, + getTimeline, + getOnThisDay, + Memory, + MemoryImage, + ApiError +} from '@/services/memoriesApi'; + +// ============================================================================ +// State Interface +// ============================================================================ + +interface MemoriesState { + // Memory collections + allMemories: Memory[]; + recentMemories: Memory[]; + yearMemories: Memory[]; + onThisDayImages: MemoryImage[]; + onThisDayMeta: { + today: string; + years: number[]; + } | null; + + // Selected memory for viewer modal + selectedMemory: Memory | null; + + // Loading states for each section + loading: { + all: boolean; + recent: boolean; + year: boolean; + onThisDay: boolean; + }; + + // Error states + error: { + all: string | null; + recent: string | null; + year: string | null; + onThisDay: string | null; + }; + + // Metadata + lastFetched: number | null; +} + +// ============================================================================ +// Initial State +// ============================================================================ + +const initialState: MemoriesState = { + allMemories: [], + recentMemories: [], + yearMemories: [], + onThisDayImages: [], + onThisDayMeta: null, + selectedMemory: null, + loading: { + all: false, + recent: false, + year: false, + onThisDay: false + }, + error: { + all: null, + recent: null, + year: null, + onThisDay: null + }, + lastFetched: null +}; + +// ============================================================================ +// Async Thunks +// 
============================================================================ + +/** + * Fetch all memories from photos with location data + */ +export const fetchAllMemories = createAsyncThunk< + Memory[], + void, + { rejectValue: string } +>( + 'memories/fetchAll', + async (_, { rejectWithValue }) => { + try { + const response = await generateMemories(); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } + } +); + +/** + * Fetch recent memories (last 30 days) + */ +export const fetchRecentMemories = createAsyncThunk< + Memory[], + number, + { rejectValue: string } +>( + 'memories/fetchRecent', + async (days = 30, { rejectWithValue }) => { + try { + const response = await getTimeline(days); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } + } +); + +/** + * Fetch memories from current year + */ +export const fetchYearMemories = createAsyncThunk< + Memory[], + number, + { rejectValue: string } +>( + 'memories/fetchYear', + async (days = 365, { rejectWithValue }) => { + try { + const response = await getTimeline(days); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } + } +); + +/** + * Fetch "On This Day" images + */ +export const fetchOnThisDay = createAsyncThunk< + { images: MemoryImage[]; today: string; years: number[] }, + void, + { rejectValue: string } +>( + 'memories/fetchOnThisDay', + async (_, { rejectWithValue }) => { + try { + const response = await getOnThisDay(); + return { + images: response.images, + today: response.today, + years: response.years + }; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } + } +); + +/** + * Fetch all memories data at once (parallel requests) + */ +export const fetchAllMemoriesData = createAsyncThunk< + void, + void, + { rejectValue: string } +>( + 'memories/fetchAllData', + async (_, { dispatch, rejectWithValue }) => { + try { + await Promise.all([ + dispatch(fetchOnThisDay()), + dispatch(fetchRecentMemories(30)), + dispatch(fetchYearMemories(365)), + dispatch(fetchAllMemories()) + ]); + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } + } +); + +// ============================================================================ +// Slice +// ============================================================================ + +const memoriesSlice = createSlice({ + name: 'memories', + initialState, + reducers: { + /** + * Set the selected memory for the viewer modal + */ + setSelectedMemory: (state, action: PayloadAction) => { + state.selectedMemory = action.payload; + }, + + /** + * Clear all errors + */ + clearErrors: (state) => { + state.error = { + all: null, + recent: null, + year: null, + onThisDay: null + }; + }, + + /** + * Reset memories state + */ + resetMemories: () => { + return initialState; + } + }, + extraReducers: (builder) => { + // ======================================================================== + // Fetch All Memories + // ======================================================================== + builder + .addCase(fetchAllMemories.pending, (state) => { + state.loading.all = true; + state.error.all = null; + }) + .addCase(fetchAllMemories.fulfilled, (state, action) => { + state.loading.all = false; + state.allMemories = action.payload; + state.lastFetched = Date.now(); + }) + 
.addCase(fetchAllMemories.rejected, (state, action) => { + state.loading.all = false; + state.error.all = action.payload || 'Failed to fetch memories'; + }); + + // ======================================================================== + // Fetch Recent Memories + // ======================================================================== + builder + .addCase(fetchRecentMemories.pending, (state) => { + state.loading.recent = true; + state.error.recent = null; + }) + .addCase(fetchRecentMemories.fulfilled, (state, action) => { + state.loading.recent = false; + state.recentMemories = action.payload; + }) + .addCase(fetchRecentMemories.rejected, (state, action) => { + state.loading.recent = false; + state.error.recent = action.payload || 'Failed to fetch recent memories'; + }); + + // ======================================================================== + // Fetch Year Memories + // ======================================================================== + builder + .addCase(fetchYearMemories.pending, (state) => { + state.loading.year = true; + state.error.year = null; + }) + .addCase(fetchYearMemories.fulfilled, (state, action) => { + state.loading.year = false; + state.yearMemories = action.payload; + }) + .addCase(fetchYearMemories.rejected, (state, action) => { + state.loading.year = false; + state.error.year = action.payload || 'Failed to fetch year memories'; + }); + + // ======================================================================== + // Fetch On This Day + // ======================================================================== + builder + .addCase(fetchOnThisDay.pending, (state) => { + state.loading.onThisDay = true; + state.error.onThisDay = null; + }) + .addCase(fetchOnThisDay.fulfilled, (state, action) => { + state.loading.onThisDay = false; + state.onThisDayImages = action.payload.images; + state.onThisDayMeta = { + today: action.payload.today, + years: action.payload.years + }; + }) + .addCase(fetchOnThisDay.rejected, (state, action) => { + state.loading.onThisDay = false; + state.error.onThisDay = action.payload || 'Failed to fetch On This Day'; + }); + } +}); + +// ============================================================================ +// Exports +// ============================================================================ + +export const { + setSelectedMemory, + clearErrors, + resetMemories +} = memoriesSlice.actions; + +export default memoriesSlice.reducer; + +// ============================================================================ +// Selectors +// ============================================================================ + +export const selectAllMemories = (state: { memories: MemoriesState }) => state.memories.allMemories; +export const selectRecentMemories = (state: { memories: MemoriesState }) => state.memories.recentMemories; +export const selectYearMemories = (state: { memories: MemoriesState }) => state.memories.yearMemories; +export const selectOnThisDayImages = (state: { memories: MemoriesState }) => state.memories.onThisDayImages; +export const selectOnThisDayMeta = (state: { memories: MemoriesState }) => state.memories.onThisDayMeta; +export const selectSelectedMemory = (state: { memories: MemoriesState }) => state.memories.selectedMemory; +export const selectMemoriesLoading = (state: { memories: MemoriesState }) => state.memories.loading; +export const selectMemoriesError = (state: { memories: MemoriesState }) => state.memories.error; +export const selectLastFetched = (state: { memories: MemoriesState }) => 
state.memories.lastFetched; + +/** + * Select total memory count across all sections + */ +export const selectTotalMemoryCount = (state: { memories: MemoriesState }) => { + return state.memories.allMemories.length; +}; + +/** + * Check if any section is loading + */ +export const selectIsAnyLoading = (state: { memories: MemoriesState }) => { + const { loading } = state.memories; + return loading.all || loading.recent || loading.year || loading.onThisDay; +}; + +/** + * Check if there are any errors + */ +export const selectHasAnyError = (state: { memories: MemoriesState }) => { + const { error } = state.memories; + return !!(error.all || error.recent || error.year || error.onThisDay); +}; diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 32cfa71fc..c57466426 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -22,6 +22,7 @@ "noFallthroughCasesInSwitch": true, /* Type checking */ + "ignoreDeprecations": "6.0", "baseUrl": ".", "paths": { "@/*": ["./src/*"] From a3629677b505306b0012bf3a0d688a01d9a1fefe Mon Sep 17 00:00:00 2001 From: harshit Date: Sun, 14 Dec 2025 09:55:52 +0530 Subject: [PATCH 04/22] style: format code with Black and Prettier - Format Python files in backend/app/ with Black - Format TypeScript files in frontend/src/components/Memories/ with Prettier - Fix code style issues to meet project standards --- backend/app/database/images.py | 330 +++++----- backend/app/routes/memories.py | 238 ++++---- .../app/utils/extract_location_metadata.py | 281 +++++---- backend/app/utils/images.py | 30 +- backend/app/utils/memory_clustering.py | 574 ++++++++++-------- backend/app/utils/verify_memories_setup.py | 198 +++--- .../Memories/FeaturedMemoryCard.tsx | 265 ++++---- .../src/components/Memories/MemoriesPage.tsx | 151 +++-- .../src/components/Memories/MemoryCard.tsx | 97 +-- .../src/components/Memories/MemoryViewer.tsx | 337 +++++----- frontend/src/components/Memories/index.ts | 2 +- 11 files changed, 1383 insertions(+), 1120 deletions(-) diff --git a/backend/app/database/images.py b/backend/app/database/images.py index 11f7eee0f..bc74bd90d 100644 --- a/backend/app/database/images.py +++ b/backend/app/database/images.py @@ -79,9 +79,7 @@ def db_create_images_table() -> None: ) # Create indexes for Memories feature queries - cursor.execute( - "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)" - ) + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)") cursor.execute( "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" ) @@ -116,48 +114,60 @@ def db_migrate_add_memories_columns() -> None: """ conn = _connect() cursor = conn.cursor() - + try: # Check if images table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" + ) if not cursor.fetchone(): - logger.info("Images table does not exist yet, will be created by db_create_images_table()") + logger.info( + "Images table does not exist yet, will be created by db_create_images_table()" + ) conn.close() return - + # Get existing columns cursor.execute("PRAGMA table_info(images)") columns = {row[1] for row in cursor.fetchall()} - + # Add missing columns changes_made = False - - if 'latitude' not in columns: + + if "latitude" not in columns: cursor.execute("ALTER TABLE images ADD COLUMN latitude REAL") logger.info("Added column: latitude") changes_made = True - - if 'longitude' not in columns: + + if "longitude" not in 
columns: cursor.execute("ALTER TABLE images ADD COLUMN longitude REAL") logger.info("Added column: longitude") changes_made = True - - if 'captured_at' not in columns: + + if "captured_at" not in columns: cursor.execute("ALTER TABLE images ADD COLUMN captured_at DATETIME") logger.info("Added column: captured_at") changes_made = True - + # Create indexes - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)") - + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)" + ) + if changes_made: logger.info("Memories feature columns migration completed") - + conn.commit() - + except Exception as e: logger.error(f"Error during Memories columns migration: {e}") conn.rollback() @@ -278,7 +288,9 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: "isFavourite": bool(is_favourite), "latitude": latitude, "longitude": longitude, - "captured_at": captured_at if captured_at else None, # SQLite returns string + "captured_at": ( + captured_at if captured_at else None + ), # SQLite returns string "tags": [], } @@ -518,24 +530,22 @@ def db_toggle_image_favourite_status(image_id: str) -> bool: def db_get_images_by_date_range( - start_date: datetime, - end_date: datetime, - include_favorites_only: bool = False + start_date: datetime, end_date: datetime, include_favorites_only: bool = False ) -> List[dict]: """ Get images captured within a date range for Memories timeline. - + Args: start_date: Start of date range (inclusive) end_date: End of date range (inclusive) include_favorites_only: If True, only return favorite images - + Returns: List of image dictionaries with location and time data """ conn = _connect() cursor = conn.cursor() - + try: query = """ SELECT @@ -555,40 +565,42 @@ def db_get_images_by_date_range( LEFT JOIN mappings m ON ic.class_id = m.class_id WHERE i.captured_at BETWEEN ? AND ? 
""" - + params = [start_date, end_date] - + if include_favorites_only: query += " AND i.isFavourite = 1" - + query += """ GROUP BY i.id ORDER BY i.captured_at DESC """ - + cursor.execute(query, params) results = cursor.fetchall() - + images = [] for row in results: from app.utils.images import image_util_parse_metadata - - images.append({ - "id": row[0], - "path": row[1], - "folder_id": str(row[2]) if row[2] else None, - "thumbnailPath": row[3], - "metadata": image_util_parse_metadata(row[4]), - "isTagged": bool(row[5]), - "isFavourite": bool(row[6]), - "latitude": row[7], - "longitude": row[8], - "captured_at": row[9] if row[9] else None , - "tags": row[10].split(',') if row[10] else None - }) - + + images.append( + { + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None, + "tags": row[10].split(",") if row[10] else None, + } + ) + return images - + except Exception as e: logger.error(f"Error getting images by date range: {e}") return [] @@ -597,21 +609,19 @@ def db_get_images_by_date_range( def db_get_images_near_location( - latitude: float, - longitude: float, - radius_km: float = 5.0 + latitude: float, longitude: float, radius_km: float = 5.0 ) -> List[dict]: """ Get images near a location within radius_km using bounding box approximation. - + Args: latitude: Center latitude (-90 to 90) longitude: Center longitude (-180 to 180) radius_km: Search radius in kilometers (default: 5km) - + Returns: List of image dictionaries with location data - + Note: Uses simple bounding box (not precise Haversine distance). 
1 degree latitude ≈ 111 km @@ -619,15 +629,16 @@ def db_get_images_near_location( """ conn = _connect() cursor = conn.cursor() - + try: import math - + # Calculate bounding box offsets lat_offset = radius_km / 111.0 lon_offset = radius_km / (111.0 * abs(math.cos(math.radians(latitude)))) - - cursor.execute(""" + + cursor.execute( + """ SELECT i.id, i.path, @@ -649,35 +660,39 @@ def db_get_images_near_location( AND i.longitude IS NOT NULL GROUP BY i.id ORDER BY i.captured_at DESC - """, ( - latitude - lat_offset, - latitude + lat_offset, - longitude - lon_offset, - longitude + lon_offset - )) - + """, + ( + latitude - lat_offset, + latitude + lat_offset, + longitude - lon_offset, + longitude + lon_offset, + ), + ) + results = cursor.fetchall() - + images = [] for row in results: from app.utils.images import image_util_parse_metadata - - images.append({ - "id": row[0], - "path": row[1], - "folder_id": str(row[2]) if row[2] else None, - "thumbnailPath": row[3], - "metadata": image_util_parse_metadata(row[4]), - "isTagged": bool(row[5]), - "isFavourite": bool(row[6]), - "latitude": row[7], - "longitude": row[8], - "captured_at": row[9] if row[9] else None , # SQLite returns string, - "tags": row[10].split(',') if row[10] else None - }) - + + images.append( + { + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None, # SQLite returns string, + "tags": row[10].split(",") if row[10] else None, + } + ) + return images - + except Exception as e: logger.error(f"Error getting images near location: {e}") return [] @@ -688,19 +703,20 @@ def db_get_images_near_location( def db_get_images_by_year_month(year: int, month: int) -> List[dict]: """ Get all images captured in a specific year and month. - + Args: year: Year (e.g., 2024) month: Month (1-12) - + Returns: List of image dictionaries captured in the specified month """ conn = _connect() cursor = conn.cursor() - + try: - cursor.execute(""" + cursor.execute( + """ SELECT i.id, i.path, @@ -720,30 +736,34 @@ def db_get_images_by_year_month(year: int, month: int) -> List[dict]: AND strftime('%m', i.captured_at) = ? 
GROUP BY i.id ORDER BY i.captured_at DESC - """, (str(year).zfill(4), str(month).zfill(2))) - + """, + (str(year).zfill(4), str(month).zfill(2)), + ) + results = cursor.fetchall() - + images = [] for row in results: from app.utils.images import image_util_parse_metadata - - images.append({ - "id": row[0], - "path": row[1], - "folder_id": str(row[2]) if row[2] else None, - "thumbnailPath": row[3], - "metadata": image_util_parse_metadata(row[4]), - "isTagged": bool(row[5]), - "isFavourite": bool(row[6]), - "latitude": row[7], - "longitude": row[8], - "captured_at": row[9] if row[9] else None , # SQLite returns string, - "tags": row[10].split(',') if row[10] else None - }) - + + images.append( + { + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None, # SQLite returns string, + "tags": row[10].split(",") if row[10] else None, + } + ) + return images - + except Exception as e: logger.error(f"Error getting images by year/month: {e}") return [] @@ -755,15 +775,16 @@ def db_get_images_with_location() -> List[dict]: """ Get all images that have valid GPS coordinates. Useful for displaying all photos on a map. - + Returns: List of image dictionaries that have latitude and longitude """ conn = _connect() cursor = conn.cursor() - + try: - cursor.execute(""" + cursor.execute( + """ SELECT i.id, i.path, @@ -783,30 +804,33 @@ def db_get_images_with_location() -> List[dict]: AND i.longitude IS NOT NULL GROUP BY i.id ORDER BY i.captured_at DESC - """) - + """ + ) + results = cursor.fetchall() - + images = [] for row in results: from app.utils.images import image_util_parse_metadata - - images.append({ - "id": row[0], - "path": row[1], - "folder_id": str(row[2]) if row[2] else None, - "thumbnailPath": row[3], - "metadata": image_util_parse_metadata(row[4]), - "isTagged": bool(row[5]), - "isFavourite": bool(row[6]), - "latitude": row[7], - "longitude": row[8], - "captured_at": row[9] if row[9] else None , # SQLite returns string, - "tags": row[10].split(',') if row[10] else None - }) - + + images.append( + { + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None, # SQLite returns string, + "tags": row[10].split(",") if row[10] else None, + } + ) + return images - + except Exception as e: logger.error(f"Error fetching images with location: {e}") return [] @@ -818,15 +842,16 @@ def db_get_all_images_for_memories() -> List[dict]: """ Get ALL images that can be used for memories (with OR without GPS). Includes images with timestamps for date-based memories. 
- + Returns: List of all image dictionaries (both GPS and non-GPS images) """ conn = _connect() cursor = conn.cursor() - + try: - cursor.execute(""" + cursor.execute( + """ SELECT i.id, i.path, @@ -844,30 +869,33 @@ def db_get_all_images_for_memories() -> List[dict]: LEFT JOIN mappings m ON ic.class_id = m.class_id GROUP BY i.id ORDER BY i.captured_at DESC - """) - + """ + ) + results = cursor.fetchall() - + images = [] for row in results: from app.utils.images import image_util_parse_metadata - - images.append({ - "id": row[0], - "path": row[1], - "folder_id": str(row[2]) if row[2] else None, - "thumbnailPath": row[3], - "metadata": image_util_parse_metadata(row[4]), - "isTagged": bool(row[5]), - "isFavourite": bool(row[6]), - "latitude": row[7] if row[7] else None, # Can be None - "longitude": row[8] if row[8] else None, # Can be None - "captured_at": row[9] if row[9] else None, - "tags": row[10].split(',') if row[10] else None - }) - + + images.append( + { + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7] if row[7] else None, # Can be None + "longitude": row[8] if row[8] else None, # Can be None + "captured_at": row[9] if row[9] else None, + "tags": row[10].split(",") if row[10] else None, + } + ) + return images - + except Exception as e: logger.error(f"Error getting images with location: {e}") return [] diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index dd29447ac..c93b629f0 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -24,7 +24,7 @@ from app.database.images import ( db_get_images_with_location, db_get_images_by_date_range, - db_get_images_by_year_month + db_get_images_by_year_month, ) from app.utils.memory_clustering import MemoryClustering from app.logging.setup_logging import get_logger @@ -38,8 +38,10 @@ # Response Models # ============================================================================ + class MemoryImage(BaseModel): """Image within a memory.""" + id: str path: str thumbnailPath: str @@ -50,6 +52,7 @@ class MemoryImage(BaseModel): class Memory(BaseModel): """Memory object containing grouped images.""" + memory_id: str title: str description: str @@ -65,6 +68,7 @@ class Memory(BaseModel): class GenerateMemoriesResponse(BaseModel): """Response for generate memories endpoint.""" + success: bool message: str memory_count: int @@ -74,6 +78,7 @@ class GenerateMemoriesResponse(BaseModel): class TimelineResponse(BaseModel): """Response for timeline endpoint.""" + success: bool date_range: Dict[str, str] memory_count: int @@ -82,6 +87,7 @@ class TimelineResponse(BaseModel): class OnThisDayResponse(BaseModel): """Response for on-this-day endpoint.""" + success: bool today: str years: List[int] @@ -91,6 +97,7 @@ class OnThisDayResponse(BaseModel): class LocationCluster(BaseModel): """Location cluster with photo count.""" + location_name: str center_lat: float center_lon: float @@ -100,6 +107,7 @@ class LocationCluster(BaseModel): class LocationsResponse(BaseModel): """Response for locations endpoint.""" + success: bool location_count: int locations: List[LocationCluster] @@ -109,62 +117,72 @@ class LocationsResponse(BaseModel): # API Endpoints # ============================================================================ + @router.post("/generate", response_model=GenerateMemoriesResponse) async def generate_memories( - 
location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), - date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days"), - min_images: int = Query(2, ge=1, le=10, description="Minimum images per memory") + location_radius_km: float = Query( + 5.0, ge=0.1, le=100, description="Location clustering radius in km" + ), + date_tolerance_days: int = Query( + 3, ge=1, le=30, description="Date tolerance in days" + ), + min_images: int = Query(2, ge=1, le=10, description="Minimum images per memory"), ): """ SIMPLIFIED: Generate memories from ALL images. - GPS images → location-based memories - Non-GPS images → monthly date-based memories - + Returns simple breakdown: {location_count, date_count, total} """ try: - logger.info(f"Generating memories: radius={location_radius_km}km, " - f"date_tolerance={date_tolerance_days}days, min_images={min_images}") - + logger.info( + f"Generating memories: radius={location_radius_km}km, " + f"date_tolerance={date_tolerance_days}days, min_images={min_images}" + ) + # Fetch ALL images from app.database.images import db_get_all_images_for_memories + images = db_get_all_images_for_memories() - + if not images: return GenerateMemoriesResponse( success=True, message="No images found", memory_count=0, image_count=0, - memories=[] + memories=[], ) - + logger.info(f"Processing {len(images)} images") - + # Cluster into memories clustering = MemoryClustering( location_radius_km=location_radius_km, date_tolerance_days=date_tolerance_days, - min_images_per_memory=min_images + min_images_per_memory=min_images, ) - + memories = clustering.cluster_memories(images) - + # Calculate breakdown - location_count = sum(1 for m in memories if m.get('type') == 'location') - date_count = sum(1 for m in memories if m.get('type') == 'date') - - logger.info(f"Generated {len(memories)} memories " - f"(location: {location_count}, date: {date_count})") - + location_count = sum(1 for m in memories if m.get("type") == "location") + date_count = sum(1 for m in memories if m.get("type") == "date") + + logger.info( + f"Generated {len(memories)} memories " + f"(location: {location_count}, date: {date_count})" + ) + return GenerateMemoriesResponse( success=True, message=f"{len(memories)} memories ({location_count} location, {date_count} date)", memory_count=len(memories), image_count=len(images), - memories=memories + memories=memories, ) - + except Exception as e: logger.error(f"Error generating memories: {e}", exc_info=True) raise HTTPException(status_code=500, detail=str(e)) @@ -173,26 +191,30 @@ async def generate_memories( @router.get("/timeline", response_model=TimelineResponse) async def get_timeline( days: int = Query(365, ge=1, le=3650, description="Number of days to look back"), - location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), - date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days") + location_radius_km: float = Query( + 5.0, ge=0.1, le=100, description="Location clustering radius in km" + ), + date_tolerance_days: int = Query( + 3, ge=1, le=30, description="Date tolerance in days" + ), ): """ Get memories from the past N days as a timeline. - + This endpoint: 1. Calculates date range (today - N days to today) 2. Fetches images within that date range 3. Clusters them into memories 4. 
Returns timeline of memories - + Args: days: Number of days to look back (default: 365 = 1 year) location_radius_km: Location clustering radius (default: 5km) date_tolerance_days: Date tolerance for temporal clustering (default: 3) - + Returns: TimelineResponse with memories ordered by date - + Raises: HTTPException: If database query fails """ @@ -200,44 +222,41 @@ async def get_timeline( # Calculate date range end_date = datetime.now() start_date = end_date - timedelta(days=days) - + logger.info(f"Getting timeline from {start_date.date()} to {end_date.date()}") - + # Fetch images within date range images = db_get_images_by_date_range(start_date, end_date) - + if not images: return TimelineResponse( success=True, date_range={ "start": start_date.isoformat(), - "end": end_date.isoformat() + "end": end_date.isoformat(), }, memory_count=0, - memories=[] + memories=[], ) - + logger.info(f"Found {len(images)} images in date range") - + # Cluster into memories clustering = MemoryClustering( location_radius_km=location_radius_km, date_tolerance_days=date_tolerance_days, - min_images_per_memory=1 # Allow single images in timeline + min_images_per_memory=1, # Allow single images in timeline ) - + memories = clustering.cluster_memories(images) - + return TimelineResponse( success=True, - date_range={ - "start": start_date.isoformat(), - "end": end_date.isoformat() - }, + date_range={"start": start_date.isoformat(), "end": end_date.isoformat()}, memory_count=len(memories), - memories=memories + memories=memories, ) - + except Exception as e: logger.error(f"Error getting timeline: {e}") raise HTTPException(status_code=500, detail=f"Failed to get timeline: {str(e)}") @@ -247,16 +266,16 @@ async def get_timeline( async def get_on_this_day(): """ Get photos taken on this date in previous years. - + This endpoint: 1. Gets current month and day 2. Searches for images from this month-day in all previous years 3. Groups by year 4. 
Returns images sorted by year (most recent first) - + Returns: OnThisDayResponse with images from this date in previous years - + Raises: HTTPException: If database query fails """ @@ -264,139 +283,152 @@ async def get_on_this_day(): today = datetime.now() current_month = today.month current_day = today.day - + logger.info(f"Getting 'On This Day' for {today.strftime('%B %d')}") - + # Search for images from this month-day in past years # Go back 10 years maximum all_images = [] years_found = [] - + for year_offset in range(1, 11): # 1-10 years ago target_year = today.year - year_offset - + try: images = db_get_images_by_year_month(target_year, current_month) - + # Filter to specific day day_images = [ - img for img in images - if img.get('captured_at') and - datetime.fromisoformat(img['captured_at']).day == current_day + img + for img in images + if img.get("captured_at") + and datetime.fromisoformat(img["captured_at"]).day == current_day ] - + if day_images: all_images.extend(day_images) years_found.append(target_year) logger.info(f"Found {len(day_images)} images from {target_year}") - + except Exception as e: logger.warning(f"Error querying year {target_year}: {e}") continue - + # Sort by year (most recent first) all_images.sort( - key=lambda x: datetime.fromisoformat(x['captured_at']) if x.get('captured_at') else datetime.min, - reverse=True + key=lambda x: ( + datetime.fromisoformat(x["captured_at"]) + if x.get("captured_at") + else datetime.min + ), + reverse=True, ) - + return OnThisDayResponse( success=True, today=today.strftime("%B %d"), years=sorted(years_found, reverse=True), image_count=len(all_images), - images=all_images + images=all_images, ) - + except Exception as e: logger.error(f"Error getting 'On This Day': {e}") - raise HTTPException(status_code=500, detail=f"Failed to get 'On This Day': {str(e)}") + raise HTTPException( + status_code=500, detail=f"Failed to get 'On This Day': {str(e)}" + ) @router.get("/locations", response_model=LocationsResponse) async def get_locations( - location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), - max_sample_images: int = Query(5, ge=1, le=20, description="Max sample images per location") + location_radius_km: float = Query( + 5.0, ge=0.1, le=100, description="Location clustering radius in km" + ), + max_sample_images: int = Query( + 5, ge=1, le=20, description="Max sample images per location" + ), ): """ Get all unique locations where photos were taken. - + This endpoint: 1. Fetches all images with GPS coordinates 2. Clusters them by location 3. Returns location clusters with photo counts 4. 
Includes sample images for each location - + Args: location_radius_km: Location clustering radius (default: 5km) max_sample_images: Maximum sample images per location (default: 5) - + Returns: LocationsResponse with list of location clusters - + Raises: HTTPException: If database query fails """ try: logger.info(f"Getting locations with radius={location_radius_km}km") - + # Fetch all images with location data images = db_get_images_with_location() - + if not images: - return LocationsResponse( - success=True, - location_count=0, - locations=[] - ) - + return LocationsResponse(success=True, location_count=0, locations=[]) + logger.info(f"Found {len(images)} images with location data") - + # Cluster by location only (no date clustering) clustering = MemoryClustering( location_radius_km=location_radius_km, date_tolerance_days=999999, # Large number to group all dates together - min_images_per_memory=1 + min_images_per_memory=1, ) - + # Use internal method to get location clusters location_clusters = clustering._cluster_by_location( clustering._filter_valid_images(images) ) - + # Create location cluster objects locations = [] for cluster_images in location_clusters: if not cluster_images: continue - + # Calculate center - center_lat = sum(img['latitude'] for img in cluster_images) / len(cluster_images) - center_lon = sum(img['longitude'] for img in cluster_images) / len(cluster_images) - + center_lat = sum(img["latitude"] for img in cluster_images) / len( + cluster_images + ) + center_lon = sum(img["longitude"] for img in cluster_images) / len( + cluster_images + ) + # Get location name location_name = clustering._reverse_geocode(center_lat, center_lon) - + # Get sample images (up to max_sample_images) sample_images = cluster_images[:max_sample_images] - - locations.append(LocationCluster( - location_name=location_name, - center_lat=center_lat, - center_lon=center_lon, - image_count=len(cluster_images), - sample_images=sample_images - )) - + + locations.append( + LocationCluster( + location_name=location_name, + center_lat=center_lat, + center_lon=center_lon, + image_count=len(cluster_images), + sample_images=sample_images, + ) + ) + # Sort by image count (most photos first) locations.sort(key=lambda loc: loc.image_count, reverse=True) - + return LocationsResponse( - success=True, - location_count=len(locations), - locations=locations + success=True, location_count=len(locations), locations=locations ) - + except Exception as e: logger.error(f"Error getting locations: {e}") - raise HTTPException(status_code=500, detail=f"Failed to get locations: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Failed to get locations: {str(e)}" + ) diff --git a/backend/app/utils/extract_location_metadata.py b/backend/app/utils/extract_location_metadata.py index ccd8290a4..2eede996b 100644 --- a/backend/app/utils/extract_location_metadata.py +++ b/backend/app/utils/extract_location_metadata.py @@ -27,156 +27,168 @@ class MetadataExtractor: """ Extracts location and datetime information from image metadata JSON. 
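# A hedged client-side sketch of the memories endpoints defined above. The base URL,
# the "/memories" prefix, and the "/on-this-day" path are assumptions (the router is
# mounted in main.py, which is not shown here); adjust to the actual deployment.
import requests

BASE = "http://localhost:8000/memories"

generated = requests.post(
    f"{BASE}/generate",
    params={"location_radius_km": 5.0, "date_tolerance_days": 3, "min_images": 2},
).json()
print(generated["message"])

timeline = requests.get(f"{BASE}/timeline", params={"days": 365}).json()
print(timeline["memory_count"], "memories in the last year")

on_this_day = requests.get(f"{BASE}/on-this-day").json()
print(on_this_day["today"], on_this_day["image_count"])

locations = requests.get(f"{BASE}/locations", params={"max_sample_images": 3}).json()
for loc in locations["locations"]:
    print(loc["location_name"], loc["image_count"])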
- + This class provides utilities to safely parse metadata and extract: - GPS coordinates (latitude, longitude) - Capture datetime """ - + def __init__(self): """Initialize the metadata extractor.""" self.stats = { - 'total': 0, - 'updated': 0, - 'with_location': 0, - 'with_datetime': 0, - 'with_both': 0, - 'skipped': 0, - 'errors': 0 + "total": 0, + "updated": 0, + "with_location": 0, + "with_datetime": 0, + "with_both": 0, + "skipped": 0, + "errors": 0, } - - def extract_gps_coordinates(self, metadata: Dict[str, Any]) -> Tuple[Optional[float], Optional[float]]: + + def extract_gps_coordinates( + self, metadata: Dict[str, Any] + ) -> Tuple[Optional[float], Optional[float]]: """ Extract GPS coordinates from metadata dictionary. - + Supports multiple metadata structures: - Top-level: {"latitude": 28.6, "longitude": 77.2} - Nested EXIF: {"exif": {"gps": {"latitude": 28.6, "longitude": 77.2}}} - Alternative names: lat, lon, Latitude, Longitude - + Args: metadata: Parsed metadata dictionary - + Returns: Tuple of (latitude, longitude) or (None, None) if not found - + Validates: - Latitude: -90 to 90 - Longitude: -180 to 180 """ latitude = None longitude = None - + try: if not isinstance(metadata, dict): return None, None - + # Method 1: Direct top-level fields - lat = metadata.get('latitude') - lon = metadata.get('longitude') - + lat = metadata.get("latitude") + lon = metadata.get("longitude") + # Method 2: Check nested 'exif' -> 'gps' structure if not lat or not lon: - exif = metadata.get('exif', {}) + exif = metadata.get("exif", {}) if isinstance(exif, dict): - gps = exif.get('gps', {}) + gps = exif.get("gps", {}) if isinstance(gps, dict): - lat = lat or gps.get('latitude') - lon = lon or gps.get('longitude') - + lat = lat or gps.get("latitude") + lon = lon or gps.get("longitude") + # Method 3: Check alternative field names if not lat or not lon: - lat = lat or metadata.get('lat') or metadata.get('Latitude') - lon = lon or metadata.get('lon') or metadata.get('Longitude') - + lat = lat or metadata.get("lat") or metadata.get("Latitude") + lon = lon or metadata.get("lon") or metadata.get("Longitude") + # Validate and convert coordinates if lat is not None and lon is not None: try: lat = float(lat) lon = float(lon) - + # Sanity check: valid coordinate ranges if -90 <= lat <= 90 and -180 <= lon <= 180: latitude = lat longitude = lon else: - logger.warning(f"Invalid coordinate range: lat={lat}, lon={lon}") + logger.warning( + f"Invalid coordinate range: lat={lat}, lon={lon}" + ) except (ValueError, TypeError) as e: logger.warning(f"Could not convert coordinates to float: {e}") - + except Exception as e: logger.error(f"Unexpected error extracting GPS coordinates: {e}") - + return latitude, longitude - + def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: """ Extract capture datetime from metadata dictionary. - + Supports multiple datetime formats and field names: - date_created, datetime, date_taken, timestamp, DateTime - Nested: exif.datetime, exif.DateTimeOriginal - Formats: ISO 8601, EXIF format (YYYY:MM:DD HH:MM:SS), etc. 
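# A small illustrative call for the GPS extraction above, using the nested exif.gps
# layout that the method checks as its second fallback; the sample dictionaries are
# fabricated for demonstration.
from app.utils.extract_location_metadata import MetadataExtractor

extractor = MetadataExtractor()
print(extractor.extract_gps_coordinates({"exif": {"gps": {"latitude": 28.6139, "longitude": 77.2090}}}))
# -> (28.6139, 77.209)

# Out-of-range values fail the sanity check and (None, None) is returned.
print(extractor.extract_gps_coordinates({"latitude": 123.0, "longitude": 77.2}))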
- + Args: metadata: Parsed metadata dictionary - + Returns: datetime object or None if not found/parseable """ captured_at = None - + try: if not isinstance(metadata, dict): return None - + # Method 1: Check common top-level field names date_str = None - for field in ['date_created', 'datetime', 'date_taken', 'timestamp', 'DateTime']: + for field in [ + "date_created", + "datetime", + "date_taken", + "timestamp", + "DateTime", + ]: if field in metadata: date_str = metadata[field] break - + # Method 2: Check nested 'exif' structure if not date_str: - exif = metadata.get('exif', {}) + exif = metadata.get("exif", {}) if isinstance(exif, dict): date_str = ( - exif.get('datetime') or - exif.get('DateTime') or - exif.get('DateTimeOriginal') or - exif.get('DateTimeDigitized') + exif.get("datetime") + or exif.get("DateTime") + or exif.get("DateTimeOriginal") + or exif.get("DateTimeDigitized") ) - + # Parse datetime string if date_str: date_str = str(date_str).strip() - + # Try multiple datetime formats datetime_formats = [ - '%Y-%m-%d %H:%M:%S', # 2024-01-15 14:30:45 - '%Y:%m:%d %H:%M:%S', # 2024:01:15 14:30:45 (EXIF format) - '%Y-%m-%dT%H:%M:%S', # 2024-01-15T14:30:45 (ISO) - '%Y-%m-%dT%H:%M:%S.%f', # 2024-01-15T14:30:45.123456 - '%Y-%m-%d', # 2024-01-15 - '%d/%m/%Y %H:%M:%S', # 15/01/2024 14:30:45 - '%d/%m/%Y', # 15/01/2024 - '%m/%d/%Y %H:%M:%S', # 01/15/2024 14:30:45 - '%m/%d/%Y', # 01/15/2024 + "%Y-%m-%d %H:%M:%S", # 2024-01-15 14:30:45 + "%Y:%m:%d %H:%M:%S", # 2024:01:15 14:30:45 (EXIF format) + "%Y-%m-%dT%H:%M:%S", # 2024-01-15T14:30:45 (ISO) + "%Y-%m-%dT%H:%M:%S.%f", # 2024-01-15T14:30:45.123456 + "%Y-%m-%d", # 2024-01-15 + "%d/%m/%Y %H:%M:%S", # 15/01/2024 14:30:45 + "%d/%m/%Y", # 15/01/2024 + "%m/%d/%Y %H:%M:%S", # 01/15/2024 14:30:45 + "%m/%d/%Y", # 01/15/2024 ] - + # Try ISO format first (handles timezone) - if 'T' in date_str: + if "T" in date_str: try: # Remove timezone suffix for simpler parsing - date_str_clean = date_str.replace('Z', '').split('+')[0].split('-') + date_str_clean = ( + date_str.replace("Z", "").split("+")[0].split("-") + ) # Rejoin only date-time parts (not timezone) if len(date_str_clean) >= 3: - date_str_clean = '-'.join(date_str_clean[:3]) + date_str_clean = "-".join(date_str_clean[:3]) captured_at = datetime.fromisoformat(date_str_clean) except Exception: pass - + # Try other formats if not captured_at: for fmt in datetime_formats: @@ -185,156 +197,163 @@ def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: break except (ValueError, TypeError): continue - + if not captured_at: logger.warning(f"Could not parse datetime: {date_str}") - + except Exception as e: logger.error(f"Unexpected error extracting datetime: {e}") - + return captured_at - - def extract_all(self, metadata_json: str) -> Tuple[Optional[float], Optional[float], Optional[datetime]]: + + def extract_all( + self, metadata_json: str + ) -> Tuple[Optional[float], Optional[float], Optional[datetime]]: """ Extract GPS coordinates and datetime from metadata JSON string. 
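# A short sketch exercising the datetime fallbacks above: the EXIF-style
# "YYYY:MM:DD HH:MM:SS" value is not ISO, so it falls through to the strptime
# format list, while the ISO string takes the fast path. Values are fabricated.
from app.utils.extract_location_metadata import MetadataExtractor

extractor = MetadataExtractor()
exif_style = extractor.extract_datetime({"exif": {"DateTimeOriginal": "2024:01:15 14:30:45"}})
iso_style = extractor.extract_datetime({"date_created": "2024-01-15T14:30:45"})
print(exif_style == iso_style)  # True; both parse to 2024-01-15 14:30:45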
- + Args: metadata_json: JSON string from images.metadata column - + Returns: Tuple of (latitude, longitude, captured_at) """ latitude = None longitude = None captured_at = None - + # Handle null/empty metadata - if not metadata_json or metadata_json == 'null': + if not metadata_json or metadata_json == "null": return None, None, None - + try: # Parse JSON if isinstance(metadata_json, bytes): - metadata_json = metadata_json.decode('utf-8') - + metadata_json = metadata_json.decode("utf-8") + metadata = json.loads(metadata_json) - + # Extract GPS coordinates latitude, longitude = self.extract_gps_coordinates(metadata) - + # Extract datetime captured_at = self.extract_datetime(metadata) - + except json.JSONDecodeError as e: logger.warning(f"Invalid JSON in metadata: {e}") except Exception as e: logger.error(f"Unexpected error parsing metadata: {e}") - + return latitude, longitude, captured_at - + def migrate_metadata(self) -> Dict[str, int]: """ Main migration function to populate latitude, longitude, and captured_at columns for all images with metadata. - + This function: 1. Connects to the database 2. Retrieves all images with metadata 3. Extracts GPS coordinates and datetime 4. Updates the database with extracted values 5. Reports statistics - + Returns: Dictionary with migration statistics """ logger.info("=" * 70) logger.info("Starting metadata extraction migration...") logger.info("=" * 70) - + # Connect to database conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - + try: # Fetch all images with metadata logger.info("Fetching images from database...") cursor.execute("SELECT id, metadata FROM images WHERE metadata IS NOT NULL") images = cursor.fetchall() - - self.stats['total'] = len(images) + + self.stats["total"] = len(images) logger.info(f"Found {self.stats['total']} images with metadata") - - if self.stats['total'] == 0: + + if self.stats["total"] == 0: logger.warning("No images found with metadata") return self.stats - + # Process each image updates = [] for image_id, metadata_json in images: try: lat, lon, dt = self.extract_all(metadata_json) - + # Only update if we extracted something if lat is not None or lon is not None or dt is not None: - updates.append({ - 'id': image_id, - 'latitude': lat, - 'longitude': lon, - 'captured_at': dt - }) - + updates.append( + { + "id": image_id, + "latitude": lat, + "longitude": lon, + "captured_at": dt, + } + ) + # Track statistics has_location = lat is not None and lon is not None has_datetime = dt is not None - + if has_location: - self.stats['with_location'] += 1 + self.stats["with_location"] += 1 if has_datetime: - self.stats['with_datetime'] += 1 + self.stats["with_datetime"] += 1 if has_location and has_datetime: - self.stats['with_both'] += 1 + self.stats["with_both"] += 1 else: - self.stats['skipped'] += 1 - + self.stats["skipped"] += 1 + except Exception as e: - self.stats['errors'] += 1 + self.stats["errors"] += 1 logger.error(f"Error processing image {image_id}: {e}") - + # Batch update database if updates: logger.info(f"Updating {len(updates)} images...") - + for update_data in updates: - cursor.execute(""" + cursor.execute( + """ UPDATE images SET latitude = ?, longitude = ?, captured_at = ? WHERE id = ? 
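# A minimal end-to-end sketch of extract_all on the kind of JSON string stored in
# images.metadata, i.e. the per-row path the migration above takes; the payload is
# fabricated for illustration.
import json
from app.utils.extract_location_metadata import MetadataExtractor

payload = json.dumps(
    {"latitude": 26.9124, "longitude": 75.7873, "date_created": "2024-11-02 09:15:00"}
)
lat, lon, captured_at = MetadataExtractor().extract_all(payload)
print(lat, lon, captured_at)  # 26.9124 75.7873 2024-11-02 09:15:00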
- """, ( - update_data['latitude'], - update_data['longitude'], - update_data['captured_at'], - update_data['id'] - )) - + """, + ( + update_data["latitude"], + update_data["longitude"], + update_data["captured_at"], + update_data["id"], + ), + ) + conn.commit() - self.stats['updated'] = len(updates) + self.stats["updated"] = len(updates) logger.info(f"Successfully updated {self.stats['updated']} images") - + # Print summary self._print_summary() - + except Exception as e: logger.error(f"Migration failed: {e}") conn.rollback() raise - + finally: conn.close() - + return self.stats - + def _print_summary(self): """Print migration summary statistics.""" logger.info("\n" + "=" * 70) @@ -342,16 +361,22 @@ def _print_summary(self): logger.info("=" * 70) logger.info(f"Total images processed: {self.stats['total']}") logger.info(f"Images updated: {self.stats['updated']}") - logger.info(f"Images with location data: {self.stats['with_location']} ({self._percentage('with_location')}%)") - logger.info(f"Images with datetime: {self.stats['with_datetime']} ({self._percentage('with_datetime')}%)") - logger.info(f"Images with both: {self.stats['with_both']} ({self._percentage('with_both')}%)") + logger.info( + f"Images with location data: {self.stats['with_location']} ({self._percentage('with_location')}%)" + ) + logger.info( + f"Images with datetime: {self.stats['with_datetime']} ({self._percentage('with_datetime')}%)" + ) + logger.info( + f"Images with both: {self.stats['with_both']} ({self._percentage('with_both')}%)" + ) logger.info(f"Images skipped (no data): {self.stats['skipped']}") logger.info(f"Errors encountered: {self.stats['errors']}") logger.info("=" * 70) - + def _percentage(self, key: str) -> str: """Calculate percentage for a statistic.""" - if self.stats['total'] == 0: + if self.stats["total"] == 0: return "0.0" return f"{(self.stats[key] / self.stats['total'] * 100):.1f}" @@ -359,7 +384,7 @@ def _percentage(self, key: str) -> str: def main(): """ Main entry point for the metadata extraction script. 
- + Usage: python -m app.utils.extract_location_metadata """ @@ -368,19 +393,19 @@ def main(): if not Path(DATABASE_PATH).exists(): logger.error(f"Database not found at: {DATABASE_PATH}") return - + # Create extractor and run migration extractor = MetadataExtractor() stats = extractor.migrate_metadata() - + # Exit with appropriate code - if stats['errors'] > 0: + if stats["errors"] > 0: logger.warning("Migration completed with errors") exit(1) else: logger.info("✅ Migration completed successfully!") exit(0) - + except Exception as e: logger.error(f"❌ Migration failed: {e}") exit(1) diff --git a/backend/app/utils/images.py b/backend/app/utils/images.py index c7b91a8c6..42474b3d3 100644 --- a/backend/app/utils/images.py +++ b/backend/app/utils/images.py @@ -153,7 +153,7 @@ def image_util_prepare_image_records( """ image_records = [] extractor = MetadataExtractor() - + for image_path in image_files: folder_id = image_util_find_folder_id_for_image(image_path, folder_path_to_id) @@ -170,24 +170,30 @@ def image_util_prepare_image_records( if image_util_generate_thumbnail(image_path, thumbnail_path): metadata = image_util_extract_metadata(image_path) logger.debug(f"Extracted metadata for {image_path}: {metadata}") - + # Automatically extract GPS coordinates and datetime from metadata # Don't fail upload if extraction fails metadata_json = json.dumps(metadata) latitude, longitude, captured_at = None, None, None - + try: latitude, longitude, captured_at = extractor.extract_all(metadata_json) - + # Log GPS extraction results if latitude and longitude: - logger.info(f"GPS extracted for {os.path.basename(image_path)}: ({latitude}, {longitude})") + logger.info( + f"GPS extracted for {os.path.basename(image_path)}: ({latitude}, {longitude})" + ) if captured_at: - logger.debug(f"Date extracted for {os.path.basename(image_path)}: {captured_at}") + logger.debug( + f"Date extracted for {os.path.basename(image_path)}: {captured_at}" + ) except Exception as e: - logger.warning(f"GPS extraction failed for {os.path.basename(image_path)}: {e}") + logger.warning( + f"GPS extraction failed for {os.path.basename(image_path)}: {e}" + ) # Continue without GPS - don't fail the upload - + # Build image record with GPS data # ALWAYS include latitude, longitude, captured_at (even if None) # to satisfy SQL INSERT statement named parameters @@ -200,9 +206,13 @@ def image_util_prepare_image_records( "isTagged": False, "latitude": latitude, # Can be None "longitude": longitude, # Can be None - "captured_at": captured_at.isoformat() if isinstance(captured_at, datetime.datetime) and captured_at else captured_at, # Can be None + "captured_at": ( + captured_at.isoformat() + if isinstance(captured_at, datetime.datetime) and captured_at + else captured_at + ), # Can be None } - + image_records.append(image_record) return image_records diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py index cfa7cdc31..ab851bfd0 100644 --- a/backend/app/utils/memory_clustering.py +++ b/backend/app/utils/memory_clustering.py @@ -2,7 +2,7 @@ Memory Clustering Algorithm This module groups images into "memories" based on spatial proximity (location) -and temporal proximity (date/time). Uses DBSCAN for spatial clustering and +and temporal proximity (date/time). Uses DBSCAN for spatial clustering and date-based grouping for temporal clustering. A "memory" is a collection of photos taken at the same place around the same time. 
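# A hedged usage sketch of the clustering entry point described above. The image
# dicts mirror the rows returned by the database helpers (latitude/longitude and
# captured_at may be None); all values here are fabricated.
from app.utils.memory_clustering import MemoryClustering

images = [
    {"id": "a", "path": "/photos/a.jpg", "thumbnailPath": "/thumbs/a.jpg",
     "latitude": 26.9124, "longitude": 75.7873, "captured_at": "2024-11-02T09:15:00"},
    {"id": "b", "path": "/photos/b.jpg", "thumbnailPath": "/thumbs/b.jpg",
     "latitude": 26.9150, "longitude": 75.7900, "captured_at": "2024-11-02T11:40:00"},
]

clustering = MemoryClustering(
    location_radius_km=5.0, date_tolerance_days=3, min_images_per_memory=2
)
for memory in clustering.cluster_memories(images):
    print(memory["type"], memory["title"], memory["image_count"])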
@@ -52,7 +52,6 @@ "Darjeeling, West Bengal": (27.0410, 88.2663), "Ooty, Tamil Nadu": (11.4102, 76.6950), "Coorg, Karnataka": (12.3375, 75.8069), - # International - Major Tourist Destinations "Paris, France": (48.8566, 2.3522), "London, UK": (51.5074, -0.1278), @@ -67,76 +66,80 @@ } -def find_nearest_city(latitude: float, longitude: float, max_distance_km: float = 50.0) -> Optional[str]: +def find_nearest_city( + latitude: float, longitude: float, max_distance_km: float = 50.0 +) -> Optional[str]: """ Find the nearest known city to given coordinates. - + Args: latitude: GPS latitude longitude: GPS longitude max_distance_km: Maximum distance to consider (default: 50km) - + Returns: City name if within range, None otherwise """ from math import radians, cos, sin, asin, sqrt - + def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float: """Calculate distance between two points in km using Haversine formula.""" lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2]) dlat = lat2 - lat1 dlon = lon2 - lon1 - a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2 + a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2 c = 2 * asin(sqrt(a)) km = 6371 * c # Radius of Earth in km return km - + nearest_city = None - min_distance = float('inf') - + min_distance = float("inf") + for city_name, (city_lat, city_lon) in CITY_COORDINATES.items(): distance = haversine_distance(latitude, longitude, city_lat, city_lon) if distance < min_distance and distance <= max_distance_km: min_distance = distance nearest_city = city_name - + return nearest_city class MemoryClustering: """ Clusters images into memories based on location and time proximity. - + Algorithm: 1. Spatial clustering: Group images by GPS coordinates using DBSCAN 2. Temporal clustering: Within each location cluster, group by date 3. Memory creation: Generate memory objects with metadata - + Parameters: location_radius_km: Maximum distance between photos in the same location (default: 5km) date_tolerance_days: Maximum days between photos in the same memory (default: 3) min_images_per_memory: Minimum images required to form a memory (default: 2) """ - + def __init__( self, location_radius_km: float = 5.0, date_tolerance_days: int = 3, - min_images_per_memory: int = 2 + min_images_per_memory: int = 2, ): """Initialize the memory clustering algorithm.""" self.location_radius_km = location_radius_km self.date_tolerance_days = date_tolerance_days self.min_images_per_memory = min_images_per_memory - + # Convert km to degrees for DBSCAN # Approximate: 1 degree latitude ≈ 111 km self.location_eps_degrees = location_radius_km / 111.0 - - logger.info(f"MemoryClustering initialized: radius={location_radius_km}km, " - f"date_tolerance={date_tolerance_days}days, " - f"min_images={min_images_per_memory}") - + + logger.info( + f"MemoryClustering initialized: radius={location_radius_km}km, " + f"date_tolerance={date_tolerance_days}days, " + f"min_images={min_images_per_memory}" + ) + def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ FLEXIBLE: Cluster ALL images into memories. @@ -144,31 +147,31 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] - Has GPS only: Cluster by location using DBSCAN - Has Date only: Group by month (if ≥5 photos per month) - Has neither: Skip (can't create meaningful memory) - + Images work with EITHER date OR location - not both required! 
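# A standalone version of the haversine distance described above (Earth radius
# ~6371 km), sanity-checked against two of the city coordinates listed in this
# module; the helper name is illustrative.
from math import radians, cos, sin, asin, sqrt

def haversine_km(lat1, lon1, lat2, lon2):
    lat1, lon1, lat2, lon2 = map(radians, (lat1, lon1, lat2, lon2))
    dlat, dlon = lat2 - lat1, lon2 - lon1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    return 6371 * 2 * asin(sqrt(a))

# Paris -> London comes out at roughly 344 km with this formula.
print(round(haversine_km(48.8566, 2.3522, 51.5074, -0.1278)))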
- + Args: - images: List of image dicts with id, path, thumbnailPath, + images: List of image dicts with id, path, thumbnailPath, latitude, longitude, captured_at - + Returns: List of memories with type='location' or type='date' """ logger.info(f"Starting flexible clustering for {len(images)} images") - + if not images: return [] - + try: # Separate images by what data they have gps_images = [] date_only_images = [] skipped_count = 0 - + for img in images: - has_gps = img.get('latitude') and img.get('longitude') - has_date = img.get('captured_at') - + has_gps = img.get("latitude") and img.get("longitude") + has_date = img.get("captured_at") + if has_gps: # Has GPS (with or without date) → location-based clustering gps_images.append(img) @@ -178,32 +181,36 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] else: # Has neither GPS nor date → skip skipped_count += 1 - - logger.info(f"GPS-based: {len(gps_images)}, Date-only: {len(date_only_images)}, Skipped: {skipped_count}") - + + logger.info( + f"GPS-based: {len(gps_images)}, Date-only: {len(date_only_images)}, Skipped: {skipped_count}" + ) + memories = [] - + # Process location-based memories (these may also have dates) if gps_images: location_memories = self._cluster_location_images(gps_images) memories.extend(location_memories) - + # Process date-only memories (no GPS) if date_only_images: date_memories = self._cluster_date_images(date_only_images) memories.extend(date_memories) - + # Sort by date descending - memories.sort(key=lambda m: m.get('date_start', ''), reverse=True) - + memories.sort(key=lambda m: m.get("date_start", ""), reverse=True) + logger.info(f"Generated {len(memories)} total memories") return memories - + except Exception as e: logger.error(f"Clustering failed: {e}", exc_info=True) return [] - - def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + + def _cluster_location_images( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ SIMPLIFIED: Use existing DBSCAN clustering for GPS images. """ @@ -211,23 +218,27 @@ def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[st valid_images = self._filter_valid_images(images) if not valid_images: return [] - + location_clusters = self._cluster_by_location(valid_images) memories = [] - + for cluster in location_clusters: temporal_clusters = self._cluster_by_date(cluster) for temp_cluster in temporal_clusters: if len(temp_cluster) >= self.min_images_per_memory: - memory = self._create_simple_memory(temp_cluster, memory_type='location') + memory = self._create_simple_memory( + temp_cluster, memory_type="location" + ) memories.append(memory) - + return memories except Exception as e: logger.error(f"Location clustering failed: {e}") return [] - - def _cluster_date_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + + def _cluster_date_images( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ FLEXIBLE: Group date-only images by year-month. Uses min_images_per_memory (default: 2) as threshold. 
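# A compact illustration of the month bucketing used for date-only images below:
# photos are keyed by their 'YYYY-MM' string and only months that reach
# min_images_per_memory become memories. Dates are fabricated.
from collections import defaultdict
from datetime import datetime

monthly_groups = defaultdict(list)
for raw in ["2024-03-01T10:00:00", "2024-03-20T18:30:00", "2024-07-04T12:00:00"]:
    monthly_groups[datetime.fromisoformat(raw).strftime("%Y-%m")].append(raw)

min_images_per_memory = 2
print({k: len(v) for k, v in monthly_groups.items() if len(v) >= min_images_per_memory})
# -> {'2024-03': 2}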
@@ -235,41 +246,45 @@ def _cluster_date_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, A try: # Group by year-month monthly_groups = defaultdict(list) - + for img in images: - captured_at = img.get('captured_at') + captured_at = img.get("captured_at") if not captured_at: continue - + # Parse date if isinstance(captured_at, str): try: - dt = datetime.fromisoformat(captured_at.replace('Z', '')) + dt = datetime.fromisoformat(captured_at.replace("Z", "")) except: continue elif isinstance(captured_at, datetime): dt = captured_at else: continue - + # Group by year-month - month_key = dt.strftime('%Y-%m') + month_key = dt.strftime("%Y-%m") monthly_groups[month_key].append(img) - + # Create memories for months with enough photos (uses min_images_per_memory) memories = [] for month_key, month_images in monthly_groups.items(): if len(month_images) >= self.min_images_per_memory: - memory = self._create_simple_memory(month_images, memory_type='date') + memory = self._create_simple_memory( + month_images, memory_type="date" + ) if memory: memories.append(memory) - + return memories except Exception as e: logger.error(f"Date clustering failed: {e}") return [] - - def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = 'location') -> Dict[str, Any]: + + def _create_simple_memory( + self, images: List[Dict[str, Any]], memory_type: str = "location" + ) -> Dict[str, Any]: """ SIMPLIFIED: Create a memory object with minimal fields. Ensures all datetime objects are converted to ISO strings. @@ -279,42 +294,50 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = cleaned_images = [] for img in images: img_copy = img.copy() - if img_copy.get('captured_at') and isinstance(img_copy['captured_at'], datetime): - img_copy['captured_at'] = img_copy['captured_at'].isoformat() + if img_copy.get("captured_at") and isinstance( + img_copy["captured_at"], datetime + ): + img_copy["captured_at"] = img_copy["captured_at"].isoformat() cleaned_images.append(img_copy) - + # Sort by date - sorted_images = sorted(cleaned_images, key=lambda x: x.get('captured_at', '')) - + sorted_images = sorted( + cleaned_images, key=lambda x: x.get("captured_at", "") + ) + # Get date range - dates = [img.get('captured_at') for img in sorted_images if img.get('captured_at')] + dates = [ + img.get("captured_at") + for img in sorted_images + if img.get("captured_at") + ] if dates: if isinstance(dates[0], str): - dates = [datetime.fromisoformat(d.replace('Z', '')) for d in dates] + dates = [datetime.fromisoformat(d.replace("Z", "")) for d in dates] date_start = min(dates).isoformat() date_end = max(dates).isoformat() date_obj = min(dates) else: date_start = date_end = None date_obj = datetime.now() - + # Simple titles - if memory_type == 'location': + if memory_type == "location": # Calculate center first - lats = [img['latitude'] for img in images if img.get('latitude')] - lons = [img['longitude'] for img in images if img.get('longitude')] + lats = [img["latitude"] for img in images if img.get("latitude")] + lons = [img["longitude"] for img in images if img.get("longitude")] center_lat = np.mean(lats) if lats else 0 center_lon = np.mean(lons) if lons else 0 - + # Get actual location name using reverse geocoding location_name = self._reverse_geocode(center_lat, center_lon) - + # Create title based on date range if len(dates) > 1: # Multiple dates: show date range start_date = min(dates) end_date = max(dates) - if start_date.strftime('%B %Y') == end_date.strftime('%B %Y'): + if 
start_date.strftime("%B %Y") == end_date.strftime("%B %Y"): # Same month: "Jaipur in Nov 2025" title = f"{location_name} in {start_date.strftime('%b %Y')}" else: @@ -328,130 +351,142 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = title = location_name else: # Date-based: "Month Year" - title = date_obj.strftime('%B %Y') + title = date_obj.strftime("%B %Y") location_name = "" center_lat = 0 center_lon = 0 - + # Create memory memory_id = f"{memory_type}_{int(date_obj.timestamp())}_{len(images)}" - + return { - 'memory_id': memory_id, - 'title': title, - 'description': f"{len(images)} photos", - 'location_name': location_name, - 'date_start': date_start, - 'date_end': date_end, - 'image_count': len(images), - 'images': sorted_images, - 'thumbnail_image_id': sorted_images[0].get('id', ''), - 'center_lat': center_lat, - 'center_lon': center_lon, - 'type': memory_type # Add type field + "memory_id": memory_id, + "title": title, + "description": f"{len(images)} photos", + "location_name": location_name, + "date_start": date_start, + "date_end": date_end, + "image_count": len(images), + "images": sorted_images, + "thumbnail_image_id": sorted_images[0].get("id", ""), + "center_lat": center_lat, + "center_lon": center_lon, + "type": memory_type, # Add type field } except Exception as e: logger.error(f"Memory creation failed: {e}") return None - - def _cluster_gps_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + + def _cluster_gps_based_memories( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ Cluster images with GPS data into location-based memories. This is the original clustering logic. - + Args: images: List of images with GPS coordinates - + Returns: List of location-based memories """ # Filter images with valid location data valid_images = self._filter_valid_images(images) - + if not valid_images: logger.warning("No images with valid location data") return [] - + logger.info(f"Processing {len(valid_images)} GPS images") - + # Step 1: Cluster by location (spatial) location_clusters = self._cluster_by_location(valid_images) logger.info(f"Created {len(location_clusters)} location clusters") - + # Step 2: Within each location cluster, cluster by date (temporal) memories = [] for location_cluster in location_clusters: temporal_clusters = self._cluster_by_date(location_cluster) - + # Step 3: Create memory objects for temporal_cluster in temporal_clusters: if len(temporal_cluster) >= self.min_images_per_memory: memory = self._create_memory(temporal_cluster) memories.append(memory) - + return memories - - def _cluster_date_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + + def _cluster_date_based_memories( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ Cluster images WITHOUT GPS data into date-based memories. Groups photos by capture date/time only (screenshots, downloads, edits, etc.) 
- + Args: images: List of images without GPS coordinates - + Returns: List of date-based memories """ logger.info(f"Clustering {len(images)} non-GPS images by date") - + # Parse and filter images with valid dates valid_images = [] for img in images: img_copy = img.copy() - captured_at = img_copy.get('captured_at') - + captured_at = img_copy.get("captured_at") + if captured_at: if isinstance(captured_at, str): try: - captured_at = datetime.fromisoformat(captured_at.replace('Z', '')) - img_copy['captured_at'] = captured_at + captured_at = datetime.fromisoformat( + captured_at.replace("Z", "") + ) + img_copy["captured_at"] = captured_at except Exception: # Try alternative formats - for fmt in ['%Y-%m-%d %H:%M:%S', '%Y:%m:%d %H:%M:%S', '%Y-%m-%d']: + for fmt in [ + "%Y-%m-%d %H:%M:%S", + "%Y:%m:%d %H:%M:%S", + "%Y-%m-%d", + ]: try: captured_at = datetime.strptime(captured_at, fmt) - img_copy['captured_at'] = captured_at + img_copy["captured_at"] = captured_at break except Exception: continue else: - logger.debug(f"Could not parse date for image {img.get('id')}") + logger.debug( + f"Could not parse date for image {img.get('id')}" + ) continue elif isinstance(captured_at, datetime): - img_copy['captured_at'] = captured_at - + img_copy["captured_at"] = captured_at + valid_images.append(img_copy) - + if not valid_images: logger.warning("No non-GPS images with valid dates") return [] - + logger.info(f"Found {len(valid_images)} non-GPS images with valid dates") - + # Sort by date - valid_images.sort(key=lambda x: x['captured_at']) - + valid_images.sort(key=lambda x: x["captured_at"]) + # Group by date tolerance clusters = [] current_cluster = [valid_images[0]] - + for i in range(1, len(valid_images)): - prev_date = valid_images[i-1]['captured_at'] - curr_date = valid_images[i]['captured_at'] - + prev_date = valid_images[i - 1]["captured_at"] + curr_date = valid_images[i]["captured_at"] + # Check if within tolerance date_diff = abs((curr_date - prev_date).days) - + if date_diff <= self.date_tolerance_days: current_cluster.append(valid_images[i]) else: @@ -460,36 +495,36 @@ def _cluster_date_based_memories(self, images: List[Dict[str, Any]]) -> List[Dic clusters.append(current_cluster) # Start new cluster current_cluster = [valid_images[i]] - + # Add last cluster if it meets min size if current_cluster and len(current_cluster) >= self.min_images_per_memory: clusters.append(current_cluster) - + logger.info(f"Created {len(clusters)} date-based clusters") - + # Create memory objects memories = [] for cluster in clusters: memory = self._create_date_based_memory(cluster) memories.append(memory) - + return memories - + def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, Any]: """ Create a date-based memory object for images without GPS. 
- + Args: images: List of image dictionaries in the cluster (no GPS) - + Returns: Memory dictionary with metadata """ # Get date range - dates = [img['captured_at'] for img in images if img.get('captured_at')] + dates = [img["captured_at"] for img in images if img.get("captured_at")] date_start = min(dates) if dates else None date_end = max(dates) if dates else None - + # Generate title for date-based memory if date_start: if date_start.date() == date_end.date(): @@ -501,292 +536,314 @@ def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, A elif days <= 31: title = date_start.strftime("%B %Y") else: - title = date_start.strftime("%B - %B %Y") if date_start.month != date_end.month else date_start.strftime("%B %Y") + title = ( + date_start.strftime("%B - %B %Y") + if date_start.month != date_end.month + else date_start.strftime("%B %Y") + ) else: title = "Memories Collection" - + # Generate description description = self._generate_description(len(images), date_start, date_end) - + # Select thumbnail (middle image) thumbnail_idx = len(images) // 2 - thumbnail_image_id = images[thumbnail_idx]['id'] - + thumbnail_image_id = images[thumbnail_idx]["id"] + # Create memory ID (use timestamp only) - memory_id = f"mem_date_{date_start.strftime('%Y%m%d')}" if date_start else f"mem_date_unknown_{hash(tuple(img['id'] for img in images[:5]))}" - + memory_id = ( + f"mem_date_{date_start.strftime('%Y%m%d')}" + if date_start + else f"mem_date_unknown_{hash(tuple(img['id'] for img in images[:5]))}" + ) + # Convert captured_at datetime objects to ISO strings serialized_images = [] for img in images: img_copy = img.copy() - if img_copy.get('captured_at') and isinstance(img_copy['captured_at'], datetime): - img_copy['captured_at'] = img_copy['captured_at'].isoformat() + if img_copy.get("captured_at") and isinstance( + img_copy["captured_at"], datetime + ): + img_copy["captured_at"] = img_copy["captured_at"].isoformat() serialized_images.append(img_copy) - + return { - 'memory_id': memory_id, - 'title': title, - 'description': description, - 'location_name': 'Date-Based Memory', # Identifier for non-GPS memories - 'date_start': date_start.isoformat() if date_start else None, - 'date_end': date_end.isoformat() if date_end else None, - 'image_count': len(images), - 'images': serialized_images, - 'thumbnail_image_id': thumbnail_image_id, - 'center_lat': 0.0, # No GPS data - 'center_lon': 0.0 # No GPS data + "memory_id": memory_id, + "title": title, + "description": description, + "location_name": "Date-Based Memory", # Identifier for non-GPS memories + "date_start": date_start.isoformat() if date_start else None, + "date_end": date_end.isoformat() if date_end else None, + "image_count": len(images), + "images": serialized_images, + "thumbnail_image_id": thumbnail_image_id, + "center_lat": 0.0, # No GPS data + "center_lon": 0.0, # No GPS data } - - def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + + def _filter_valid_images( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ Filter images that have valid location and datetime data. 
- + Args: images: List of image dictionaries - + Returns: List of valid images with parsed datetime objects """ valid_images = [] - + for img in images: try: # Check for required fields - if not img.get('latitude') or not img.get('longitude'): + if not img.get("latitude") or not img.get("longitude"): continue - + # Parse captured_at if it's a string - captured_at = img.get('captured_at') + captured_at = img.get("captured_at") img_copy = img.copy() - + if captured_at: if isinstance(captured_at, str): try: # SQLite returns ISO format: "YYYY-MM-DDTHH:MM:SS" - captured_at = datetime.fromisoformat(captured_at.replace('Z', '')) - img_copy['captured_at'] = captured_at + captured_at = datetime.fromisoformat( + captured_at.replace("Z", "") + ) + img_copy["captured_at"] = captured_at except Exception as e: # Try alternative formats - for fmt in ['%Y-%m-%d %H:%M:%S', '%Y:%m:%d %H:%M:%S', '%Y-%m-%d']: + for fmt in [ + "%Y-%m-%d %H:%M:%S", + "%Y:%m:%d %H:%M:%S", + "%Y-%m-%d", + ]: try: captured_at = datetime.strptime(captured_at, fmt) - img_copy['captured_at'] = captured_at + img_copy["captured_at"] = captured_at break except Exception: continue else: # Could not parse date, but location is still valid - logger.debug(f"Could not parse date for image {img.get('id')}: {captured_at}") + logger.debug( + f"Could not parse date for image {img.get('id')}: {captured_at}" + ) elif isinstance(captured_at, datetime): - img_copy['captured_at'] = captured_at - + img_copy["captured_at"] = captured_at + valid_images.append(img_copy) - + except Exception as e: logger.warning(f"Error filtering image {img.get('id')}: {e}") continue - + return valid_images - - def _cluster_by_location(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: + + def _cluster_by_location( + self, images: List[Dict[str, Any]] + ) -> List[List[Dict[str, Any]]]: """ Cluster images by geographic location using DBSCAN. - + Args: images: List of image dictionaries with latitude/longitude - + Returns: List of location clusters (each cluster is a list of images) """ if not images: return [] - + # Extract coordinates - coordinates = np.array([ - [img['latitude'], img['longitude']] - for img in images - ]) - + coordinates = np.array([[img["latitude"], img["longitude"]] for img in images]) + # Apply DBSCAN clustering # eps: maximum distance between two samples (in degrees) # min_samples: minimum number of samples to form a cluster clustering = DBSCAN( eps=self.location_eps_degrees, min_samples=1, # Even single photos can form a cluster - metric='haversine', # Use haversine distance for lat/lon - algorithm='ball_tree' + metric="haversine", # Use haversine distance for lat/lon + algorithm="ball_tree", ) - + # Convert to radians for haversine coordinates_rad = np.radians(coordinates) labels = clustering.fit_predict(coordinates_rad) - + # Group images by cluster label clusters = defaultdict(list) for idx, label in enumerate(labels): if label != -1: # -1 is noise in DBSCAN clusters[label].append(images[idx]) - + # Noise points (label -1) each become their own cluster for idx, label in enumerate(labels): if label == -1: clusters[f"noise_{idx}"].append(images[idx]) - + return list(clusters.values()) - - def _cluster_by_date(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: + + def _cluster_by_date( + self, images: List[Dict[str, Any]] + ) -> List[List[Dict[str, Any]]]: """ Cluster images by date within a location cluster. - + Groups images that were taken within date_tolerance_days of each other. 
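# A self-contained sketch of DBSCAN over GPS points with the haversine metric, as in
# _cluster_by_location above. scikit-learn expects radian inputs for metric="haversine"
# and reads eps in radians, so the kilometre radius is divided by the Earth radius
# (~6371 km) in this sketch; the coordinates are fabricated.
import numpy as np
from sklearn.cluster import DBSCAN

points_deg = np.array([
    [26.9124, 75.7873],   # two nearby points, roughly 2 km apart
    [26.9300, 75.7900],
    [48.8566, 2.3522],    # far away (Paris)
])
radius_km = 5.0
labels = DBSCAN(
    eps=radius_km / 6371.0,
    min_samples=1,
    metric="haversine",
    algorithm="ball_tree",
).fit_predict(np.radians(points_deg))
print(labels)  # e.g. [0 0 1]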
- + Args: images: List of image dictionaries with captured_at datetime - + Returns: List of temporal clusters (each cluster is a list of images) """ if not images: return [] - + # Sort by date sorted_images = sorted( - [img for img in images if img.get('captured_at')], - key=lambda x: x['captured_at'] + [img for img in images if img.get("captured_at")], + key=lambda x: x["captured_at"], ) - + # Images without dates go into a separate cluster - no_date_images = [img for img in images if not img.get('captured_at')] - + no_date_images = [img for img in images if not img.get("captured_at")] + if not sorted_images: return [no_date_images] if no_date_images else [] - + # Group by date tolerance clusters = [] current_cluster = [sorted_images[0]] - + for i in range(1, len(sorted_images)): - prev_date = sorted_images[i-1]['captured_at'] - curr_date = sorted_images[i]['captured_at'] - + prev_date = sorted_images[i - 1]["captured_at"] + curr_date = sorted_images[i]["captured_at"] + # Check if within tolerance date_diff = abs((curr_date - prev_date).days) - + if date_diff <= self.date_tolerance_days: current_cluster.append(sorted_images[i]) else: # Start new cluster clusters.append(current_cluster) current_cluster = [sorted_images[i]] - + # Add last cluster if current_cluster: clusters.append(current_cluster) - + # Add no-date images as separate cluster if exists if no_date_images: clusters.append(no_date_images) - + return clusters - + def _create_memory(self, images: List[Dict[str, Any]]) -> Dict[str, Any]: """ Create a memory object from a cluster of images. - + Args: images: List of image dictionaries in the cluster - + Returns: Memory dictionary with metadata """ # Calculate center coordinates - center_lat = np.mean([img['latitude'] for img in images]) - center_lon = np.mean([img['longitude'] for img in images]) - + center_lat = np.mean([img["latitude"] for img in images]) + center_lon = np.mean([img["longitude"] for img in images]) + # Get date range - dates = [img['captured_at'] for img in images if img.get('captured_at')] + dates = [img["captured_at"] for img in images if img.get("captured_at")] if dates: date_start = min(dates) date_end = max(dates) else: date_start = None date_end = None - + # Get location name location_name = self._reverse_geocode(center_lat, center_lon) - + # Generate title title = self._generate_title(location_name, date_start, len(images)) - + # Generate description description = self._generate_description(len(images), date_start, date_end) - + # Select thumbnail (first image or middle image) thumbnail_idx = len(images) // 2 - thumbnail_image_id = images[thumbnail_idx]['id'] - + thumbnail_image_id = images[thumbnail_idx]["id"] + # Create memory ID (use timestamp + location hash) memory_id = self._generate_memory_id(center_lat, center_lon, date_start) - + # Convert captured_at datetime objects to ISO strings for all images serialized_images = [] for img in images: img_copy = img.copy() - if img_copy.get('captured_at') and isinstance(img_copy['captured_at'], datetime): - img_copy['captured_at'] = img_copy['captured_at'].isoformat() + if img_copy.get("captured_at") and isinstance( + img_copy["captured_at"], datetime + ): + img_copy["captured_at"] = img_copy["captured_at"].isoformat() serialized_images.append(img_copy) - + return { - 'memory_id': memory_id, - 'title': title, - 'description': description, - 'location_name': location_name, - 'date_start': date_start.isoformat() if date_start else None, - 'date_end': date_end.isoformat() if date_end else None, - 'image_count': 
len(images), - 'images': serialized_images, - 'thumbnail_image_id': thumbnail_image_id, - 'center_lat': float(center_lat), - 'center_lon': float(center_lon) + "memory_id": memory_id, + "title": title, + "description": description, + "location_name": location_name, + "date_start": date_start.isoformat() if date_start else None, + "date_end": date_end.isoformat() if date_end else None, + "image_count": len(images), + "images": serialized_images, + "thumbnail_image_id": thumbnail_image_id, + "center_lat": float(center_lat), + "center_lon": float(center_lon), } - + def _reverse_geocode(self, latitude: float, longitude: float) -> str: """ Convert GPS coordinates to a human-readable location name. - + Uses city coordinate mapping for major cities, falls back to coordinates. - + Args: latitude: GPS latitude longitude: GPS longitude - + Returns: Location string (e.g., "Jaipur, Rajasthan" or formatted coordinates) """ # Try to find nearest known city city_name = find_nearest_city(latitude, longitude, max_distance_km=50.0) - + if city_name: - logger.debug(f"Mapped coordinates ({latitude:.4f}, {longitude:.4f}) to {city_name}") + logger.debug( + f"Mapped coordinates ({latitude:.4f}, {longitude:.4f}) to {city_name}" + ) return city_name - + # Fallback: Return formatted coordinates return f"{latitude:.4f}°, {longitude:.4f}°" - + def _generate_title( - self, - location_name: str, - date: Optional[datetime], - image_count: int + self, location_name: str, date: Optional[datetime], image_count: int ) -> str: """ Generate a title for the memory. - + Args: location_name: Human-readable location date: Date of the memory image_count: Number of images - + Returns: Title string """ @@ -795,21 +852,21 @@ def _generate_title( return f"{location_name} - {month_year}" else: return f"{location_name} - {image_count} photos" - + def _generate_description( self, image_count: int, date_start: Optional[datetime], - date_end: Optional[datetime] + date_end: Optional[datetime], ) -> str: """ Generate a description for the memory. - + Args: image_count: Number of images date_start: Start date date_end: End date - + Returns: Description string """ @@ -821,28 +878,25 @@ def _generate_description( return f"{image_count} photos over {days} days ({date_start.strftime('%b %d')} - {date_end.strftime('%b %d, %Y')})" else: return f"{image_count} photos" - + def _generate_memory_id( - self, - latitude: float, - longitude: float, - date: Optional[datetime] + self, latitude: float, longitude: float, date: Optional[datetime] ) -> str: """ Generate a unique ID for the memory. 
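# A brief illustration of the geocoding fallback above: coordinates near a listed city
# resolve to its name, anything else is rendered as formatted degrees. _reverse_geocode
# is an internal helper, called directly here only for demonstration.
from app.utils.memory_clustering import MemoryClustering

clustering = MemoryClustering()
print(clustering._reverse_geocode(51.50, -0.12))   # e.g. "London, UK"
print(clustering._reverse_geocode(-45.0, 160.0))   # "-45.0000°, 160.0000°"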
- + Args: latitude: Center latitude longitude: Center longitude date: Date of memory - + Returns: Unique memory ID """ # Create hash from location and date location_hash = hash((round(latitude, 2), round(longitude, 2))) if date: - date_str = date.strftime('%Y%m%d') + date_str = date.strftime("%Y%m%d") return f"mem_{date_str}_{abs(location_hash)}" else: return f"mem_nodate_{abs(location_hash)}" diff --git a/backend/app/utils/verify_memories_setup.py b/backend/app/utils/verify_memories_setup.py index e38b2d6ee..f7e55ef8f 100644 --- a/backend/app/utils/verify_memories_setup.py +++ b/backend/app/utils/verify_memories_setup.py @@ -12,14 +12,16 @@ import importlib from pathlib import Path + # ANSI color codes for terminal output class Colors: - GREEN = '\033[92m' - RED = '\033[91m' - YELLOW = '\033[93m' - BLUE = '\033[94m' - BOLD = '\033[1m' - RESET = '\033[0m' + GREEN = "\033[92m" + RED = "\033[91m" + YELLOW = "\033[93m" + BLUE = "\033[94m" + BOLD = "\033[1m" + RESET = "\033[0m" + def print_header(text): """Print section header""" @@ -27,67 +29,75 @@ def print_header(text): print(f"{Colors.BOLD}{Colors.BLUE}{text}{Colors.RESET}") print(f"{Colors.BOLD}{Colors.BLUE}{'='*60}{Colors.RESET}\n") + def print_success(text): """Print success message""" print(f"{Colors.GREEN}✓ {text}{Colors.RESET}") + def print_error(text): """Print error message""" print(f"{Colors.RED}✗ {text}{Colors.RESET}") + def print_warning(text): """Print warning message""" print(f"{Colors.YELLOW}⚠ {text}{Colors.RESET}") + def print_info(text): """Print info message""" print(f" {text}") + def check_dependencies(): """Check if all required packages are installed""" print_header("1. Checking Python Dependencies") - + required_packages = { - 'numpy': '1.26.4', - 'sklearn': '1.5.1', # scikit-learn imports as sklearn - 'fastapi': '0.111.0', - 'sqlalchemy': None, - 'pydantic': None, + "numpy": "1.26.4", + "sklearn": "1.5.1", # scikit-learn imports as sklearn + "fastapi": "0.111.0", + "sqlalchemy": None, + "pydantic": None, } - + all_installed = True - + for package, expected_version in required_packages.items(): try: module = importlib.import_module(package) - version = getattr(module, '__version__', 'Unknown') - + version = getattr(module, "__version__", "Unknown") + if expected_version and version != expected_version: - print_warning(f"{package} installed (v{version}), expected v{expected_version}") + print_warning( + f"{package} installed (v{version}), expected v{expected_version}" + ) else: print_success(f"{package} v{version}") except ImportError: print_error(f"{package} is NOT installed") all_installed = False - + return all_installed + def check_file_structure(): """Check if all required files exist""" print_header("2. Checking File Structure") - + backend_path = Path(__file__).parent.parent.parent - + required_files = [ - 'app/utils/extract_location_metadata.py', - 'app/utils/memory_clustering.py', - 'app/routes/memories.py', - 'app/database/images.py', - 'main.py', + "app/utils/extract_location_metadata.py", + "app/utils/memory_clustering.py", + "app/routes/memories.py", + "app/database/images.py", + "main.py", ] - + all_exist = True - + for file_path in required_files: full_path = backend_path / file_path if full_path.exists(): @@ -96,44 +106,47 @@ def check_file_structure(): else: print_error(f"{file_path} NOT FOUND") all_exist = False - + return all_exist + def check_database_schema(): """Check if database has required columns and indexes""" print_header("3. 
Checking Database Schema") - + backend_path = Path(__file__).parent.parent.parent - db_path = backend_path / 'app' / 'database' / 'PictoPy.db' - + db_path = backend_path / "app" / "database" / "PictoPy.db" + if not db_path.exists(): print_warning("Database file 'gallery.db' not found") print_info(" → Database will be created on first run") return None # Not an error, just not initialized yet - + try: conn = sqlite3.connect(str(db_path)) cursor = conn.cursor() - + # Check if images table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" + ) if not cursor.fetchone(): print_error("Table 'images' does not exist") conn.close() return False - + print_success("Table 'images' exists") - + # Check for required columns cursor.execute("PRAGMA table_info(images)") columns = {row[1]: row[2] for row in cursor.fetchall()} - + required_columns = { - 'latitude': 'FLOAT', - 'longitude': 'FLOAT', - 'captured_at': 'DATETIME', + "latitude": "FLOAT", + "longitude": "FLOAT", + "captured_at": "DATETIME", } - + all_columns_exist = True for col_name, col_type in required_columns.items(): if col_name in columns: @@ -143,44 +156,47 @@ def check_database_schema(): print_info(" → Run migration: python migrate_add_memories_columns.py") print_info(" → Or restart the app (auto-migration enabled)") all_columns_exist = False - + # Check for indexes cursor.execute("SELECT name FROM sqlite_master WHERE type='index'") indexes = [row[0] for row in cursor.fetchall()] - + required_indexes = [ - 'ix_images_latitude', - 'ix_images_longitude', - 'ix_images_captured_at', + "ix_images_latitude", + "ix_images_longitude", + "ix_images_captured_at", ] - + print() for index_name in required_indexes: if index_name in indexes: print_success(f"Index '{index_name}'") else: - print_warning(f"Index '{index_name}' not found (recommended for performance)") - + print_warning( + f"Index '{index_name}' not found (recommended for performance)" + ) + conn.close() return all_columns_exist - + except Exception as e: print_error(f"Database check failed: {e}") return False + def check_imports(): """Check if all modules can be imported""" print_header("4. Checking Module Imports") - + modules_to_check = [ - 'app.utils.extract_location_metadata', - 'app.utils.memory_clustering', - 'app.routes.memories', - 'app.database.images', + "app.utils.extract_location_metadata", + "app.utils.memory_clustering", + "app.routes.memories", + "app.database.images", ] - + all_imported = True - + for module_name in modules_to_check: try: importlib.import_module(module_name) @@ -188,28 +204,29 @@ def check_imports(): except Exception as e: print_error(f"{module_name} - {str(e)}") all_imported = False - + return all_imported + def check_api_routes(): """Check if Memories API routes are registered""" print_header("5. 
Checking API Routes") - + try: # Import main app sys.path.insert(0, str(Path(__file__).parent.parent.parent)) from main import app - + # Get all routes routes = [route.path for route in app.routes] - + required_routes = [ - '/api/memories/generate', - '/api/memories/timeline', - '/api/memories/on-this-day', - '/api/memories/locations', + "/api/memories/generate", + "/api/memories/timeline", + "/api/memories/on-this-day", + "/api/memories/locations", ] - + all_routes_exist = True for route_path in required_routes: if route_path in routes: @@ -218,51 +235,64 @@ def check_api_routes(): print_error(f"{route_path} NOT FOUND") print_info(" → Check if memories router is included in main.py") all_routes_exist = False - + return all_routes_exist - + except Exception as e: print_error(f"Failed to check routes: {e}") return False + def print_summary(results): """Print final summary""" print_header("Verification Summary") - + all_passed = all(result is not False for result in results.values()) - + for check_name, result in results.items(): status = "✓ PASS" if result else ("⚠ WARNING" if result is None else "✗ FAIL") - color = Colors.GREEN if result else (Colors.YELLOW if result is None else Colors.RED) + color = ( + Colors.GREEN + if result + else (Colors.YELLOW if result is None else Colors.RED) + ) print(f"{color}{status}{Colors.RESET} - {check_name}") - + print() if all_passed: - print(f"{Colors.BOLD}{Colors.GREEN}🎉 All checks passed! Memories feature is ready to use.{Colors.RESET}") + print( + f"{Colors.BOLD}{Colors.GREEN}🎉 All checks passed! Memories feature is ready to use.{Colors.RESET}" + ) print_info("Next steps:") print_info("1. Start the backend: cd backend && ./run.sh") - print_info("2. Run metadata extraction: python -m app.utils.extract_location_metadata") + print_info( + "2. Run metadata extraction: python -m app.utils.extract_location_metadata" + ) print_info("3. Test API endpoints: see MEMORIES_TESTING_GUIDE.md") else: - print(f"{Colors.BOLD}{Colors.RED}❌ Some checks failed. Please fix the issues above.{Colors.RESET}") + print( + f"{Colors.BOLD}{Colors.RED}❌ Some checks failed. Please fix the issues above.{Colors.RESET}" + ) print_info("See MEMORIES_README.md for setup instructions") - + print() + def main(): """Run all verification checks""" print(f"\n{Colors.BOLD}PictoPy Memories Feature Verification{Colors.RESET}") print(f"{Colors.BOLD}====================================={Colors.RESET}") - + results = { - 'Dependencies': check_dependencies(), - 'File Structure': check_file_structure(), - 'Database Schema': check_database_schema(), - 'Module Imports': check_imports(), - 'API Routes': check_api_routes(), + "Dependencies": check_dependencies(), + "File Structure": check_file_structure(), + "Database Schema": check_database_schema(), + "Module Imports": check_imports(), + "API Routes": check_api_routes(), } - + print_summary(results) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/frontend/src/components/Memories/FeaturedMemoryCard.tsx b/frontend/src/components/Memories/FeaturedMemoryCard.tsx index 2cbd7d14e..713120733 100644 --- a/frontend/src/components/Memories/FeaturedMemoryCard.tsx +++ b/frontend/src/components/Memories/FeaturedMemoryCard.tsx @@ -1,13 +1,17 @@ /** * FeaturedMemoryCard Component - * + * * Large, prominent card for "On This Day" section. * Shows hero image with "X years ago today" text overlay. 
*/ import React from 'react'; import { MemoryImage } from '@/services/memoriesApi'; -import { calculateYearsAgo, formatPhotoCount, getThumbnailUrl } from '@/services/memoriesApi'; +import { + calculateYearsAgo, + formatPhotoCount, + getThumbnailUrl, +} from '@/services/memoriesApi'; interface FeaturedMemoryCardProps { images: MemoryImage[]; @@ -20,134 +24,143 @@ interface FeaturedMemoryCardProps { * Featured memory card for "On This Day" section * Shows larger hero image with prominent styling */ -export const FeaturedMemoryCard = React.memo(({ images, years, onClick }) => { - // Get the first image as hero - const heroImage = images[0]; - - if (!heroImage) return null; - - const thumbnailUrl = getThumbnailUrl(heroImage); - - // Calculate years ago from the captured date - const yearsAgo = heroImage.captured_at ? calculateYearsAgo(heroImage.captured_at) : 0; - - // Handle image load error - const handleImageError = (e: React.SyntheticEvent) => { - e.currentTarget.src = '/placeholder-image.png'; - }; - - return ( -
{ - if (e.key === 'Enter' || e.key === ' ') { - e.preventDefault(); - onClick(); - } - }} - aria-label={`View On This Day memory from ${yearsAgo} years ago`} - > -
- {/* Hero Image */} -
- On This Day - - {/* Gradient Overlay */} -
- - {/* Content Overlay */} -
- {/* "On This Day" Badge */} -
- - - - On This Day -
- - {/* Years Ago Text */} -

- {yearsAgo === 1 ? 'On this day last year' : yearsAgo > 0 ? `${yearsAgo} years ago` : 'Today'} -

- - {/* Photo Count */} -
- - - - {formatPhotoCount(images.length)} - {years.length > 1 && ` from ${years.length} ${years.length === 1 ? 'year' : 'years'}`} -
-
-
- - {/* Additional Images Preview (if more than 1) */} - {images.length > 1 && ( -
- {images.slice(1, 4).map((img, idx) => ( -
- +export const FeaturedMemoryCard = React.memo( + ({ images, years, onClick }) => { + // Get the first image as hero + const heroImage = images[0]; + + if (!heroImage) return null; + + const thumbnailUrl = getThumbnailUrl(heroImage); + + // Calculate years ago from the captured date + const yearsAgo = heroImage.captured_at + ? calculateYearsAgo(heroImage.captured_at) + : 0; + + // Handle image load error + const handleImageError = (e: React.SyntheticEvent) => { + e.currentTarget.src = '/placeholder-image.png'; + }; + + return ( +
{ + if (e.key === 'Enter' || e.key === ' ') { + e.preventDefault(); + onClick(); + } + }} + aria-label={`View On This Day memory from ${yearsAgo} years ago`} + > +
+ {/* Hero Image */} +
+ On This Day + + {/* Gradient Overlay */} +
+ + {/* Content Overlay */} +
+ {/* "On This Day" Badge */} +
+ + + + On This Day
- ))} - {images.length > 4 && ( -
- +{images.length - 4} + + {/* Years Ago Text */} +

+ {yearsAgo === 1 + ? 'On this day last year' + : yearsAgo > 0 + ? `${yearsAgo} years ago` + : 'Today'} +

+ + {/* Photo Count */} +
+ + + + {formatPhotoCount(images.length)} + {years.length > 1 && + ` from ${years.length} ${years.length === 1 ? 'year' : 'years'}`}
- )} +
- )} -
- - {/* CTA Text */} -
-

- Click to relive these memories → -

+ + {/* Additional Images Preview (if more than 1) */} + {images.length > 1 && ( +
+ {images.slice(1, 4).map((img, idx) => ( +
+ +
+ ))} + {images.length > 4 && ( +
+ +{images.length - 4} +
+ )} +
+ )} +
+ + {/* CTA Text */} +
+

+ Click to relive these memories → +

+
-
- ); -}); + ); + }, +); FeaturedMemoryCard.displayName = 'FeaturedMemoryCard'; diff --git a/frontend/src/components/Memories/MemoriesPage.tsx b/frontend/src/components/Memories/MemoriesPage.tsx index c0a7946cd..a1e7a4e5c 100644 --- a/frontend/src/components/Memories/MemoriesPage.tsx +++ b/frontend/src/components/Memories/MemoriesPage.tsx @@ -1,10 +1,10 @@ /** * MemoriesPage Component - * + * * Main page for the Memories feature. * Displays memories in sections: On This Day, Recent, This Year, All Memories. * Includes filter tabs for All/Location/Date memories. - * + * * Layout mimics Google Photos Memories with smart feed organization. */ @@ -24,7 +24,7 @@ import { selectAllMemories, selectMemoriesLoading, selectMemoriesError, - selectTotalMemoryCount + selectTotalMemoryCount, } from '@/store/slices/memoriesSlice'; import { MemoryCard } from './MemoryCard'; import { FeaturedMemoryCard } from './FeaturedMemoryCard'; @@ -35,12 +35,12 @@ import type { Memory } from '@/services/memoriesApi'; * Loading skeleton for memory cards */ const MemoryCardSkeleton: React.FC = () => ( -
-
-
-
-
-
+
+
+
+
+
+
); @@ -49,10 +49,10 @@ const MemoryCardSkeleton: React.FC = () => ( * Featured card skeleton for On This Day */ const FeaturedSkeleton: React.FC = () => ( -
-
-
-
+
+
+
+
); @@ -60,11 +60,16 @@ const FeaturedSkeleton: React.FC = () => ( /** * Section header component */ -const SectionHeader: React.FC<{ title: string; count?: number }> = ({ title, count }) => ( -

+const SectionHeader: React.FC<{ title: string; count?: number }> = ({ + title, + count, +}) => ( +

{title} {count !== undefined && count > 0 && ( - ({count}) + + ({count}) + )}

); @@ -72,11 +77,14 @@ const SectionHeader: React.FC<{ title: string; count?: number }> = ({ title, cou /** * Error message component with retry button */ -const ErrorMessage: React.FC<{ message: string; onRetry: () => void }> = ({ message, onRetry }) => ( -
+const ErrorMessage: React.FC<{ message: string; onRetry: () => void }> = ({ + message, + onRetry, +}) => ( +
void }> = ({ mess d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" /> -

{message}

+

{message}

@@ -104,9 +112,9 @@ const ErrorMessage: React.FC<{ message: string; onRetry: () => void }> = ({ mess * Empty state component */ const EmptyState: React.FC<{ message: string }> = ({ message }) => ( -
+
= ({ message }) => ( d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" /> -

{message}

+

{message}

); @@ -129,7 +137,7 @@ const EmptyState: React.FC<{ message: string }> = ({ message }) => ( */ export const MemoriesPage: React.FC = () => { const dispatch = useAppDispatch(); - + // Selectors const onThisDayImages = useAppSelector(selectOnThisDayImages); const onThisDayMeta = useAppSelector(selectOnThisDayMeta); @@ -144,18 +152,22 @@ export const MemoriesPage: React.FC = () => { const [filter, setFilter] = useState<'all' | 'location' | 'date'>('all'); // Calculate counts - const locationCount = allMemories.filter(m => m.center_lat !== 0 || m.center_lon !== 0).length; - const dateCount = allMemories.filter(m => m.center_lat === 0 && m.center_lon === 0).length; + const locationCount = allMemories.filter( + (m) => m.center_lat !== 0 || m.center_lon !== 0, + ).length; + const dateCount = allMemories.filter( + (m) => m.center_lat === 0 && m.center_lon === 0, + ).length; // Simple filter function const applyFilter = (memories: Memory[]) => { if (filter === 'location') { - return memories.filter(m => m.center_lat !== 0 || m.center_lon !== 0); + return memories.filter((m) => m.center_lat !== 0 || m.center_lon !== 0); } if (filter === 'date') { - return memories.filter(m => m.center_lat === 0 && m.center_lon === 0); + return memories.filter((m) => m.center_lat === 0 && m.center_lon === 0); } - return memories; // 'all' + return memories; // 'all' }; // Apply filter @@ -182,12 +194,13 @@ export const MemoriesPage: React.FC = () => { description: `Photos from ${onThisDayMeta.years.join(', ')}`, location_name: 'Various locations', date_start: onThisDayImages[0]?.captured_at || null, - date_end: onThisDayImages[onThisDayImages.length - 1]?.captured_at || null, + date_end: + onThisDayImages[onThisDayImages.length - 1]?.captured_at || null, image_count: onThisDayImages.length, images: onThisDayImages, thumbnail_image_id: onThisDayImages[0]?.id || '', center_lat: onThisDayImages[0]?.latitude || 0, - center_lon: onThisDayImages[0]?.longitude || 0 + center_lon: onThisDayImages[0]?.longitude || 0, }; dispatch(setSelectedMemory(tempMemory)); } @@ -200,18 +213,21 @@ export const MemoriesPage: React.FC = () => { const handleRetryOnThisDay = () => dispatch(fetchOnThisDay()); // Check if any data exists - const hasAnyData = onThisDayImages.length > 0 || recentMemories.length > 0 || - yearMemories.length > 0 || allMemories.length > 0; + const hasAnyData = + onThisDayImages.length > 0 || + recentMemories.length > 0 || + yearMemories.length > 0 || + allMemories.length > 0; return (
{/* Header */} -
-
+
+
{ )}
- + {/* Refresh button */}
{/* Main Content */} -
+
{/* Simple Filter Buttons */} {hasAnyData && ( -
+
+
- - {/* Description */} - {memory.description && ( -

- {memory.description} -

- )}
- {/* Close Button */} - -
-
- - {/* Images Grid */} -
-
- {memory.images.map((image, index) => ( -
handleImageClick(index)} - > - {`Photo - - {/* Hover Overlay */} -
- +
+ {memory.images.map((image, index) => ( +
handleImageClick(index)} > - - -
+ + {/* Hover Overlay */} +
+ + + +
+
+ ))}
- ))} -
-
+
+ + {/* Footer (optional - for future features like share, download, etc.) */} +
+
+

+ Click any photo to view with zoom and slideshow +

- {/* Footer (optional - for future features like share, download, etc.) */} -
-
-

- Click any photo to view with zoom and slideshow -

- - {/* Future: Add share, download buttons here */} -
- {/* Placeholder for future actions */} + {/* Future: Add share, download buttons here */} +
+ {/* Placeholder for future actions */} +
+
-
-
)} {/* MediaView for full-screen image viewing with zoom/slideshow */} @@ -270,7 +281,7 @@ export const MemoryViewer: React.FC = () => { ({ + images={memory.images.map((img) => ({ id: img.id, path: img.path, thumbnailPath: img.thumbnailPath, @@ -287,8 +298,8 @@ export const MemoryViewer: React.FC = () => { file_size: 0, item_type: 'image', latitude: img.latitude || undefined, - longitude: img.longitude || undefined - } + longitude: img.longitude || undefined, + }, }))} /> )} diff --git a/frontend/src/components/Memories/index.ts b/frontend/src/components/Memories/index.ts index 881210fbb..9e65554e2 100644 --- a/frontend/src/components/Memories/index.ts +++ b/frontend/src/components/Memories/index.ts @@ -1,6 +1,6 @@ /** * Memories Component Exports - * + * * Barrel file for clean imports across the application. * Import components like: import { MemoriesPage, MemoryCard } from '@/components/Memories' */ From 9eba77c4f1c423e6a8b189f8677f0d4daa232cf7 Mon Sep 17 00:00:00 2001 From: harshit Date: Sun, 14 Dec 2025 10:00:39 +0530 Subject: [PATCH 05/22] docs: add comprehensive Memories feature documentation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✨ Added: - Updated docs/overview/features.md with Memories section - Created docs/frontend/memories.md with detailed documentation 📚 Documentation includes: - Feature overview (On This Day, Location/Date memories) - Memory types and sections (Recent, This Year, All) - Filtering options and implementation - Component architecture (MemoriesPage, MemoryCard, MemoryViewer) - State management with Redux Toolkit - API endpoints and parameters - Backend clustering algorithm (DBSCAN) - Reverse geocoding with city database - Bug fixes and improvements - Testing guidelines - Performance considerations - Future enhancement ideas The documentation provides both user-facing feature descriptions and technical implementation details for developers. --- docs/frontend/memories.md | 309 ++++++++++++++++++++++++++++++++++++++ docs/overview/features.md | 47 ++++++ 2 files changed, 356 insertions(+) create mode 100644 docs/frontend/memories.md diff --git a/docs/frontend/memories.md b/docs/frontend/memories.md new file mode 100644 index 000000000..1a3203f4c --- /dev/null +++ b/docs/frontend/memories.md @@ -0,0 +1,309 @@ +# Memories Feature Documentation + +## Overview + +The Memories feature automatically organizes photos into meaningful collections based on location and date, providing a Google Photos-style experience for reliving past moments. + +## Features + +### 1. On This Day +Shows photos from the same date in previous years with a prominent featured card. + +**Display:** +- "On this day last year" for photos from exactly 1 year ago +- "[X] years ago" for photos from multiple years ago +- Featured hero image with gradient overlay +- Photo count and year badges + +### 2. Memory Types + +#### Location-Based Memories +Photos grouped by GPS coordinates using DBSCAN clustering: +- **Radius**: 5km (configurable) +- **Title Format**: "Trip to [City Name], [Year]" +- **Example**: "Trip to Jaipur, 2025" +- **Reverse Geocoding**: Maps coordinates to actual city names +- **Supported Cities**: 30+ major cities worldwide (Indian, European, American, Asian, etc.) + +#### Date-Based Memories +Photos grouped by month for images without GPS: +- **Grouping**: Monthly clusters +- **Title Format**: "[Month] [Year]" +- **Flexibility**: Works even without location data + +### 3. 
Memory Sections + +#### Recent Memories +- **Timeframe**: Last 30 days +- **Use Case**: Recent trips and events +- **API**: `GET /api/memories/timeline?days=30` + +#### This Year +- **Timeframe**: Last 365 days (current year) +- **Use Case**: Year-in-review +- **API**: `GET /api/memories/timeline?days=365` + +#### All Memories +- **Timeframe**: All time +- **Use Case**: Complete memory collection +- **API**: `POST /api/memories/generate` + +### 4. Filtering + +**Filter Options:** +- **All**: Shows all memories (default) +- **Location**: Only memories with GPS coordinates +- **Date**: Only memories without GPS (date-based) + +**Implementation:** +```typescript +const applyFilter = (memories: Memory[]) => { + if (filter === 'location') { + return memories.filter(m => m.center_lat !== 0 || m.center_lon !== 0); + } + if (filter === 'date') { + return memories.filter(m => m.center_lat === 0 && m.center_lon === 0); + } + return memories; // 'all' +}; +``` + +### 5. Memory Viewer + +Full-screen modal for viewing memory photos: + +**Features:** +- Image grid with hover effects +- Click to open MediaView +- Zoom and pan support +- Slideshow mode +- Keyboard navigation +- Info panel with metadata +- Thumbnail strip + +**Controls:** +- **Zoom**: Mouse wheel or +/- keys +- **Navigation**: Arrow keys or buttons +- **Slideshow**: Play/Pause button or Space key +- **Info Panel**: Toggle with 'I' key +- **Close**: ESC key or X button + +## Components + +### MemoriesPage +Main page component with sections: +- Header with refresh button +- Filter buttons +- On This Day section +- Recent Memories grid +- This Year grid +- All Memories grid + +### MemoryCard +Individual memory card display: +- Thumbnail image +- Memory title (formatted based on type) +- Date range (relative format) +- Location (if available) +- Photo count badge +- Type badge (Location/Date) + +### FeaturedMemoryCard +Large featured card for "On This Day": +- Hero image with gradient overlay +- "On this day last year" text +- Photo count and year info +- Additional image previews + +### MemoryViewer +Modal for viewing memory album: +- Conditionally rendered to prevent event bubbling +- Grid layout of all photos +- MediaView integration for full-screen viewing +- Proper z-index layering + +## State Management + +Using Redux Toolkit with slices: + +```typescript +// Store structure +{ + memories: { + onThisDay: { + images: MemoryImage[], + meta: { today: string, years: number[] } + }, + recent: Memory[], + year: Memory[], + all: Memory[], + selectedMemory: Memory | null, + loading: { onThisDay, recent, year, all }, + error: { onThisDay, recent, year, all } + } +} +``` + +**Key Actions:** +- `fetchOnThisDay()` - Get photos from same date +- `fetchRecentMemories(days)` - Get timeline memories +- `fetchYearMemories(days)` - Get year memories +- `fetchAllMemories()` - Generate all memories +- `setSelectedMemory(memory)` - Open memory viewer + +## API Endpoints + +### GET `/api/memories/on-this-day` +Returns photos from the same date in previous years. + +**Response:** +```json +{ + "images": [...], + "today": "December 14", + "years": [2024, 2023, 2022] +} +``` + +### GET `/api/memories/timeline?days=30` +Returns timeline-based memories for specified days. + +**Parameters:** +- `days` (query): Number of days to look back + +**Response:** +```json +{ + "memories": [...] +} +``` + +### POST `/api/memories/generate` +Generates all memories with clustering. 
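
For a quick smoke test of the endpoints documented above, a minimal sketch is shown below. It is not part of the patch: it assumes the backend is reachable at `http://localhost:8000` (the default the frontend service previously hard-coded) and that the `requests` package is installed.

```python
# Minimal sketch: exercise the documented Memories endpoints.
# Assumes the backend listens on http://localhost:8000 and `requests` is available.
import requests

BASE = "http://localhost:8000/api/memories"

# "On This Day": returns images plus the formatted date and matching years.
on_this_day = requests.get(f"{BASE}/on-this-day").json()
print(on_this_day["today"], on_this_day["years"])

# Timeline for the last 30 days: returns a list of memory objects.
timeline = requests.get(f"{BASE}/timeline", params={"days": 30}).json()
print(f"{len(timeline['memories'])} memories in the last 30 days")
```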
+ +**Parameters (query):** +- `location_radius_km` (default: 5.0) +- `date_tolerance_days` (default: 3) +- `min_images` (default: 2) + +**Response:** +```json +{ + "memories": [...], + "breakdown": { + "total": 10, + "location": 6, + "date": 4 + } +} +``` + +## Backend Implementation + +### Memory Clustering Algorithm + +**Location-based (DBSCAN):** +1. Extract GPS coordinates from images +2. Convert to radians for haversine distance +3. Apply DBSCAN clustering (5km radius) +4. Group images by cluster +5. Reverse geocode center coordinates +6. Generate title with city name and year + +**Date-based (Monthly grouping):** +1. Filter images without GPS +2. Group by year-month +3. Create monthly memories +4. Use date as title + +### Reverse Geocoding + +Maps GPS coordinates to city names using pre-defined database: + +```python +def _reverse_geocode(self, lat: float, lon: float) -> str: + """Find nearest city within 50km""" + for city_name, (city_lat, city_lon) in self.CITY_COORDINATES.items(): + distance = haversine_distance(lat, lon, city_lat, city_lon) + if distance < 50: + return city_name + return f"{lat:.4f}°, {lon:.4f}°" +``` + +**Supported Cities:** +- India: Mumbai, Delhi, Bangalore, Hyderabad, Chennai, Kolkata, Pune, Ahmedabad, Jaipur, Lucknow, Kanpur, Nagpur, Visakhapatnam, Bhopal, Patna, Vadodara +- Europe: London, Paris, Berlin, Madrid, Rome, Amsterdam, Prague, Vienna, Barcelona, Budapest, Lisbon +- Americas: New York, Los Angeles, Toronto, San Francisco, Chicago, Vancouver +- Asia-Pacific: Tokyo, Seoul, Singapore, Hong Kong, Sydney, Melbourne + +## Bug Fixes & Improvements + +### Event Bubbling Fix +**Problem:** Clicking MediaView controls (slideshow, info) closed the entire viewer. + +**Solution:** Conditional rendering of MemoryViewer backdrop: +```tsx +{!showMediaView && ( +
+ {/* Grid content */} +
+)} +``` + +### Image Upload Fix +**Problem:** Images without GPS couldn't be inserted into database. + +**Solution:** Always include latitude/longitude fields (set to `None` if not available): +```python +image_record = { + "latitude": latitude, # Can be None + "longitude": longitude, # Can be None + "captured_at": captured_at +} +``` + +### Title Display Enhancement +**Problem:** Generic "Location - Nov 2025" titles. + +**Solution:** Format as "Trip to [City], [Year]" using reverse geocoding: +```typescript +const year = memory.date_start ? new Date(memory.date_start).getFullYear() : ''; +displayTitle = `Trip to ${displayLocation}${year ? `, ${year}` : ''}`; +``` + +## Testing + +### Backend Tests +Located in `backend/tests/`: +- 100 unit tests covering all routes +- Run with: `pytest tests/` + +### Frontend Tests +Located in `frontend/src/pages/__tests__/`: +- Page rendering tests +- Run with: `npm test` + +### Manual Testing +Use `backend/test_memories_api.py` for API endpoint testing: +```bash +python test_memories_api.py +``` + +## Performance Considerations + +1. **Lazy Loading**: Images load on-demand with `loading="lazy"` +2. **Thumbnail Optimization**: Uses Tauri's `convertFileSrc()` for efficient file access +3. **Redux Memoization**: Uses `React.memo()` for card components +4. **Efficient Queries**: SQLite indexes on `latitude`, `longitude`, `captured_at` +5. **Background Processing**: Memory generation runs asynchronously + +## Future Enhancements + +- [ ] Custom memory creation +- [ ] Memory sharing and export +- [ ] Advanced filtering (by location, date range, etc.) +- [ ] Memory annotations and descriptions +- [ ] Map view for location-based memories +- [ ] AI-generated memory titles +- [ ] Multi-photo featured cards +- [ ] Memory notifications and reminders diff --git a/docs/overview/features.md b/docs/overview/features.md index 801b9b086..4643de9e1 100644 --- a/docs/overview/features.md +++ b/docs/overview/features.md @@ -4,11 +4,15 @@ - **Intelligent Photo Tagging**: Automatically tags photos based on detected objects, faces, and facial recognition. - **Traditional Gallery Management**: Complete album organization and management tools. +- **Memories Feature**: Automatically organize photos into meaningful collections based on location and date, with Google Photos-style presentation. ### Advanced Image Analysis - Object detection using **YOLOv11** for identifying various items in images - Face detection and clustering powered by **FaceNet**. 
+- **Spatial Clustering**: Groups photos by location using DBSCAN algorithm (5km radius) +- **Temporal Grouping**: Organizes photos by date with monthly grouping +- **Reverse Geocoding**: Identifies actual city names from GPS coordinates ### Privacy-Focused Design @@ -29,6 +33,49 @@ - Embedded metadata - Find visually or semantically similar images +### Memories Feature + +Automatically creates meaningful photo collections inspired by Google Photos: + +#### **On This Day** +- Shows photos from the same date in previous years +- Featured card display with "On this day last year" messaging +- Nostalgic look back at past moments + +#### **Smart Grouping** +- **Location-based Memories**: Groups photos taken at the same location (5km radius using DBSCAN clustering) + - Displays as "Trip to [City Name], [Year]" (e.g., "Trip to Jaipur, 2025") + - Uses reverse geocoding to show actual city names + - Supports 30+ major cities worldwide +- **Date-based Memories**: Groups photos by month for images without GPS data + - Perfect for photos without location metadata + - Organized chronologically + +#### **Intelligent Filtering** +- Filter by All, Location, or Date memories +- View counts for each category +- Seamless navigation between memory types + +#### **Memory Sections** +- **Recent Memories**: Last 30 days of captured moments +- **This Year**: All memories from the current year +- **All Memories**: Complete collection organized by recency + +#### **Rich Viewing Experience** +- Full-screen image viewer with zoom support +- Slideshow mode for automatic playback +- Image metadata panel with EXIF data +- Keyboard shortcuts (Space, arrows, +/-, R, ESC) +- Thumbnail navigation strip +- Favorite marking and folder opening + +#### **Technical Implementation** +- Backend: Python with DBSCAN clustering algorithm +- Frontend: React + Redux Toolkit for state management +- Real-time memory generation with configurable parameters +- Flexible clustering: works with date OR location (not both required) +- Efficient SQLite queries for fast retrieval + ### Cross-Platform Compatibility - Available on major operating systems (Windows, macOS, Linux) From d5ad5440daa5f159ebf414630918e58adfc38ebe Mon Sep 17 00:00:00 2001 From: harshit Date: Sun, 14 Dec 2025 10:32:42 +0530 Subject: [PATCH 06/22] orrect DBSCAN clustering and API configuration issues --- frontend/src/components/Memories/MemoryCard.tsx | 3 ++- frontend/src/services/memoriesApi.ts | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/Memories/MemoryCard.tsx b/frontend/src/components/Memories/MemoryCard.tsx index 13383a40c..61bd22a0e 100644 --- a/frontend/src/components/Memories/MemoryCard.tsx +++ b/frontend/src/components/Memories/MemoryCard.tsx @@ -36,7 +36,8 @@ export const MemoryCard = React.memo(({ memory, onClick }) => { : '/photo.png'; // Default placeholder // Determine memory type - const isDateBased = memory.center_lat === 0 && memory.center_lon === 0; + // Backend uses 0,0 as sentinel for date-based memories (no GPS data) + const isDateBased = memory.center_lat == null || memory.center_lon == null; // Format title based on memory type let displayTitle = memory.title || 'Untitled Memory'; diff --git a/frontend/src/services/memoriesApi.ts b/frontend/src/services/memoriesApi.ts index 4f0253812..892746ee1 100644 --- a/frontend/src/services/memoriesApi.ts +++ b/frontend/src/services/memoriesApi.ts @@ -7,8 +7,9 @@ import axios, { AxiosError } from 'axios'; import { convertFileSrc } from '@tauri-apps/api/core'; 
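
The feature docs above describe location grouping as DBSCAN over GPS coordinates with a 5 km radius and reverse geocoding against a city table within 50 km. The project's `haversine_distance` helper is not shown in this patch, so the following is only a sketch of the standard formula it presumably implements, followed by the clustering step using scikit-learn's haversine metric (which expects coordinates in radians). City coordinates below are illustrative, not project data.

```python
import math

import numpy as np
from sklearn.cluster import DBSCAN

EARTH_RADIUS_KM = 6371.0


def haversine_km(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """Great-circle distance between two (lat, lon) points in kilometres."""
    p1, p2 = math.radians(lat1), math.radians(lat2)
    dlat = math.radians(lat2 - lat1)
    dlon = math.radians(lon2 - lon1)
    a = math.sin(dlat / 2) ** 2 + math.cos(p1) * math.cos(p2) * math.sin(dlon / 2) ** 2
    return 2 * EARTH_RADIUS_KM * math.asin(math.sqrt(a))


# A point a few km from the Jaipur city centre stays well inside the 50 km
# reverse-geocoding threshold.
print(round(haversine_km(26.9124, 75.7873, 26.9500, 75.8200), 1))

# Clustering: eps must be expressed in radians when metric="haversine".
points_deg = np.array(
    [
        [26.9124, 75.7873],  # Jaipur
        [26.9200, 75.8000],  # ~1.5 km away -> expected in the same cluster
        [19.0760, 72.8777],  # Mumbai -> expected in its own cluster
    ]
)
eps = 5.0 / EARTH_RADIUS_KM  # 5 km radius in radians
labels = DBSCAN(
    eps=eps, min_samples=1, metric="haversine", algorithm="ball_tree"
).fit_predict(np.radians(points_deg))
print(labels)  # e.g. [0 0 1]
```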
+import { BACKEND_URL } from '@/config/Backend'; -const API_BASE_URL = 'http://localhost:8000/api/memories'; +const API_BASE_URL = `${BACKEND_URL}/api/memories`; // ============================================================================ // TypeScript Interfaces From 1b7ee7b7580636b6db0c0b775ed98393ae99f715 Mon Sep 17 00:00:00 2001 From: harshit Date: Sun, 14 Dec 2025 10:33:00 +0530 Subject: [PATCH 07/22] correct DBSCAN clustering and API configuration issues --- backend/app/database/images.py | 6 ++++-- backend/app/utils/memory_clustering.py | 18 ++++++++++-------- docs/backend/backend_python/openapi.json | 6 +++--- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/backend/app/database/images.py b/backend/app/database/images.py index bc74bd90d..ee220b7be 100644 --- a/backend/app/database/images.py +++ b/backend/app/database/images.py @@ -635,7 +635,9 @@ def db_get_images_near_location( # Calculate bounding box offsets lat_offset = radius_km / 111.0 - lon_offset = radius_km / (111.0 * abs(math.cos(math.radians(latitude)))) + cos_lat = abs(math.cos(math.radians(latitude))) + # Clamp to avoid division by near-zero at poles + lon_offset = radius_km / (111.0 * max(cos_lat, 0.01)) cursor.execute( """ @@ -897,7 +899,7 @@ def db_get_all_images_for_memories() -> List[dict]: return images except Exception as e: - logger.error(f"Error getting images with location: {e}") + logger.error(f"Error getting images from database: {e}") return [] finally: conn.close() diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py index ab851bfd0..44f801e93 100644 --- a/backend/app/utils/memory_clustering.py +++ b/backend/app/utils/memory_clustering.py @@ -130,9 +130,10 @@ def __init__( self.date_tolerance_days = date_tolerance_days self.min_images_per_memory = min_images_per_memory - # Convert km to degrees for DBSCAN - # Approximate: 1 degree latitude ≈ 111 km - self.location_eps_degrees = location_radius_km / 111.0 + # Convert km to radians for DBSCAN with haversine metric + # Earth radius in kilometers + EARTH_RADIUS_KM = 6371.0 + self.location_eps_radians = location_radius_km / EARTH_RADIUS_KM logger.info( f"MemoryClustering initialized: radius={location_radius_km}km, " @@ -256,7 +257,7 @@ def _cluster_date_images( if isinstance(captured_at, str): try: dt = datetime.fromisoformat(captured_at.replace("Z", "")) - except: + except (ValueError, AttributeError): continue elif isinstance(captured_at, datetime): dt = captured_at @@ -661,18 +662,19 @@ def _cluster_by_location( # Extract coordinates coordinates = np.array([[img["latitude"], img["longitude"]] for img in images]) + # Convert to radians for haversine metric + coordinates_rad = np.radians(coordinates) + # Apply DBSCAN clustering - # eps: maximum distance between two samples (in degrees) + # eps: maximum distance between two samples (in radians for haversine) # min_samples: minimum number of samples to form a cluster clustering = DBSCAN( - eps=self.location_eps_degrees, + eps=self.location_eps_radians, min_samples=1, # Even single photos can form a cluster metric="haversine", # Use haversine distance for lat/lon algorithm="ball_tree", ) - # Convert to radians for haversine - coordinates_rad = np.radians(coordinates) labels = clustering.fit_predict(coordinates_rad) # Group images by cluster label diff --git a/docs/backend/backend_python/openapi.json b/docs/backend/backend_python/openapi.json index 12e811104..a8c1103f7 100644 --- a/docs/backend/backend_python/openapi.json +++ 
b/docs/backend/backend_python/openapi.json @@ -1387,7 +1387,7 @@ "memories" ], "summary": "Get Timeline", - "description": "Get memories from the past N days as a timeline.\n\nThis endpoint:\n1. Calculates date range (today - N days to today)\n2. Fetches images within that date range\n3. Clusters them into memories\n4. Returns timeline of memories\n\nArgs:\n days: Number of days to look back (default: 365 = 1 year)\n location_radius_km: Location clustering radius (default: 5km)\n date_tolerance_days: Date tolerance for temporal clustering (default: 3)\n \nReturns:\n TimelineResponse with memories ordered by date\n \nRaises:\n HTTPException: If database query fails", + "description": "Get memories from the past N days as a timeline.\n\nThis endpoint:\n1. Calculates date range (today - N days to today)\n2. Fetches images within that date range\n3. Clusters them into memories\n4. Returns timeline of memories\n\nArgs:\n days: Number of days to look back (default: 365 = 1 year)\n location_radius_km: Location clustering radius (default: 5km)\n date_tolerance_days: Date tolerance for temporal clustering (default: 3)\n\nReturns:\n TimelineResponse with memories ordered by date\n\nRaises:\n HTTPException: If database query fails", "operationId": "get_timeline_api_memories_timeline_get", "parameters": [ { @@ -1463,7 +1463,7 @@ "memories" ], "summary": "Get On This Day", - "description": "Get photos taken on this date in previous years.\n\nThis endpoint:\n1. Gets current month and day\n2. Searches for images from this month-day in all previous years\n3. Groups by year\n4. Returns images sorted by year (most recent first)\n\nReturns:\n OnThisDayResponse with images from this date in previous years\n \nRaises:\n HTTPException: If database query fails", + "description": "Get photos taken on this date in previous years.\n\nThis endpoint:\n1. Gets current month and day\n2. Searches for images from this month-day in all previous years\n3. Groups by year\n4. Returns images sorted by year (most recent first)\n\nReturns:\n OnThisDayResponse with images from this date in previous years\n\nRaises:\n HTTPException: If database query fails", "operationId": "get_on_this_day_api_memories_on_this_day_get", "responses": { "200": { @@ -1485,7 +1485,7 @@ "memories" ], "summary": "Get Locations", - "description": "Get all unique locations where photos were taken.\n\nThis endpoint:\n1. Fetches all images with GPS coordinates\n2. Clusters them by location\n3. Returns location clusters with photo counts\n4. Includes sample images for each location\n\nArgs:\n location_radius_km: Location clustering radius (default: 5km)\n max_sample_images: Maximum sample images per location (default: 5)\n \nReturns:\n LocationsResponse with list of location clusters\n \nRaises:\n HTTPException: If database query fails", + "description": "Get all unique locations where photos were taken.\n\nThis endpoint:\n1. Fetches all images with GPS coordinates\n2. Clusters them by location\n3. Returns location clusters with photo counts\n4. 
Includes sample images for each location\n\nArgs:\n location_radius_km: Location clustering radius (default: 5km)\n max_sample_images: Maximum sample images per location (default: 5)\n\nReturns:\n LocationsResponse with list of location clusters\n\nRaises:\n HTTPException: If database query fails", "operationId": "get_locations_api_memories_locations_get", "parameters": [ { From 536455c8e96b25b84719d5a0d019758027448ce2 Mon Sep 17 00:00:00 2001 From: harshit Date: Sun, 14 Dec 2025 10:45:56 +0530 Subject: [PATCH 08/22] Add the missing /placeholder-image.png asset to frontend/public/ --- frontend/src/components/Memories/FeaturedMemoryCard.tsx | 2 +- frontend/src/components/Memories/MemoryViewer.tsx | 2 +- frontend/src/services/memoriesApi.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/Memories/FeaturedMemoryCard.tsx b/frontend/src/components/Memories/FeaturedMemoryCard.tsx index 713120733..b7ce17c76 100644 --- a/frontend/src/components/Memories/FeaturedMemoryCard.tsx +++ b/frontend/src/components/Memories/FeaturedMemoryCard.tsx @@ -40,7 +40,7 @@ export const FeaturedMemoryCard = React.memo( // Handle image load error const handleImageError = (e: React.SyntheticEvent) => { - e.currentTarget.src = '/placeholder-image.png'; + e.currentTarget.src = '/photo.png'; }; return ( diff --git a/frontend/src/components/Memories/MemoryViewer.tsx b/frontend/src/components/Memories/MemoryViewer.tsx index 573234e42..65358d61d 100644 --- a/frontend/src/components/Memories/MemoryViewer.tsx +++ b/frontend/src/components/Memories/MemoryViewer.tsx @@ -82,7 +82,7 @@ export const MemoryViewer: React.FC = () => { // Handle image load error const handleImageError = (e: React.SyntheticEvent) => { - e.currentTarget.src = '/placeholder-image.png'; + e.currentTarget.src = '/photo.png'; }; return ( diff --git a/frontend/src/services/memoriesApi.ts b/frontend/src/services/memoriesApi.ts index 892746ee1..76a8f858a 100644 --- a/frontend/src/services/memoriesApi.ts +++ b/frontend/src/services/memoriesApi.ts @@ -467,5 +467,5 @@ export const getThumbnailUrl = (image: MemoryImage): string => { } // Fallback to placeholder - return '/placeholder-image.png'; + return '/photo.png'; }; From c5f1a6abd7fb5c6b803d3c0d43fa1b3da6c58b5c Mon Sep 17 00:00:00 2001 From: harshit Date: Mon, 26 Jan 2026 18:46:16 +0530 Subject: [PATCH 09/22] fix:resolved the iamge viewer error --- backend/app/routes/memories.py | 1 + docs/backend/backend_python/openapi.json | 12 +++++ frontend/src/components/Media/MediaView.tsx | 10 +++- .../src/components/Memories/MemoryViewer.tsx | 48 +++++++++++++++++-- frontend/src/services/memoriesApi.ts | 1 + frontend/src/store/slices/memoriesSlice.ts | 40 ++++++++++++++++ frontend/src/types/Media.ts | 1 + 7 files changed, 108 insertions(+), 5 deletions(-) diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index c93b629f0..28008189c 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -48,6 +48,7 @@ class MemoryImage(BaseModel): latitude: Optional[float] longitude: Optional[float] captured_at: Optional[str] + isFavourite: Optional[bool] = False # Add favorite status class Memory(BaseModel): diff --git a/docs/backend/backend_python/openapi.json b/docs/backend/backend_python/openapi.json index a8c1103f7..06375b840 100644 --- a/docs/backend/backend_python/openapi.json +++ b/docs/backend/backend_python/openapi.json @@ -2730,6 +2730,18 @@ } ], "title": "Captured At" + }, + "isFavourite": { + "anyOf": [ + { + "type": 
"boolean" + }, + { + "type": "null" + } + ], + "title": "Isfavourite", + "default": false } }, "type": "object", diff --git a/frontend/src/components/Media/MediaView.tsx b/frontend/src/components/Media/MediaView.tsx index 7c60862fa..53a4ccbf0 100644 --- a/frontend/src/components/Media/MediaView.tsx +++ b/frontend/src/components/Media/MediaView.tsx @@ -26,6 +26,7 @@ export function MediaView({ onClose, type = 'image', images = [], + onToggleFavorite, }: MediaViewProps) { const dispatch = useDispatch(); @@ -107,11 +108,16 @@ export function MediaView({ const handleToggleFavourite = useCallback(() => { if (currentImage) { if (currentImage?.id) { - toggleFavourite(currentImage.id); + // Use custom handler if provided, otherwise use default + if (onToggleFavorite) { + onToggleFavorite(currentImage.id); + } else { + toggleFavourite(currentImage.id); + } } if (location.pathname === ROUTES.FAVOURITES) handleClose(); } - }, [currentImage, toggleFavourite]); + }, [currentImage, toggleFavourite, onToggleFavorite, location.pathname, handleClose]); const handleZoomIn = useCallback(() => { imageViewerRef.current?.zoomIn(); diff --git a/frontend/src/components/Memories/MemoryViewer.tsx b/frontend/src/components/Memories/MemoryViewer.tsx index 65358d61d..80b1df3c2 100644 --- a/frontend/src/components/Memories/MemoryViewer.tsx +++ b/frontend/src/components/Memories/MemoryViewer.tsx @@ -11,8 +11,9 @@ import { useAppDispatch, useAppSelector } from '@/store/hooks'; import { setSelectedMemory, selectSelectedMemory, + toggleImageFavorite, } from '@/store/slices/memoriesSlice'; -import { setCurrentViewIndex } from '@/features/imageSlice'; +import { setCurrentViewIndex, setImages } from '@/features/imageSlice'; import { MediaView } from '@/components/Media/MediaView'; import { formatDateRangeRelative, @@ -21,6 +22,7 @@ import { generateMemoryTitle, formatLocationName, } from '@/services/memoriesApi'; +import { togglefav } from '@/api/api-functions/togglefav'; /** * Memory Viewer Modal Component @@ -35,12 +37,51 @@ export const MemoryViewer: React.FC = () => { dispatch(setSelectedMemory(null)); }, [dispatch]); + // Handle favorite toggle - update both API and Redux state + const handleToggleFavorite = useCallback( + async (imageId: string) => { + try { + // Call API to toggle favorite in database + await togglefav(imageId); + // Update Redux state to reflect the change immediately + dispatch(toggleImageFavorite(imageId)); + } catch (error) { + console.error('Failed to toggle favorite:', error); + } + }, + [dispatch], + ); + // Handle image click - open MediaView const handleImageClick = useCallback( (index: number) => { if (!memory) return; - // Just set the current index - MediaView will use the images prop + // Convert memory images to Image[] format for Redux state + const formattedImages = memory.images.map((img) => ({ + id: img.id, + path: img.path, + thumbnailPath: img.thumbnailPath, + folder_id: '', + isTagged: false, + isFavourite: img.isFavourite || false, + tags: [], + metadata: { + name: img.path.split('/').pop() || '', + date_created: img.captured_at, + width: 0, + height: 0, + file_location: img.path, + file_size: 0, + item_type: 'image' as const, + latitude: img.latitude || undefined, + longitude: img.longitude || undefined, + }, + })); + + // Set images in Redux state first + dispatch(setImages(formattedImages)); + // Then set the current index dispatch(setCurrentViewIndex(index)); setShowMediaView(true); }, @@ -281,13 +322,14 @@ export const MemoryViewer: React.FC = () => { ({ id: img.id, path: 
img.path, thumbnailPath: img.thumbnailPath, folder_id: '', // Memory images don't have folder_id isTagged: false, // Memory images don't track tagging - isFavourite: false, // Can be added later if needed + isFavourite: img.isFavourite || false, // Use actual favorite status from backend tags: [], // Can be added later if needed metadata: { name: img.path.split('/').pop() || '', diff --git a/frontend/src/services/memoriesApi.ts b/frontend/src/services/memoriesApi.ts index 76a8f858a..07ee6e6a0 100644 --- a/frontend/src/services/memoriesApi.ts +++ b/frontend/src/services/memoriesApi.ts @@ -25,6 +25,7 @@ export interface MemoryImage { latitude: number | null; longitude: number | null; captured_at: string | null; // ISO 8601 format + isFavourite?: boolean; // Favorite status } /** diff --git a/frontend/src/store/slices/memoriesSlice.ts b/frontend/src/store/slices/memoriesSlice.ts index 859c9c908..f3d5b509b 100644 --- a/frontend/src/store/slices/memoriesSlice.ts +++ b/frontend/src/store/slices/memoriesSlice.ts @@ -210,6 +210,45 @@ const memoriesSlice = createSlice({ state.selectedMemory = action.payload; }, + /** + * Toggle favorite status of an image across all memories + */ + toggleImageFavorite: (state, action: PayloadAction) => { + const imageId = action.payload; + + // Helper function to update image in a memory array + const updateMemoriesArray = (memories: Memory[]) => { + memories.forEach(memory => { + memory.images.forEach(image => { + if (image.id === imageId) { + image.isFavourite = !image.isFavourite; + } + }); + }); + }; + + // Update across all memory collections + updateMemoriesArray(state.allMemories); + updateMemoriesArray(state.recentMemories); + updateMemoriesArray(state.yearMemories); + + // Update onThisDay images + state.onThisDayImages.forEach(image => { + if (image.id === imageId) { + image.isFavourite = !image.isFavourite; + } + }); + + // Update selected memory if it exists + if (state.selectedMemory) { + state.selectedMemory.images.forEach(image => { + if (image.id === imageId) { + image.isFavourite = !image.isFavourite; + } + }); + } + }, + /** * Clear all errors */ @@ -311,6 +350,7 @@ const memoriesSlice = createSlice({ export const { setSelectedMemory, + toggleImageFavorite, clearErrors, resetMemories } = memoriesSlice.actions; diff --git a/frontend/src/types/Media.ts b/frontend/src/types/Media.ts index d7e0712fc..fbea71259 100644 --- a/frontend/src/types/Media.ts +++ b/frontend/src/types/Media.ts @@ -36,6 +36,7 @@ export interface MediaViewProps { onClose?: () => void; type?: string; images: Image[]; + onToggleFavorite?: (imageId: string) => void | Promise; } export interface SortingControlsProps { From ef58dae8ad2f64bfd71ea0699fc8bdb5e9bff32b Mon Sep 17 00:00:00 2001 From: harshit Date: Mon, 26 Jan 2026 23:49:38 +0530 Subject: [PATCH 10/22] fix:nullable values and migration guard --- backend/main.py | 14 +++++++++++++- docs/backend/backend_python/openapi.json | 22 +++++++++++++++++----- 2 files changed, 30 insertions(+), 6 deletions(-) diff --git a/backend/main.py b/backend/main.py index e09bceaae..1f4c3f70b 100644 --- a/backend/main.py +++ b/backend/main.py @@ -46,7 +46,19 @@ async def lifespan(app: FastAPI): generate_openapi_json() db_create_folders_table() db_create_images_table() - db_migrate_add_memories_columns() # Add Memories columns to existing database + + # Only run migrations in the primary process or when explicitly enabled + should_run_migrations = os.getenv("RUN_MIGRATIONS", "true").lower() == "true" + if should_run_migrations: + try: + 
db_migrate_add_memories_columns() + logger.info("Database migrations completed successfully") + except Exception as e: + logger.error(f"Failed to run database migrations: {e}", exc_info=True) + + else: + logger.info("Skipping migrations (RUN_MIGRATIONS not set or false)") + db_create_YOLO_classes_table() db_create_clusters_table() # Create clusters table first since faces references it db_create_faces_table() diff --git a/docs/backend/backend_python/openapi.json b/docs/backend/backend_python/openapi.json index c507bdcd1..421377d16 100644 --- a/docs/backend/backend_python/openapi.json +++ b/docs/backend/backend_python/openapi.json @@ -2659,11 +2659,25 @@ "title": "Thumbnail Image Id" }, "center_lat": { - "type": "number", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], "title": "Center Lat" }, "center_lon": { - "type": "number", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], "title": "Center Lon" } }, @@ -2677,9 +2691,7 @@ "date_end", "image_count", "images", - "thumbnail_image_id", - "center_lat", - "center_lon" + "thumbnail_image_id" ], "title": "Memory", "description": "Memory object containing grouped images." From 719bbc47b353b6a6f03594a48c7a8e4b4133117e Mon Sep 17 00:00:00 2001 From: harshit Date: Tue, 27 Jan 2026 00:32:40 +0530 Subject: [PATCH 11/22] minor fixes and linting and formatting --- backend/app/database/albums.py | 20 +- backend/app/database/face_clusters.py | 22 +- backend/app/database/faces.py | 30 +-- backend/app/database/folders.py | 28 +-- backend/app/database/images.py | 48 ++--- backend/app/database/metadata.py | 4 +- backend/app/logging/setup_logging.py | 53 +---- backend/app/models/FaceDetector.py | 4 +- backend/app/models/FaceNet.py | 8 +- backend/app/models/ObjectClassifier.py | 4 +- backend/app/models/YOLO.py | 20 +- backend/app/routes/albums.py | 56 ++--- backend/app/routes/face_clusters.py | 14 +- backend/app/routes/folders.py | 48 ++--- backend/app/routes/images.py | 12 +- backend/app/routes/memories.py | 80 ++------ backend/app/utils/API.py | 4 +- backend/app/utils/FaceNet.py | 4 +- backend/app/utils/YOLO.py | 8 +- .../app/utils/extract_location_metadata.py | 35 +--- backend/app/utils/faceSearch.py | 4 +- backend/app/utils/face_clusters.py | 94 +++------ backend/app/utils/folders.py | 22 +- backend/app/utils/image_metadata.py | 20 +- backend/app/utils/images.py | 74 ++----- backend/app/utils/memory_clustering.py | 123 +++-------- backend/app/utils/memory_monitor.py | 8 +- backend/app/utils/verify_memories_setup.py | 35 +--- backend/extract_metadata_simple.py | 69 +++---- backend/main.py | 27 +-- backend/migrate_add_memories_columns.py | 88 ++++---- backend/test_auto_gps_extraction.py | 38 ++-- backend/test_memories_api.py | 118 +++++------ backend/tests/test_albums.py | 51 ++--- backend/tests/test_face_clusters.py | 36 +--- backend/tests/test_folders.py | 48 ++--- backend/tests/test_user_preferences.py | 85 ++------ frontend/src/components/Media/MediaView.tsx | 8 +- .../src/components/Memories/MemoriesPage.tsx | 16 +- .../src/components/Memories/MemoryCard.tsx | 2 +- .../src/components/Memories/MemoryViewer.tsx | 18 +- frontend/src/services/memoriesApi.ts | 184 +++++++++++------ frontend/src/store/hooks.ts | 2 +- frontend/src/store/slices/memoriesSlice.ts | 191 +++++++++--------- 44 files changed, 666 insertions(+), 1197 deletions(-) diff --git a/backend/app/database/albums.py b/backend/app/database/albums.py index b9e5b149a..f3259025b 100644 --- a/backend/app/database/albums.py +++ 
b/backend/app/database/albums.py @@ -96,9 +96,7 @@ def db_insert_album( try: password_hash = None if password: - password_hash = bcrypt.hashpw( - password.encode("utf-8"), bcrypt.gensalt() - ).decode("utf-8") + password_hash = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8") cursor.execute( """ INSERT INTO albums (album_id, album_name, description, is_hidden, password_hash) @@ -123,9 +121,7 @@ def db_update_album( try: if password is not None: # Update with new password - password_hash = bcrypt.hashpw( - password.encode("utf-8"), bcrypt.gensalt() - ).decode("utf-8") + password_hash = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8") cursor.execute( """ UPDATE albums @@ -159,9 +155,7 @@ def db_get_album_images(album_id: str): conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() try: - cursor.execute( - "SELECT image_id FROM album_images WHERE album_id = ?", (album_id,) - ) + cursor.execute("SELECT image_id FROM album_images WHERE album_id = ?", (album_id,)) images = cursor.fetchall() return [img[0] for img in images] finally: @@ -172,9 +166,7 @@ def db_add_images_to_album(album_id: str, image_ids: list[str]): with get_db_connection() as conn: cursor = conn.cursor() - query = ( - f"SELECT id FROM images WHERE id IN ({','.join('?' for _ in image_ids)})" - ) + query = f"SELECT id FROM images WHERE id IN ({','.join('?' for _ in image_ids)})" cursor.execute(query, image_ids) valid_images = [row[0] for row in cursor.fetchall()] @@ -223,9 +215,7 @@ def verify_album_password(album_id: str, password: str) -> bool: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() try: - cursor.execute( - "SELECT password_hash FROM albums WHERE album_id = ?", (album_id,) - ) + cursor.execute("SELECT password_hash FROM albums WHERE album_id = ?", (album_id,)) row = cursor.fetchone() if not row or not row[0]: return False diff --git a/backend/app/database/face_clusters.py b/backend/app/database/face_clusters.py index ceac7f556..8666af258 100644 --- a/backend/app/database/face_clusters.py +++ b/backend/app/database/face_clusters.py @@ -70,9 +70,7 @@ def db_delete_all_clusters(cursor: Optional[sqlite3.Cursor] = None) -> int: conn.close() -def db_insert_clusters_batch( - clusters: List[ClusterData], cursor: Optional[sqlite3.Cursor] = None -) -> List[ClusterId]: +def db_insert_clusters_batch(clusters: List[ClusterData], cursor: Optional[sqlite3.Cursor] = None) -> List[ClusterId]: """ Insert multiple clusters into the database in batch. 
@@ -145,9 +143,7 @@ def db_get_cluster_by_id(cluster_id: ClusterId) -> Optional[ClusterData]: row = cursor.fetchone() if row: - return ClusterData( - cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2] - ) + return ClusterData(cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2]) return None finally: conn.close() @@ -164,19 +160,13 @@ def db_get_all_clusters() -> List[ClusterData]: cursor = conn.cursor() try: - cursor.execute( - "SELECT cluster_id, cluster_name, face_image_base64 FROM face_clusters ORDER BY cluster_id" - ) + cursor.execute("SELECT cluster_id, cluster_name, face_image_base64 FROM face_clusters ORDER BY cluster_id") rows = cursor.fetchall() clusters = [] for row in rows: - clusters.append( - ClusterData( - cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2] - ) - ) + clusters.append(ClusterData(cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2])) return clusters finally: @@ -232,9 +222,7 @@ def db_update_cluster( conn.close() -def db_get_all_clusters_with_face_counts() -> ( - List[Dict[str, Union[str, Optional[str], int]]] -): +def db_get_all_clusters_with_face_counts() -> List[Dict[str, Union[str, Optional[str], int]]]: """ Retrieve all clusters with their face counts and stored face images. diff --git a/backend/app/database/faces.py b/backend/app/database/faces.py index 0e43f7117..291edfc57 100644 --- a/backend/app/database/faces.py +++ b/backend/app/database/faces.py @@ -113,32 +113,18 @@ def db_insert_face_embeddings_by_image_id( """ # Handle multiple faces in one image - if ( - isinstance(embeddings, list) - and len(embeddings) > 0 - and isinstance(embeddings[0], np.ndarray) - ): + if isinstance(embeddings, list) and len(embeddings) > 0 and isinstance(embeddings[0], np.ndarray): face_ids = [] for i, emb in enumerate(embeddings): - conf = ( - confidence[i] - if isinstance(confidence, list) and i < len(confidence) - else confidence - ) + conf = confidence[i] if isinstance(confidence, list) and i < len(confidence) else confidence bb = bbox[i] if isinstance(bbox, list) and i < len(bbox) else bbox - cid = ( - cluster_id[i] - if isinstance(cluster_id, list) and i < len(cluster_id) - else cluster_id - ) + cid = cluster_id[i] if isinstance(cluster_id, list) and i < len(cluster_id) else cluster_id face_id = db_insert_face_embeddings(image_id, emb, conf, bb, cid) face_ids.append(face_id) return face_ids else: # Single face - return db_insert_face_embeddings( - image_id, embeddings, confidence, bbox, cluster_id - ) + return db_insert_face_embeddings(image_id, embeddings, confidence, bbox, cluster_id) def get_all_face_embeddings(): @@ -243,9 +229,7 @@ def db_get_faces_unassigned_clusters() -> List[Dict[str, Union[FaceId, FaceEmbed conn.close() -def db_get_all_faces_with_cluster_names() -> ( - List[Dict[str, Union[FaceId, FaceEmbedding, Optional[str]]]] -): +def db_get_all_faces_with_cluster_names() -> List[Dict[str, Union[FaceId, FaceEmbedding, Optional[str]]]]: """ Get all faces with their corresponding cluster names. 
@@ -385,9 +369,7 @@ def db_get_cluster_mean_embeddings() -> List[Dict[str, Union[str, FaceEmbedding] stacked_embeddings = np.stack(embeddings_list) mean_embedding = np.mean(stacked_embeddings, axis=0) - cluster_means.append( - {"cluster_id": cluster_id, "mean_embedding": mean_embedding} - ) + cluster_means.append({"cluster_id": cluster_id, "mean_embedding": mean_embedding}) return cluster_means finally: diff --git a/backend/app/database/folders.py b/backend/app/database/folders.py index 3a2ac976d..b12d1f912 100644 --- a/backend/app/database/folders.py +++ b/backend/app/database/folders.py @@ -192,9 +192,7 @@ def db_delete_folder(folder_path: FolderPath) -> None: cursor = conn.cursor() try: abs_folder_path = os.path.abspath(folder_path) - cursor.execute( - "PRAGMA foreign_keys = ON;" - ) # Important for deleting rows in image_id_mapping and images table because they reference this folder_id + cursor.execute("PRAGMA foreign_keys = ON;") # Important for deleting rows in image_id_mapping and images table because they reference this folder_id conn.commit() cursor.execute( "SELECT folder_id FROM folders WHERE folder_path = ?", @@ -203,9 +201,7 @@ def db_delete_folder(folder_path: FolderPath) -> None: existing_folder = cursor.fetchone() if not existing_folder: - raise ValueError( - f"Error: Folder '{folder_path}' does not exist in the database." - ) + raise ValueError(f"Error: Folder '{folder_path}' does not exist in the database.") cursor.execute( "DELETE FROM folders WHERE folder_path = ?", @@ -217,9 +213,7 @@ def db_delete_folder(folder_path: FolderPath) -> None: conn.close() -def db_update_parent_ids_for_subtree( - root_folder_path: FolderPath, folder_map: FolderMap -) -> None: +def db_update_parent_ids_for_subtree(root_folder_path: FolderPath, folder_map: FolderMap) -> None: """ Update parent_folder_id for all folders in the subtree rooted at root_folder_path. Only updates folders whose parent_folder_id is NULL. @@ -252,9 +246,7 @@ def db_folder_exists(folder_path: FolderPath) -> bool: cursor = conn.cursor() try: abs_path = os.path.abspath(folder_path) - cursor.execute( - "SELECT folder_id FROM folders WHERE folder_path = ?", (abs_path,) - ) + cursor.execute("SELECT folder_id FROM folders WHERE folder_path = ?", (abs_path,)) result = cursor.fetchone() return bool(result) finally: @@ -273,18 +265,14 @@ def db_find_parent_folder_id(folder_path: FolderPath) -> Optional[FolderId]: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() try: - cursor.execute( - "SELECT folder_id FROM folders WHERE folder_path = ?", (parent_path,) - ) + cursor.execute("SELECT folder_id FROM folders WHERE folder_path = ?", (parent_path,)) result = cursor.fetchone() return result[0] if result else None finally: conn.close() -def db_update_ai_tagging_batch( - folder_ids: List[FolderId], ai_tagging_enabled: bool -) -> int: +def db_update_ai_tagging_batch(folder_ids: List[FolderId], ai_tagging_enabled: bool) -> int: """ Update AI_Tagging status for multiple folders in a single transaction. folder_ids: list of folder IDs to update @@ -394,9 +382,7 @@ def db_get_folder_ids_by_paths( conn.close() -def db_get_all_folder_details() -> ( - List[Tuple[str, str, Optional[str], int, bool, Optional[bool]]] -): +def db_get_all_folder_details() -> List[Tuple[str, str, Optional[str], int, bool, Optional[bool]]]: """ Get all folder details including folder_id, folder_path, parent_folder_id, last_modified_time, AI_Tagging, and taggingCompleted. 
diff --git a/backend/app/database/images.py b/backend/app/database/images.py index ee220b7be..742fb0987 100644 --- a/backend/app/database/images.py +++ b/backend/app/database/images.py @@ -80,15 +80,9 @@ def db_create_images_table() -> None: # Create indexes for Memories feature queries cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)") - cursor.execute( - "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" - ) - cursor.execute( - "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)" - ) - cursor.execute( - "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)" - ) + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)") + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)") + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)") # Create new image_classes junction table cursor.execute( @@ -117,13 +111,9 @@ def db_migrate_add_memories_columns() -> None: try: # Check if images table exists - cursor.execute( - "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" - ) + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") if not cursor.fetchone(): - logger.info( - "Images table does not exist yet, will be created by db_create_images_table()" - ) + logger.info("Images table does not exist yet, will be created by db_create_images_table()") conn.close() return @@ -150,18 +140,10 @@ def db_migrate_add_memories_columns() -> None: changes_made = True # Create indexes - cursor.execute( - "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)" - ) - cursor.execute( - "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" - ) - cursor.execute( - "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)" - ) - cursor.execute( - "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)" - ) + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)") + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)") + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)") + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)") if changes_made: logger.info("Memories feature columns migration completed") @@ -288,9 +270,7 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: "isFavourite": bool(is_favourite), "latitude": latitude, "longitude": longitude, - "captured_at": ( - captured_at if captured_at else None - ), # SQLite returns string + "captured_at": (captured_at if captured_at else None), # SQLite returns string "tags": [], } @@ -529,9 +509,7 @@ def db_toggle_image_favourite_status(image_id: str) -> bool: # ============================================================================ -def db_get_images_by_date_range( - start_date: datetime, end_date: datetime, include_favorites_only: bool = False -) -> List[dict]: +def db_get_images_by_date_range(start_date: datetime, end_date: datetime, include_favorites_only: bool = False) -> List[dict]: """ Get images captured within a date range for Memories timeline. 
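# Hedged usage sketch for db_get_images_by_date_range as declared above;
# `recent_favourite_images` is an illustrative helper, not a function added by this patch.
from datetime import datetime, timedelta
from typing import List

from app.database.images import db_get_images_by_date_range


def recent_favourite_images(days: int = 30) -> List[dict]:
    """Return favourite images captured within the last `days` days."""
    end = datetime.now()
    start = end - timedelta(days=days)
    return db_get_images_by_date_range(start, end, include_favorites_only=True)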
@@ -608,9 +586,7 @@ def db_get_images_by_date_range( conn.close() -def db_get_images_near_location( - latitude: float, longitude: float, radius_km: float = 5.0 -) -> List[dict]: +def db_get_images_near_location(latitude: float, longitude: float, radius_km: float = 5.0) -> List[dict]: """ Get images near a location within radius_km using bounding box approximation. diff --git a/backend/app/database/metadata.py b/backend/app/database/metadata.py index d431f6e2b..573cb1697 100644 --- a/backend/app/database/metadata.py +++ b/backend/app/database/metadata.py @@ -55,9 +55,7 @@ def db_get_metadata() -> Optional[Dict[str, Any]]: conn.close() -def db_update_metadata( - metadata: Dict[str, Any], cursor: Optional[sqlite3.Cursor] = None -) -> bool: +def db_update_metadata(metadata: Dict[str, Any], cursor: Optional[sqlite3.Cursor] = None) -> bool: """ Update the metadata in the database. diff --git a/backend/app/logging/setup_logging.py b/backend/app/logging/setup_logging.py index 0eedecaa9..3fed7a4ce 100644 --- a/backend/app/logging/setup_logging.py +++ b/backend/app/logging/setup_logging.py @@ -78,13 +78,7 @@ def format(self, record: logging.LogRecord) -> str: component_start = formatted_message.find(f"[{component_prefix}]") if component_start >= 0: component_end = component_start + len(f"[{component_prefix}]") - formatted_message = ( - formatted_message[:component_start] - + self.COLORS[component_color] - + formatted_message[component_start:component_end] - + self.COLORS["reset"] - + formatted_message[component_end:] - ) + formatted_message = formatted_message[:component_start] + self.COLORS[component_color] + formatted_message[component_start:component_end] + self.COLORS["reset"] + formatted_message[component_end:] # Add color to the log level level_color = self.level_colors.get(record.levelname, "") @@ -99,13 +93,7 @@ def format(self, record: logging.LogRecord) -> str: level_start = formatted_message.find(f" {record.levelname} ") if level_start >= 0: level_end = level_start + len(f" {record.levelname} ") - formatted_message = ( - formatted_message[:level_start] - + color_codes - + formatted_message[level_start:level_end] - + self.COLORS["reset"] - + formatted_message[level_end:] - ) + formatted_message = formatted_message[:level_start] + color_codes + formatted_message[level_start:level_end] + self.COLORS["reset"] + formatted_message[level_end:] return formatted_message @@ -117,12 +105,7 @@ def load_config() -> Dict[str, Any]: Returns: Dict containing the logging configuration """ - config_path = ( - Path(__file__).parent.parent.parent.parent - / "utils" - / "logging" - / "logging_config.json" - ) + config_path = Path(__file__).parent.parent.parent.parent / "utils" / "logging" / "logging_config.json" try: with open(config_path, "r") as f: return json.load(f) @@ -142,16 +125,12 @@ def setup_logging(component_name: str, environment: Optional[str] = None) -> Non """ config = load_config() if not config: - print( - "No logging configuration found. Using default settings.", file=sys.stderr - ) + print("No logging configuration found. 
Using default settings.", file=sys.stderr) return # Get environment settings if not environment: - environment = os.environ.get( - "ENV", config.get("default_environment", "development") - ) + environment = os.environ.get("ENV", config.get("default_environment", "development")) env_settings = config.get("environments", {}).get(environment, {}) log_level = getattr(logging, env_settings.get("level", "INFO"), logging.INFO) @@ -159,9 +138,7 @@ def setup_logging(component_name: str, environment: Optional[str] = None) -> Non console_logging = env_settings.get("console_logging", True) # Get component configuration - component_config = config.get("components", {}).get( - component_name, {"prefix": component_name.upper(), "color": "white"} - ) + component_config = config.get("components", {}).get(component_name, {"prefix": component_name.upper(), "color": "white"}) # Configure root logger root_logger = logging.getLogger() @@ -184,14 +161,8 @@ def setup_logging(component_name: str, environment: Optional[str] = None) -> Non console_handler.setLevel(log_level) # Create formatter with component and color information - fmt = ( - config.get("formatters", {}) - .get("default", {}) - .get("format", "[%(component)s] | %(levelname)s | %(message)s") - ) - formatter = ColorFormatter( - fmt, component_config, config.get("colors", {}), use_colors - ) + fmt = config.get("formatters", {}).get("default", {}).get("format", "[%(component)s] | %(levelname)s | %(message)s") + formatter = ColorFormatter(fmt, component_config, config.get("colors", {}), use_colors) console_handler.setFormatter(formatter) root_logger.addHandler(console_handler) @@ -269,13 +240,9 @@ def configure_uvicorn_logging(component_name: str) -> None: # Make sure the handler uses our ColorFormatter config = load_config() - component_config = config.get("components", {}).get( - component_name, {"prefix": component_name.upper(), "color": "white"} - ) + component_config = config.get("components", {}).get(component_name, {"prefix": component_name.upper(), "color": "white"}) level_colors = config.get("colors", {}) - env_settings = config.get("environments", {}).get( - os.environ.get("ENV", config.get("default_environment", "development")), {} - ) + env_settings = config.get("environments", {}).get(os.environ.get("ENV", config.get("default_environment", "development")), {}) use_colors = env_settings.get("colored_output", True) fmt = "[%(component)s] | %(module)s | %(levelname)s | %(message)s" diff --git a/backend/app/models/FaceDetector.py b/backend/app/models/FaceDetector.py index 9e10fd5fc..407e81d58 100644 --- a/backend/app/models/FaceDetector.py +++ b/backend/app/models/FaceDetector.py @@ -56,9 +56,7 @@ def detect_faces(self, image_id: str, image_path: str, forSearch: bool = False): embeddings.append(embedding) if not forSearch and embeddings: - db_insert_face_embeddings_by_image_id( - image_id, embeddings, confidence=confidences, bbox=bboxes - ) + db_insert_face_embeddings_by_image_id(image_id, embeddings, confidence=confidences, bbox=bboxes) return { "ids": f"{class_ids}", diff --git a/backend/app/models/FaceNet.py b/backend/app/models/FaceNet.py index df17d3a77..de6ddd34f 100644 --- a/backend/app/models/FaceNet.py +++ b/backend/app/models/FaceNet.py @@ -11,16 +11,12 @@ class FaceNet: def __init__(self, model_path): - self.session = onnxruntime.InferenceSession( - model_path, providers=ONNX_util_get_execution_providers() - ) + self.session = onnxruntime.InferenceSession(model_path, providers=ONNX_util_get_execution_providers()) self.input_tensor_name 
= self.session.get_inputs()[0].name self.output_tensor_name = self.session.get_outputs()[0].name def get_embedding(self, preprocessed_image): - result = self.session.run( - [self.output_tensor_name], {self.input_tensor_name: preprocessed_image} - )[0] + result = self.session.run([self.output_tensor_name], {self.input_tensor_name: preprocessed_image})[0] embedding = result[0] return FaceNet_util_normalize_embedding(embedding) diff --git a/backend/app/models/ObjectClassifier.py b/backend/app/models/ObjectClassifier.py index 1371705b1..bc2dd5174 100644 --- a/backend/app/models/ObjectClassifier.py +++ b/backend/app/models/ObjectClassifier.py @@ -8,9 +8,7 @@ class ObjectClassifier: def __init__(self): - self.yolo_classifier = YOLO( - YOLO_util_get_model_path("object"), conf_threshold=0.4, iou_threshold=0.5 - ) + self.yolo_classifier = YOLO(YOLO_util_get_model_path("object"), conf_threshold=0.4, iou_threshold=0.5) def get_classes(self, img_path) -> list[int] | None: img = cv2.imread(img_path) diff --git a/backend/app/models/YOLO.py b/backend/app/models/YOLO.py index 66e55d377..aaca82d11 100644 --- a/backend/app/models/YOLO.py +++ b/backend/app/models/YOLO.py @@ -20,9 +20,7 @@ def __init__(self, path, conf_threshold=0.7, iou_threshold=0.5): self.conf_threshold = conf_threshold self.iou_threshold = iou_threshold # Create ONNX session once and reuse it - self.session = onnxruntime.InferenceSession( - self.model_path, providers=ONNX_util_get_execution_providers() - ) + self.session = onnxruntime.InferenceSession(self.model_path, providers=ONNX_util_get_execution_providers()) # Initialize model info self.get_input_details() @@ -44,9 +42,7 @@ def detect_objects(self, image): def inference(self, input_tensor): time.perf_counter() - outputs = self.session.run( - self.output_names, {self.input_names[0]: input_tensor} - ) + outputs = self.session.run(self.output_names, {self.input_names[0]: input_tensor}) return outputs def get_input_details(self): @@ -91,16 +87,10 @@ def extract_boxes(self, predictions): return boxes def rescale_boxes(self, boxes): - input_shape = np.array( - [self.input_width, self.input_height, self.input_width, self.input_height] - ) + input_shape = np.array([self.input_width, self.input_height, self.input_width, self.input_height]) boxes = np.divide(boxes, input_shape, dtype=np.float32) - boxes *= np.array( - [self.img_width, self.img_height, self.img_width, self.img_height] - ) + boxes *= np.array([self.img_width, self.img_height, self.img_width, self.img_height]) return boxes def draw_detections(self, image, draw_scores=True, mask_alpha=0.4): - return YOLO_util_draw_detections( - image, self.boxes, self.scores, self.class_ids, mask_alpha - ) + return YOLO_util_draw_detections(image, self.boxes, self.scores, self.class_ids, mask_alpha) diff --git a/backend/app/routes/albums.py b/backend/app/routes/albums.py index ae0408613..20a58c2a0 100644 --- a/backend/app/routes/albums.py +++ b/backend/app/routes/albums.py @@ -63,9 +63,7 @@ def create_album(body: CreateAlbumRequest): album_id = str(uuid.uuid4()) try: - db_insert_album( - album_id, body.name, body.description, body.is_hidden, body.password - ) + db_insert_album(album_id, body.name, body.description, body.is_hidden, body.password) return CreateAlbumResponse(success=True, album_id=album_id) except Exception as e: raise HTTPException( @@ -85,9 +83,7 @@ def get_album(album_id: str = Path(...)): if not album: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorResponse( - success=False, error="Album Not Found", 
message="Album not found" - ).model_dump(), + detail=ErrorResponse(success=False, error="Album Not Found", message="Album not found").model_dump(), ) try: @@ -153,16 +149,12 @@ def update_album(album_id: str = Path(...), body: UpdateAlbumRequest = Body(...) ) try: - db_update_album( - album_id, body.name, body.description, body.is_hidden, body.password - ) + db_update_album(album_id, body.name, body.description, body.is_hidden, body.password) return SuccessResponse(success=True, msg="Album updated successfully") except Exception as e: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorResponse( - success=False, error="Failed to Update Album", message=str(e) - ).model_dump(), + detail=ErrorResponse(success=False, error="Failed to Update Album", message=str(e)).model_dump(), ) @@ -186,9 +178,7 @@ def delete_album(album_id: str = Path(...)): except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse( - success=False, error="Failed to Delete Album", message=str(e) - ).model_dump(), + detail=ErrorResponse(success=False, error="Failed to Delete Album", message=str(e)).model_dump(), ) @@ -197,9 +187,7 @@ def delete_album(album_id: str = Path(...)): # GET requests do not accept a body by default. # Since we need to send a password securely, switching this to POST -- necessary. # Open to suggestions if better approach possible. -def get_album_images( - album_id: str = Path(...), body: GetAlbumImagesRequest = Body(...) -): +def get_album_images(album_id: str = Path(...), body: GetAlbumImagesRequest = Body(...)): album = db_get_album(album_id) if not album: raise HTTPException( @@ -245,9 +233,7 @@ def get_album_images( except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse( - success=False, error="Failed to Retrieve Images", message=str(e) - ).model_dump(), + detail=ErrorResponse(success=False, error="Failed to Retrieve Images", message=str(e)).model_dump(), ) @@ -277,15 +263,11 @@ def add_images_to_album(album_id: str = Path(...), body: ImageIdsRequest = Body( try: db_add_images_to_album(album_id, body.image_ids) - return SuccessResponse( - success=True, msg=f"Added {len(body.image_ids)} images to album" - ) + return SuccessResponse(success=True, msg=f"Added {len(body.image_ids)} images to album") except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse( - success=False, error="Failed to Add Images", message=str(e) - ).model_dump(), + detail=ErrorResponse(success=False, error="Failed to Add Images", message=str(e)).model_dump(), ) @@ -305,23 +287,17 @@ def remove_image_from_album(album_id: str = Path(...), image_id: str = Path(...) try: db_remove_image_from_album(album_id, image_id) - return SuccessResponse( - success=True, msg="Image removed from album successfully" - ) + return SuccessResponse(success=True, msg="Image removed from album successfully") except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse( - success=False, error="Failed to Remove Image", message=str(e) - ).model_dump(), + detail=ErrorResponse(success=False, error="Failed to Remove Image", message=str(e)).model_dump(), ) # DELETE /albums/{album_id}/images - Remove multiple images from album @router.delete("/{album_id}/images", response_model=SuccessResponse) -def remove_images_from_album( - album_id: str = Path(...), body: ImageIdsRequest = Body(...) 
-): +def remove_images_from_album(album_id: str = Path(...), body: ImageIdsRequest = Body(...)): album = db_get_album(album_id) if not album: raise HTTPException( @@ -345,13 +321,9 @@ def remove_images_from_album( try: db_remove_images_from_album(album_id, body.image_ids) - return SuccessResponse( - success=True, msg=f"Removed {len(body.image_ids)} images from album" - ) + return SuccessResponse(success=True, msg=f"Removed {len(body.image_ids)} images from album") except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse( - success=False, error="Failed to Remove Images", message=str(e) - ).model_dump(), + detail=ErrorResponse(success=False, error="Failed to Remove Images", message=str(e)).model_dump(), ) diff --git a/backend/app/routes/face_clusters.py b/backend/app/routes/face_clusters.py index 99974ac4a..00282a3a3 100644 --- a/backend/app/routes/face_clusters.py +++ b/backend/app/routes/face_clusters.py @@ -213,9 +213,7 @@ def get_cluster_images(cluster_id: str): ) def face_tagging( payload: FaceSearchRequest, - input_type: Annotated[ - InputType, Query(description="Choose input type: 'path' or 'base64'") - ] = InputType.path, + input_type: Annotated[InputType, Query(description="Choose input type: 'path' or 'base64'")] = InputType.path, ): image_path = None @@ -276,14 +274,8 @@ def face_tagging( ).model_dump(), ) - format_match = ( - base64_data.split(";")[0].split("/")[-1] if ";" in base64_data else "jpeg" - ) - extension = ( - format_match - if format_match in ["jpeg", "jpg", "png", "gif", "webp"] - else "jpeg" - ) + format_match = base64_data.split(";")[0].split("/")[-1] if ";" in base64_data else "jpeg" + extension = format_match if format_match in ["jpeg", "jpg", "png", "gif", "webp"] else "jpeg" image_id = str(uuid.uuid4())[:8] temp_dir = "temp_uploads" os.makedirs(temp_dir, exist_ok=True) diff --git a/backend/app/routes/folders.py b/backend/app/routes/folders.py index a66cca27c..563078316 100644 --- a/backend/app/routes/folders.py +++ b/backend/app/routes/folders.py @@ -74,9 +74,7 @@ def post_folder_add_sequence(folder_path: str, folder_id: int): API_util_restart_sync_microservice_watcher() except Exception as e: - logger.error( - f"Error in post processing after folder {folder_path} was added: {e}" - ) + logger.error(f"Error in post processing after folder {folder_path} was added: {e}") return False return True @@ -96,9 +94,7 @@ def post_AI_tagging_enabled_sequence(): return True -def post_sync_folder_sequence( - folder_path: str, folder_id: int, added_folders: List[Tuple[str, str]] -): +def post_sync_folder_sequence(folder_path: str, folder_id: int, added_folders: List[Tuple[str, str]]): """ Post-sync sequence for a folder. This function is called after a folder is synced. @@ -122,9 +118,7 @@ def post_sync_folder_sequence( # Restart sync microservice watcher after processing images API_util_restart_sync_microservice_watcher() except Exception as e: - logger.error( - f"Error in post processing after folder {folder_path} was synced: {e}" - ) + logger.error(f"Error in post processing after folder {folder_path} was synced: {e}") return False return True @@ -143,9 +137,7 @@ def add_folder(request: AddFolderRequest, app_state=Depends(get_state)): # Step 1: Data Validation if not os.path.isdir(request.folder_path): - raise ValueError( - f"Error: '{request.folder_path}' is not a valid directory." 
- ) + raise ValueError(f"Error: '{request.folder_path}' is not a valid directory.") if ( not os.access(request.folder_path, os.R_OK) @@ -196,9 +188,7 @@ def add_folder(request: AddFolderRequest, app_state=Depends(get_state)): executor.submit(post_folder_add_sequence, request.folder_path, root_folder_id) return AddFolderResponse( - data=AddFolderData( - folder_id=root_folder_id, folder_path=request.folder_path - ), + data=AddFolderData(folder_id=root_folder_id, folder_path=request.folder_path), success=True, message=f"Successfully added folder tree starting at: {request.folder_path}", ) @@ -242,9 +232,7 @@ def enable_ai_tagging(request: UpdateAITaggingRequest, app_state=Depends(get_sta executor.submit(post_AI_tagging_enabled_sequence) return UpdateAITaggingResponse( - data=UpdateAITaggingData( - updated_count=updated_count, folder_ids=request.folder_ids - ), + data=UpdateAITaggingData(updated_count=updated_count, folder_ids=request.folder_ids), success=True, message=f"Successfully enabled AI tagging for {updated_count} folder(s)", ) @@ -283,9 +271,7 @@ def disable_ai_tagging(request: UpdateAITaggingRequest): updated_count = db_disable_ai_tagging_batch(request.folder_ids) return UpdateAITaggingResponse( - data=UpdateAITaggingData( - updated_count=updated_count, folder_ids=request.folder_ids - ), + data=UpdateAITaggingData(updated_count=updated_count, folder_ids=request.folder_ids), success=True, message=f"Successfully disabled AI tagging for {updated_count} folder(s)", ) @@ -324,9 +310,7 @@ def delete_folders(request: DeleteFoldersRequest): deleted_count = db_delete_folders_batch(request.folder_ids) return DeleteFoldersResponse( - data=DeleteFoldersData( - deleted_count=deleted_count, folder_ids=request.folder_ids - ), + data=DeleteFoldersData(deleted_count=deleted_count, folder_ids=request.folder_ids), success=True, message=f"Successfully deleted {deleted_count} folder(s)", ) @@ -361,9 +345,7 @@ def sync_folder(request: SyncFolderRequest, app_state=Depends(get_state)): try: # Step 1: Get current state from both sources db_child_folders = db_get_direct_child_folders(request.folder_id) - filesystem_folders = folder_util_get_filesystem_direct_child_folders( - request.folder_path - ) + filesystem_folders = folder_util_get_filesystem_direct_child_folders(request.folder_path) # Step 2: Compare and identify differences filesystem_folder_set = set(filesystem_folders) @@ -373,17 +355,11 @@ def sync_folder(request: SyncFolderRequest, app_state=Depends(get_state)): folders_to_add = filesystem_folder_set - db_folder_paths # Step 3: Perform synchronization operations - deleted_count, deleted_folders = folder_util_delete_obsolete_folders( - db_child_folders, folders_to_delete - ) - added_count, added_folders_with_ids = folder_util_add_multiple_folder_trees( - folders_to_add, request.folder_id - ) + deleted_count, deleted_folders = folder_util_delete_obsolete_folders(db_child_folders, folders_to_delete) + added_count, added_folders_with_ids = folder_util_add_multiple_folder_trees(folders_to_add, request.folder_id) # Extract just the paths for the API response - added_folders = [ - folder_path for folder_id, folder_path in added_folders_with_ids - ] + added_folders = [folder_path for folder_id, folder_path in added_folders_with_ids] executor: ProcessPoolExecutor = app_state.executor executor.submit( diff --git a/backend/app/routes/images.py b/backend/app/routes/images.py index 2e40cd825..eafb3afdb 100644 --- a/backend/app/routes/images.py +++ b/backend/app/routes/images.py @@ -48,9 +48,7 @@ class 
GetAllImagesResponse(BaseModel): response_model=GetAllImagesResponse, responses={500: {"model": ErrorResponse}}, ) -def get_all_images( - tagged: Optional[bool] = Query(None, description="Filter images by tagged status") -): +def get_all_images(tagged: Optional[bool] = Query(None, description="Filter images by tagged status")): """Get all images from the database.""" try: # Get all images with tags from database (single query with optional filter) @@ -101,13 +99,9 @@ def toggle_favourite(req: ToggleFavouriteRequest): try: success = db_toggle_image_favourite_status(image_id) if not success: - raise HTTPException( - status_code=404, detail="Image not found or failed to toggle" - ) + raise HTTPException(status_code=404, detail="Image not found or failed to toggle") # Fetch updated status to return - image = next( - (img for img in db_get_all_images() if img["id"] == image_id), None - ) + image = next((img for img in db_get_all_images() if img["id"] == image_id), None) return { "success": True, "image_id": image_id, diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index 28008189c..d1632952a 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -15,11 +15,10 @@ """ from datetime import datetime, timedelta -from typing import List, Dict, Any, Optional -from collections import defaultdict +from typing import List, Dict, Optional from fastapi import APIRouter, HTTPException, Query -from pydantic import BaseModel, Field +from pydantic import BaseModel from app.database.images import ( db_get_images_with_location, @@ -63,8 +62,8 @@ class Memory(BaseModel): image_count: int images: List[MemoryImage] thumbnail_image_id: str - center_lat: float - center_lon: float + center_lat: Optional[float] = None + center_lon: Optional[float] = None class GenerateMemoriesResponse(BaseModel): @@ -121,12 +120,8 @@ class LocationsResponse(BaseModel): @router.post("/generate", response_model=GenerateMemoriesResponse) async def generate_memories( - location_radius_km: float = Query( - 5.0, ge=0.1, le=100, description="Location clustering radius in km" - ), - date_tolerance_days: int = Query( - 3, ge=1, le=30, description="Date tolerance in days" - ), + location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), + date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days"), min_images: int = Query(2, ge=1, le=10, description="Minimum images per memory"), ): """ @@ -137,10 +132,7 @@ async def generate_memories( Returns simple breakdown: {location_count, date_count, total} """ try: - logger.info( - f"Generating memories: radius={location_radius_km}km, " - f"date_tolerance={date_tolerance_days}days, min_images={min_images}" - ) + logger.info(f"Generating memories: radius={location_radius_km}km, date_tolerance={date_tolerance_days}days, min_images={min_images}") # Fetch ALL images from app.database.images import db_get_all_images_for_memories @@ -171,10 +163,7 @@ async def generate_memories( location_count = sum(1 for m in memories if m.get("type") == "location") date_count = sum(1 for m in memories if m.get("type") == "date") - logger.info( - f"Generated {len(memories)} memories " - f"(location: {location_count}, date: {date_count})" - ) + logger.info(f"Generated {len(memories)} memories (location: {location_count}, date: {date_count})") return GenerateMemoriesResponse( success=True, @@ -192,12 +181,8 @@ async def generate_memories( @router.get("/timeline", response_model=TimelineResponse) async 
def get_timeline( days: int = Query(365, ge=1, le=3650, description="Number of days to look back"), - location_radius_km: float = Query( - 5.0, ge=0.1, le=100, description="Location clustering radius in km" - ), - date_tolerance_days: int = Query( - 3, ge=1, le=30, description="Date tolerance in days" - ), + location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), + date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days"), ): """ Get memories from the past N days as a timeline. @@ -299,12 +284,7 @@ async def get_on_this_day(): images = db_get_images_by_year_month(target_year, current_month) # Filter to specific day - day_images = [ - img - for img in images - if img.get("captured_at") - and datetime.fromisoformat(img["captured_at"]).day == current_day - ] + day_images = [img for img in images if img.get("captured_at") and datetime.fromisoformat(img["captured_at"]).day == current_day] if day_images: all_images.extend(day_images) @@ -317,11 +297,7 @@ async def get_on_this_day(): # Sort by year (most recent first) all_images.sort( - key=lambda x: ( - datetime.fromisoformat(x["captured_at"]) - if x.get("captured_at") - else datetime.min - ), + key=lambda x: (datetime.fromisoformat(x["captured_at"]) if x.get("captured_at") else datetime.min), reverse=True, ) @@ -335,19 +311,13 @@ async def get_on_this_day(): except Exception as e: logger.error(f"Error getting 'On This Day': {e}") - raise HTTPException( - status_code=500, detail=f"Failed to get 'On This Day': {str(e)}" - ) + raise HTTPException(status_code=500, detail=f"Failed to get 'On This Day': {str(e)}") @router.get("/locations", response_model=LocationsResponse) async def get_locations( - location_radius_km: float = Query( - 5.0, ge=0.1, le=100, description="Location clustering radius in km" - ), - max_sample_images: int = Query( - 5, ge=1, le=20, description="Max sample images per location" - ), + location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), + max_sample_images: int = Query(5, ge=1, le=20, description="Max sample images per location"), ): """ Get all unique locations where photos were taken. 
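# Hedged usage sketch for the endpoints in this router, using `requests`; the
# /memories prefix and localhost:8000 base URL are assumptions, while the query
# parameters mirror the Query(...) defaults declared above.
import requests

BASE_URL = "http://localhost:8000/memories"  # assumed mount point for this router

# Regenerate memories: location clustering first, date-only grouping as fallback.
generate = requests.post(
    f"{BASE_URL}/generate",
    params={"location_radius_km": 5.0, "date_tolerance_days": 3, "min_images": 2},
)
print(generate.json())

# Timeline of memories from the past year, plus the per-location summary.
timeline = requests.get(f"{BASE_URL}/timeline", params={"days": 365}).json()
locations = requests.get(f"{BASE_URL}/locations", params={"max_sample_images": 5}).json()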
@@ -387,9 +357,7 @@ async def get_locations( ) # Use internal method to get location clusters - location_clusters = clustering._cluster_by_location( - clustering._filter_valid_images(images) - ) + location_clusters = clustering._cluster_by_location(clustering._filter_valid_images(images)) # Create location cluster objects locations = [] @@ -398,12 +366,8 @@ async def get_locations( continue # Calculate center - center_lat = sum(img["latitude"] for img in cluster_images) / len( - cluster_images - ) - center_lon = sum(img["longitude"] for img in cluster_images) / len( - cluster_images - ) + center_lat = sum(img["latitude"] for img in cluster_images) / len(cluster_images) + center_lon = sum(img["longitude"] for img in cluster_images) / len(cluster_images) # Get location name location_name = clustering._reverse_geocode(center_lat, center_lon) @@ -424,12 +388,8 @@ async def get_locations( # Sort by image count (most photos first) locations.sort(key=lambda loc: loc.image_count, reverse=True) - return LocationsResponse( - success=True, location_count=len(locations), locations=locations - ) + return LocationsResponse(success=True, location_count=len(locations), locations=locations) except Exception as e: logger.error(f"Error getting locations: {e}") - raise HTTPException( - status_code=500, detail=f"Failed to get locations: {str(e)}" - ) + raise HTTPException(status_code=500, detail=f"Failed to get locations: {str(e)}") diff --git a/backend/app/utils/API.py b/backend/app/utils/API.py index 32bb9a0fa..c734c83a5 100644 --- a/backend/app/utils/API.py +++ b/backend/app/utils/API.py @@ -20,9 +20,7 @@ def API_util_restart_sync_microservice_watcher(): logger.info("Successfully restarted sync microservice watcher") return True else: - logger.warning( - f"Failed to restart sync microservice watcher. Status code: {response.status_code}" - ) + logger.warning(f"Failed to restart sync microservice watcher. 
Status code: {response.status_code}") return False except requests.exceptions.RequestException as e: diff --git a/backend/app/utils/FaceNet.py b/backend/app/utils/FaceNet.py index 8a541bd21..7c49be31c 100644 --- a/backend/app/utils/FaceNet.py +++ b/backend/app/utils/FaceNet.py @@ -18,9 +18,7 @@ def FaceNet_util_normalize_embedding(embedding): def FaceNet_util_cosine_similarity(embedding1, embedding2): - return np.dot(embedding1, embedding2) / ( - np.linalg.norm(embedding1) * np.linalg.norm(embedding2) - ) + return np.dot(embedding1, embedding2) / (np.linalg.norm(embedding1) * np.linalg.norm(embedding2)) def FaceNet_util_get_model_path(): diff --git a/backend/app/utils/YOLO.py b/backend/app/utils/YOLO.py index cf63d41dc..056d199d4 100644 --- a/backend/app/utils/YOLO.py +++ b/backend/app/utils/YOLO.py @@ -158,9 +158,7 @@ def YOLO_util_xywh2xyxy(x): return y -def YOLO_util_draw_detections( - image, boxes, scores, class_ids, mask_alpha=0.3, confidence_threshold=0.3 -): +def YOLO_util_draw_detections(image, boxes, scores, class_ids, mask_alpha=0.3, confidence_threshold=0.3): det_img = image.copy() img_height, img_width = image.shape[:2] @@ -225,9 +223,7 @@ def YOLO_util_draw_text( ) -def YOLO_util_draw_masks( - image: np.ndarray, boxes: np.ndarray, classes: np.ndarray, mask_alpha: float = 0.3 -) -> np.ndarray: +def YOLO_util_draw_masks(image: np.ndarray, boxes: np.ndarray, classes: np.ndarray, mask_alpha: float = 0.3) -> np.ndarray: mask_img = image.copy() # Draw bounding boxes and labels of detections diff --git a/backend/app/utils/extract_location_metadata.py b/backend/app/utils/extract_location_metadata.py index 2eede996b..39ed04c3f 100644 --- a/backend/app/utils/extract_location_metadata.py +++ b/backend/app/utils/extract_location_metadata.py @@ -45,9 +45,7 @@ def __init__(self): "errors": 0, } - def extract_gps_coordinates( - self, metadata: Dict[str, Any] - ) -> Tuple[Optional[float], Optional[float]]: + def extract_gps_coordinates(self, metadata: Dict[str, Any]) -> Tuple[Optional[float], Optional[float]]: """ Extract GPS coordinates from metadata dictionary. 
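# Hedged sketch of the DMS-to-decimal conversion and range check this extractor
# relies on; `dms_to_decimal` and `is_valid_coordinate` are illustrative names
# rather than helpers defined in this patch.
from typing import Sequence


def dms_to_decimal(dms: Sequence[float], ref: str) -> float:
    """Convert (degrees, minutes, seconds) plus an N/S/E/W reference to signed decimal degrees."""
    degrees, minutes, seconds = (float(v) for v in dms[:3])
    value = degrees + minutes / 60.0 + seconds / 3600.0
    return -value if ref in ("S", "W") else value


def is_valid_coordinate(lat: float, lon: float) -> bool:
    """Reject coordinates outside the valid latitude/longitude ranges."""
    return -90.0 <= lat <= 90.0 and -180.0 <= lon <= 180.0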
@@ -102,9 +100,7 @@ def extract_gps_coordinates( latitude = lat longitude = lon else: - logger.warning( - f"Invalid coordinate range: lat={lat}, lon={lon}" - ) + logger.warning(f"Invalid coordinate range: lat={lat}, lon={lon}") except (ValueError, TypeError) as e: logger.warning(f"Could not convert coordinates to float: {e}") @@ -151,12 +147,7 @@ def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: if not date_str: exif = metadata.get("exif", {}) if isinstance(exif, dict): - date_str = ( - exif.get("datetime") - or exif.get("DateTime") - or exif.get("DateTimeOriginal") - or exif.get("DateTimeDigitized") - ) + date_str = exif.get("datetime") or exif.get("DateTime") or exif.get("DateTimeOriginal") or exif.get("DateTimeDigitized") # Parse datetime string if date_str: @@ -179,9 +170,7 @@ def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: if "T" in date_str: try: # Remove timezone suffix for simpler parsing - date_str_clean = ( - date_str.replace("Z", "").split("+")[0].split("-") - ) + date_str_clean = date_str.replace("Z", "").split("+")[0].split("-") # Rejoin only date-time parts (not timezone) if len(date_str_clean) >= 3: date_str_clean = "-".join(date_str_clean[:3]) @@ -206,9 +195,7 @@ def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: return captured_at - def extract_all( - self, metadata_json: str - ) -> Tuple[Optional[float], Optional[float], Optional[datetime]]: + def extract_all(self, metadata_json: str) -> Tuple[Optional[float], Optional[float], Optional[datetime]]: """ Extract GPS coordinates and datetime from metadata JSON string. @@ -361,15 +348,9 @@ def _print_summary(self): logger.info("=" * 70) logger.info(f"Total images processed: {self.stats['total']}") logger.info(f"Images updated: {self.stats['updated']}") - logger.info( - f"Images with location data: {self.stats['with_location']} ({self._percentage('with_location')}%)" - ) - logger.info( - f"Images with datetime: {self.stats['with_datetime']} ({self._percentage('with_datetime')}%)" - ) - logger.info( - f"Images with both: {self.stats['with_both']} ({self._percentage('with_both')}%)" - ) + logger.info(f"Images with location data: {self.stats['with_location']} ({self._percentage('with_location')}%)") + logger.info(f"Images with datetime: {self.stats['with_datetime']} ({self._percentage('with_datetime')}%)") + logger.info(f"Images with both: {self.stats['with_both']} ({self._percentage('with_both')}%)") logger.info(f"Images skipped (no data): {self.stats['skipped']}") logger.info(f"Errors encountered: {self.stats['errors']}") logger.info("=" * 70) diff --git a/backend/app/utils/faceSearch.py b/backend/app/utils/faceSearch.py index 385cce908..3222ba768 100644 --- a/backend/app/utils/faceSearch.py +++ b/backend/app/utils/faceSearch.py @@ -76,9 +76,7 @@ def perform_face_search(image_path: str) -> GetAllImagesResponse: ) for image in images: - similarity = FaceNet_util_cosine_similarity( - new_embedding, image["embeddings"] - ) + similarity = FaceNet_util_cosine_similarity(new_embedding, image["embeddings"]) if similarity >= CONFIDENCE_PERCENT: matches.append( ImageData( diff --git a/backend/app/utils/face_clusters.py b/backend/app/utils/face_clusters.py index 4c373c981..78281a5d8 100644 --- a/backend/app/utils/face_clusters.py +++ b/backend/app/utils/face_clusters.py @@ -141,13 +141,9 @@ def cluster_util_face_clusters_sync(force_full_reclustering: bool = False): face_image_base64 = _generate_cluster_face_image(cluster_id, cursor) if face_image_base64: # 
Update the cluster with the generated face image - success = _update_cluster_face_image( - cluster_id, face_image_base64, cursor - ) + success = _update_cluster_face_image(cluster_id, face_image_base64, cursor) if not success: - raise RuntimeError( - f"Failed to update face image for cluster {cluster_id}" - ) + raise RuntimeError(f"Failed to update face image for cluster {cluster_id}") # Update metadata with new reclustering time, preserving other values current_metadata = metadata or {} @@ -225,9 +221,7 @@ def cluster_util_cluster_all_face_embeddings( existing_cluster_names.append(face["cluster_name"]) else: invalid_count += 1 - logger.warning( - f"Skipping invalid embedding for face_id {face['face_id']} (NaN or zero vector)" - ) + logger.warning(f"Skipping invalid embedding for face_id {face['face_id']} (NaN or zero vector)") if invalid_count > 0: logger.warning(f"Filtered out {invalid_count} invalid embeddings") @@ -246,18 +240,14 @@ def cluster_util_cluster_all_face_embeddings( # Guard against NaN distances (shouldn't happen after validation, but double-check) if not np.isfinite(distances).all(): - logger.error( - "NaN or infinite values detected in distance matrix after validation" - ) + logger.error("NaN or infinite values detected in distance matrix after validation") # Replace NaN/inf with max distance (1.0) distances = np.nan_to_num(distances, nan=1.0, posinf=1.0, neginf=1.0) # Apply similarity threshold - mark dissimilar faces as completely different max_distance = 1 - similarity_threshold # Convert similarity to distance distances[distances > max_distance] = 1.0 # Mark as completely different - logger.info( - f"Applied similarity threshold: {similarity_threshold} (max_distance: {max_distance:.3f})" - ) + logger.info(f"Applied similarity threshold: {similarity_threshold} (max_distance: {max_distance:.3f})") # Perform DBSCAN clustering with precomputed distances dbscan = DBSCAN( @@ -268,9 +258,7 @@ def cluster_util_cluster_all_face_embeddings( ) cluster_labels = dbscan.fit_predict(distances) - logger.info( - f"DBSCAN found {len(set(cluster_labels)) - (1 if -1 in cluster_labels else 0)} clusters" - ) + logger.info(f"DBSCAN found {len(set(cluster_labels)) - (1 if -1 in cluster_labels else 0)} clusters") # Group faces by cluster labels clusters = defaultdict(list) @@ -308,9 +296,7 @@ def cluster_util_cluster_all_face_embeddings( # Post-clustering merge: merge similar clusters based on representative faces # Use similarity_threshold if merge_threshold not explicitly provided effective_merge_threshold = merge_threshold if merge_threshold is not None else 0.7 - results = _merge_similar_clusters( - results, merge_threshold=effective_merge_threshold - ) + results = _merge_similar_clusters(results, merge_threshold=effective_merge_threshold) return results @@ -361,9 +347,7 @@ def cluster_util_assign_cluster_to_faces_without_clusterId( mean_embeddings.append(mean_emb) else: invalid_clusters += 1 - logger.warning( - f"Skipping invalid cluster mean for cluster_id {cluster_data['cluster_id']}" - ) + logger.warning(f"Skipping invalid cluster mean for cluster_id {cluster_data['cluster_id']}") if invalid_clusters > 0: logger.warning(f"Filtered out {invalid_clusters} invalid cluster means") @@ -406,21 +390,15 @@ def cluster_util_assign_cluster_to_faces_without_clusterId( nearest_cluster_idx = np.argmin(distances) nearest_cluster_id = cluster_ids[nearest_cluster_idx] - face_cluster_mappings.append( - {"face_id": face_id, "cluster_id": nearest_cluster_id} - ) + 
face_cluster_mappings.append({"face_id": face_id, "cluster_id": nearest_cluster_id}) if skipped_invalid > 0: - logger.warning( - f"Skipped {skipped_invalid} faces with invalid embeddings during assignment" - ) + logger.warning(f"Skipped {skipped_invalid} faces with invalid embeddings during assignment") return face_cluster_mappings -def _merge_similar_clusters( - results: List[ClusterResult], merge_threshold: float = 0.85 -) -> List[ClusterResult]: +def _merge_similar_clusters(results: List[ClusterResult], merge_threshold: float = 0.85) -> List[ClusterResult]: """ Merge clusters that are too similar based on their mean embeddings. @@ -455,9 +433,7 @@ def _merge_similar_clusters( cluster_means[cluster_uuid] = mean_embedding else: invalid_clusters.append(cluster_uuid) - logger.warning( - f"Cluster {cluster_uuid} has invalid mean embedding, excluding from merge" - ) + logger.warning(f"Cluster {cluster_uuid} has invalid mean embedding, excluding from merge") # Remove invalid clusters from consideration for invalid_uuid in invalid_clusters: @@ -487,17 +463,13 @@ def _merge_similar_clusters( # Guard against NaN similarity if not np.isfinite(similarity): - logger.warning( - f"NaN similarity between clusters {uuid1} and {uuid2}, skipping merge" - ) + logger.warning(f"NaN similarity between clusters {uuid1} and {uuid2}, skipping merge") continue # If very similar, merge cluster2 into cluster1 if similarity >= merge_threshold: merge_mapping[uuid2] = uuid1 - logger.info( - f"Merging cluster {uuid2} into {uuid1} (similarity: {similarity:.3f})" - ) + logger.info(f"Merging cluster {uuid2} into {uuid1} (similarity: {similarity:.3f})") # Apply merges if merge_mapping: @@ -545,17 +517,13 @@ def resolve_final_cluster(uuid): for result in merged_results: result.cluster_name = final_cluster_names.get(result.cluster_uuid) - logger.info( - f"Merged {len(merge_mapping)} clusters. Final count: {len(set(r.cluster_uuid for r in merged_results))}" - ) + logger.info(f"Merged {len(merge_mapping)} clusters. Final count: {len(set(r.cluster_uuid for r in merged_results))}") return merged_results return results -def _calculate_cosine_distances( - face_embedding: NDArray, cluster_means: NDArray -) -> NDArray: +def _calculate_cosine_distances(face_embedding: NDArray, cluster_means: NDArray) -> NDArray: """ Calculate cosine distances between a face embedding and cluster means. Handles edge cases with zero vectors and ensures finite results. @@ -576,9 +544,7 @@ def _calculate_cosine_distances( # Normalize cluster means with safe division cluster_norm_values = np.linalg.norm(cluster_means, axis=1, keepdims=True) - cluster_norm_values = np.maximum( - cluster_norm_values, 1e-6 - ) # Prevent division by zero + cluster_norm_values = np.maximum(cluster_norm_values, 1e-6) # Prevent division by zero cluster_norms = cluster_means / cluster_norm_values # Calculate cosine similarities (dot product of normalized vectors) @@ -593,9 +559,7 @@ def _calculate_cosine_distances( return cosine_distances -def _update_cluster_face_image( - cluster_id: str, face_image_base64: str, cursor: Optional[sqlite3.Cursor] = None -) -> bool: +def _update_cluster_face_image(cluster_id: str, face_image_base64: str, cursor: Optional[sqlite3.Cursor] = None) -> bool: """ Update the face image for a specific cluster. 
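# Small numpy sketch of the merge test in _merge_similar_clusters above: two
# clusters merge when the cosine similarity of their mean embeddings reaches the
# threshold; the 1e-6 clamp mirrors the safe-division guard in
# _calculate_cosine_distances. `should_merge` is an illustrative name.
import numpy as np


def should_merge(mean_a: np.ndarray, mean_b: np.ndarray, merge_threshold: float = 0.85) -> bool:
    """Return True when two cluster mean embeddings are similar enough to merge."""
    norm_a = max(float(np.linalg.norm(mean_a)), 1e-6)
    norm_b = max(float(np.linalg.norm(mean_b)), 1e-6)
    similarity = float(np.dot(mean_a, mean_b)) / (norm_a * norm_b)
    return bool(np.isfinite(similarity) and similarity >= merge_threshold)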
@@ -633,9 +597,7 @@ def _update_cluster_face_image( conn.close() -def _get_cluster_face_data( - cluster_uuid: str, cursor: sqlite3.Cursor -) -> Optional[tuple]: +def _get_cluster_face_data(cluster_uuid: str, cursor: sqlite3.Cursor) -> Optional[tuple]: """ Get the image path and bounding box for the first face in a cluster. @@ -678,9 +640,7 @@ def _get_cluster_face_data( return None -def _calculate_square_crop_bounds( - bbox: Dict, img_shape: tuple, padding: int = 50 -) -> tuple: +def _calculate_square_crop_bounds(bbox: Dict, img_shape: tuple, padding: int = 50) -> tuple: """ Calculate square crop bounds centered on a face bounding box. @@ -743,9 +703,7 @@ def _calculate_square_crop_bounds( return (square_x_start, square_y_start, square_x_end, square_y_end) -def _crop_and_resize_face( - img: np.ndarray, crop_bounds: tuple, target_size: int = 300 -) -> Optional[np.ndarray]: +def _crop_and_resize_face(img: np.ndarray, crop_bounds: tuple, target_size: int = 300) -> Optional[np.ndarray]: """ Crop and resize a face region from an image. @@ -795,9 +753,7 @@ def _encode_image_to_base64(img: np.ndarray, format: str = ".jpg") -> Optional[s return None -def _generate_cluster_face_image( - cluster_uuid: str, cursor: sqlite3.Cursor -) -> Optional[str]: +def _generate_cluster_face_image(cluster_uuid: str, cursor: sqlite3.Cursor) -> Optional[str]: """ Generate a base64 encoded face image for a cluster. @@ -848,11 +804,7 @@ def _determine_cluster_name(faces_in_cluster: List[Dict]) -> Optional[str]: Most common non-null cluster name, or None if no named clusters exist """ # Extract non-null cluster names - existing_names = [ - face["existing_cluster_name"] - for face in faces_in_cluster - if face["existing_cluster_name"] is not None - ] + existing_names = [face["existing_cluster_name"] for face in faces_in_cluster if face["existing_cluster_name"] is not None] if not existing_names: return None diff --git a/backend/app/utils/folders.py b/backend/app/utils/folders.py index ec014f479..b4596887c 100644 --- a/backend/app/utils/folders.py +++ b/backend/app/utils/folders.py @@ -14,9 +14,7 @@ logger = get_logger(__name__) -def folder_util_add_folder_tree( - root_path, parent_folder_id=None, AI_Tagging=False, taggingCompleted=None -): +def folder_util_add_folder_tree(root_path, parent_folder_id=None, AI_Tagging=False, taggingCompleted=None): """ Recursively collect folder data and insert all folders in a single database transaction. All folders are initially inserted with NULL parent_id, which is updated after insertion. @@ -35,9 +33,7 @@ def folder_util_add_folder_tree( parent_id = parent_folder_id else: parent_path = os.path.dirname(dirpath) - parent_id = ( - folder_map[parent_path][0] if parent_path in folder_map else None - ) + parent_id = folder_map[parent_path][0] if parent_path in folder_map else None # Store both folder_id and parent_id in the map folder_map[dirpath] = (this_folder_id, parent_id) @@ -103,9 +99,7 @@ def folder_util_get_filesystem_direct_child_folders(folder_path: str) -> List[st ) -def folder_util_delete_obsolete_folders( - db_child_folders: List[Tuple[str, str]], folders_to_delete: set -) -> Tuple[int, List[str]]: +def folder_util_delete_obsolete_folders(db_child_folders: List[Tuple[str, str]], folders_to_delete: set) -> Tuple[int, List[str]]: """ Delete folders from the database that are no longer present in the filesystem. 
@@ -120,11 +114,7 @@ def folder_util_delete_obsolete_folders( return 0, [] # Get the folder IDs for the folders to delete - folder_ids_to_delete = [ - folder_id - for folder_id, folder_path in db_child_folders - if folder_path in folders_to_delete - ] + folder_ids_to_delete = [folder_id for folder_id, folder_path in db_child_folders if folder_path in folders_to_delete] if folder_ids_to_delete: deleted_count = db_delete_folders_batch(folder_ids_to_delete) @@ -133,9 +123,7 @@ def folder_util_delete_obsolete_folders( return 0, [] -def folder_util_add_multiple_folder_trees( - folders_to_add: set, parent_folder_id: str -) -> Tuple[int, List[Tuple[str, str]]]: +def folder_util_add_multiple_folder_trees(folders_to_add: set, parent_folder_id: str) -> Tuple[int, List[Tuple[str, str]]]: """ Add multiple folder trees with same parent to the database. diff --git a/backend/app/utils/image_metadata.py b/backend/app/utils/image_metadata.py index c5a91d3e6..b62ffa3ec 100644 --- a/backend/app/utils/image_metadata.py +++ b/backend/app/utils/image_metadata.py @@ -32,9 +32,7 @@ def extract_metadata(image_path): tag = TAGS.get(tag_id, tag_id) data = exifdata.get(tag_id) if isinstance(data, (tuple, list)): - data = [ - float(d) if isinstance(d, IFDRational) else d for d in data - ] + data = [float(d) if isinstance(d, IFDRational) else d for d in data] elif isinstance(data, IFDRational): data = float(data) @@ -46,9 +44,7 @@ def extract_metadata(image_path): metadata[str(tag).lower().replace(" ", "_")] = data except Exception as exif_error: - logger.warning( - f"Failed to extract EXIF data from {image_path}. Error: {exif_error}" - ) + logger.warning(f"Failed to extract EXIF data from {image_path}. Error: {exif_error}") except FileNotFoundError: raise # Re-raise if file is not found @@ -63,18 +59,12 @@ def extract_metadata(image_path): try: metadata["file_size"] = os.path.getsize(image_path) except OSError as file_error: - logger.warning( - f"Could not retrieve file size for {image_path}. Error: {file_error}" - ) + logger.warning(f"Could not retrieve file size for {image_path}. Error: {file_error}") # Image creation date try: creation_time = os.path.getctime(image_path) - metadata["creation_date"] = datetime.fromtimestamp(creation_time).strftime( - "%Y-%m-%d %H:%M:%S" - ) + metadata["creation_date"] = datetime.fromtimestamp(creation_time).strftime("%Y-%m-%d %H:%M:%S") except OSError as time_error: - logger.warning( - f"Could not retrieve creation date for {image_path}. Error: {time_error}" - ) + logger.warning(f"Could not retrieve creation date for {image_path}. 
Error: {time_error}") return metadata diff --git a/backend/app/utils/images.py b/backend/app/utils/images.py index 42474b3d3..6f10d5ab6 100644 --- a/backend/app/utils/images.py +++ b/backend/app/utils/images.py @@ -3,7 +3,7 @@ import datetime import json import logging -from typing import List, Tuple, Dict, Any, Mapping, Optional +from typing import List, Tuple, Dict, Any, Mapping from PIL import Image, ExifTags from pathlib import Path @@ -62,9 +62,7 @@ def image_util_process_folder_images(folder_data: List[Tuple[str, int, bool]]) - folder_path_to_id = {os.path.abspath(folder_path): folder_id} # Step 3: Prepare image records for this folder - folder_image_records = image_util_prepare_image_records( - image_files, folder_path_to_id - ) + folder_image_records = image_util_prepare_image_records(image_files, folder_path_to_id) all_image_records.extend(folder_image_records) except Exception as e: @@ -137,9 +135,7 @@ def image_util_classify_and_face_detect_images( face_detector.close() -def image_util_prepare_image_records( - image_files: List[str], folder_path_to_id: Dict[str, int] -) -> List[Dict]: +def image_util_prepare_image_records(image_files: List[str], folder_path_to_id: Dict[str, int]) -> List[Dict]: """ Prepare image records with thumbnails for database insertion. Automatically extracts GPS coordinates and capture datetime from metadata. @@ -162,9 +158,7 @@ def image_util_prepare_image_records( image_id = str(uuid.uuid4()) thumbnail_name = f"thumbnail_{image_id}.jpg" - thumbnail_path = os.path.abspath( - os.path.join(THUMBNAIL_IMAGES_PATH, thumbnail_name) - ) + thumbnail_path = os.path.abspath(os.path.join(THUMBNAIL_IMAGES_PATH, thumbnail_name)) # Generate thumbnail if image_util_generate_thumbnail(image_path, thumbnail_path): @@ -181,17 +175,11 @@ def image_util_prepare_image_records( # Log GPS extraction results if latitude and longitude: - logger.info( - f"GPS extracted for {os.path.basename(image_path)}: ({latitude}, {longitude})" - ) + logger.info(f"GPS extracted for {os.path.basename(image_path)}: ({latitude}, {longitude})") if captured_at: - logger.debug( - f"Date extracted for {os.path.basename(image_path)}: {captured_at}" - ) + logger.debug(f"Date extracted for {os.path.basename(image_path)}: {captured_at}") except Exception as e: - logger.warning( - f"GPS extraction failed for {os.path.basename(image_path)}: {e}" - ) + logger.warning(f"GPS extraction failed for {os.path.basename(image_path)}: {e}") # Continue without GPS - don't fail the upload # Build image record with GPS data @@ -206,11 +194,7 @@ def image_util_prepare_image_records( "isTagged": False, "latitude": latitude, # Can be None "longitude": longitude, # Can be None - "captured_at": ( - captured_at.isoformat() - if isinstance(captured_at, datetime.datetime) and captured_at - else captured_at - ), # Can be None + "captured_at": (captured_at.isoformat() if isinstance(captured_at, datetime.datetime) and captured_at else captured_at), # Can be None } image_records.append(image_record) @@ -218,9 +202,7 @@ def image_util_prepare_image_records( return image_records -def image_util_get_images_from_folder( - folder_path: str, recursive: bool = True -) -> List[str]: +def image_util_get_images_from_folder(folder_path: str, recursive: bool = True) -> List[str]: """Get all image files from a folder. 
Args: @@ -252,9 +234,7 @@ def image_util_get_images_from_folder( return image_files -def image_util_generate_thumbnail( - image_path: str, thumbnail_path: str, size: Tuple[int, int] = (600, 600) -) -> bool: +def image_util_generate_thumbnail(image_path: str, thumbnail_path: str, size: Tuple[int, int] = (600, 600)) -> bool: """Generate thumbnail for a single image.""" try: with Image.open(image_path) as img: @@ -321,9 +301,7 @@ def image_util_create_folder_path_mapping( return folder_path_to_id -def image_util_find_folder_id_for_image( - image_path: str, folder_path_to_id: Dict[str, int] -) -> int: +def image_util_find_folder_id_for_image(image_path: str, folder_path_to_id: Dict[str, int]) -> int: """ Find the most specific folder ID for a given image path. @@ -367,11 +345,7 @@ def _convert_to_degrees(value): """Converts a GPS coordinate value from DMS to decimal degrees.""" def to_float(v): - return ( - float(v.numerator) / float(v.denominator) - if hasattr(v, "numerator") - else float(v) - ) + return float(v.numerator) / float(v.denominator) if hasattr(v, "numerator") else float(v) d, m, s = (to_float(v) for v in value[:3]) return d + (m / 60.0) + (s / 3600.0) @@ -452,11 +426,7 @@ def image_util_extract_metadata(image_path: str) -> dict: # Robust EXIF extraction with safe fallback try: - exif_data = ( - img.getexif() - if hasattr(img, "getexif") - else getattr(img, "_getexif", lambda: None)() - ) + exif_data = img.getexif() if hasattr(img, "getexif") else getattr(img, "_getexif", lambda: None)() except Exception: exif_data = None @@ -466,11 +436,7 @@ def image_util_extract_metadata(image_path: str) -> dict: for k, v in exif.items(): if ExifTags.TAGS.get(k) == "DateTimeOriginal": - dt_original = ( - v.decode("utf-8", "ignore") - if isinstance(v, (bytes, bytearray)) - else str(v) - ) + dt_original = v.decode("utf-8", "ignore") if isinstance(v, (bytes, bytearray)) else str(v) break # Safe parse; fall back to mtime without losing width/height @@ -481,13 +447,9 @@ def image_util_extract_metadata(image_path: str) -> dict: "%Y:%m:%d %H:%M:%S", ).isoformat() except ValueError: - date_created = datetime.datetime.fromtimestamp( - stats.st_mtime - ).isoformat() + date_created = datetime.datetime.fromtimestamp(stats.st_mtime).isoformat() else: - date_created = datetime.datetime.fromtimestamp( - stats.st_mtime - ).isoformat() + date_created = datetime.datetime.fromtimestamp(stats.st_mtime).isoformat() metadata_dict = { "name": os.path.basename(image_path), @@ -509,9 +471,7 @@ def image_util_extract_metadata(image_path: str) -> dict: logger.error(f"Pillow could not open image {image_path}: {e}") return { "name": os.path.basename(image_path), - "date_created": datetime.datetime.fromtimestamp( - stats.st_mtime - ).isoformat(), + "date_created": datetime.datetime.fromtimestamp(stats.st_mtime).isoformat(), "file_location": image_path, "file_size": stats.st_size, "width": 0, diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py index 44f801e93..a8f942eaa 100644 --- a/backend/app/utils/memory_clustering.py +++ b/backend/app/utils/memory_clustering.py @@ -11,9 +11,8 @@ Date: 2025-12-14 """ -import math -from datetime import datetime, timedelta -from typing import List, Dict, Any, Optional, Tuple +from datetime import datetime +from typing import List, Dict, Any, Optional from collections import defaultdict import numpy as np @@ -66,9 +65,7 @@ } -def find_nearest_city( - latitude: float, longitude: float, max_distance_km: float = 50.0 -) -> Optional[str]: +def 
find_nearest_city(latitude: float, longitude: float, max_distance_km: float = 50.0) -> Optional[str]: """ Find the nearest known city to given coordinates. @@ -135,11 +132,7 @@ def __init__( EARTH_RADIUS_KM = 6371.0 self.location_eps_radians = location_radius_km / EARTH_RADIUS_KM - logger.info( - f"MemoryClustering initialized: radius={location_radius_km}km, " - f"date_tolerance={date_tolerance_days}days, " - f"min_images={min_images_per_memory}" - ) + logger.info(f"MemoryClustering initialized: radius={location_radius_km}km, date_tolerance={date_tolerance_days}days, min_images={min_images_per_memory}") def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ @@ -183,9 +176,7 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] # Has neither GPS nor date → skip skipped_count += 1 - logger.info( - f"GPS-based: {len(gps_images)}, Date-only: {len(date_only_images)}, Skipped: {skipped_count}" - ) + logger.info(f"GPS-based: {len(gps_images)}, Date-only: {len(date_only_images)}, Skipped: {skipped_count}") memories = [] @@ -209,9 +200,7 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] logger.error(f"Clustering failed: {e}", exc_info=True) return [] - def _cluster_location_images( - self, images: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: + def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ SIMPLIFIED: Use existing DBSCAN clustering for GPS images. """ @@ -227,9 +216,7 @@ def _cluster_location_images( temporal_clusters = self._cluster_by_date(cluster) for temp_cluster in temporal_clusters: if len(temp_cluster) >= self.min_images_per_memory: - memory = self._create_simple_memory( - temp_cluster, memory_type="location" - ) + memory = self._create_simple_memory(temp_cluster, memory_type="location") memories.append(memory) return memories @@ -237,9 +224,7 @@ def _cluster_location_images( logger.error(f"Location clustering failed: {e}") return [] - def _cluster_date_images( - self, images: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: + def _cluster_date_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ FLEXIBLE: Group date-only images by year-month. Uses min_images_per_memory (default: 2) as threshold. @@ -272,9 +257,7 @@ def _cluster_date_images( memories = [] for month_key, month_images in monthly_groups.items(): if len(month_images) >= self.min_images_per_memory: - memory = self._create_simple_memory( - month_images, memory_type="date" - ) + memory = self._create_simple_memory(month_images, memory_type="date") if memory: memories.append(memory) @@ -283,9 +266,7 @@ def _cluster_date_images( logger.error(f"Date clustering failed: {e}") return [] - def _create_simple_memory( - self, images: List[Dict[str, Any]], memory_type: str = "location" - ) -> Dict[str, Any]: + def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = "location") -> Dict[str, Any]: """ SIMPLIFIED: Create a memory object with minimal fields. Ensures all datetime objects are converted to ISO strings. 
@@ -295,23 +276,15 @@ def _create_simple_memory( cleaned_images = [] for img in images: img_copy = img.copy() - if img_copy.get("captured_at") and isinstance( - img_copy["captured_at"], datetime - ): + if img_copy.get("captured_at") and isinstance(img_copy["captured_at"], datetime): img_copy["captured_at"] = img_copy["captured_at"].isoformat() cleaned_images.append(img_copy) # Sort by date - sorted_images = sorted( - cleaned_images, key=lambda x: x.get("captured_at", "") - ) + sorted_images = sorted(cleaned_images, key=lambda x: x.get("captured_at", "")) # Get date range - dates = [ - img.get("captured_at") - for img in sorted_images - if img.get("captured_at") - ] + dates = [img.get("captured_at") for img in sorted_images if img.get("captured_at")] if dates: if isinstance(dates[0], str): dates = [datetime.fromisoformat(d.replace("Z", "")) for d in dates] @@ -378,9 +351,7 @@ def _create_simple_memory( logger.error(f"Memory creation failed: {e}") return None - def _cluster_gps_based_memories( - self, images: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: + def _cluster_gps_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ Cluster images with GPS data into location-based memories. This is the original clustering logic. @@ -417,9 +388,7 @@ def _cluster_gps_based_memories( return memories - def _cluster_date_based_memories( - self, images: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: + def _cluster_date_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ Cluster images WITHOUT GPS data into date-based memories. Groups photos by capture date/time only (screenshots, downloads, edits, etc.) @@ -441,9 +410,7 @@ def _cluster_date_based_memories( if captured_at: if isinstance(captured_at, str): try: - captured_at = datetime.fromisoformat( - captured_at.replace("Z", "") - ) + captured_at = datetime.fromisoformat(captured_at.replace("Z", "")) img_copy["captured_at"] = captured_at except Exception: # Try alternative formats @@ -459,9 +426,7 @@ def _cluster_date_based_memories( except Exception: continue else: - logger.debug( - f"Could not parse date for image {img.get('id')}" - ) + logger.debug(f"Could not parse date for image {img.get('id')}") continue elif isinstance(captured_at, datetime): img_copy["captured_at"] = captured_at @@ -537,11 +502,7 @@ def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, A elif days <= 31: title = date_start.strftime("%B %Y") else: - title = ( - date_start.strftime("%B - %B %Y") - if date_start.month != date_end.month - else date_start.strftime("%B %Y") - ) + title = date_start.strftime("%B - %B %Y") if date_start.month != date_end.month else date_start.strftime("%B %Y") else: title = "Memories Collection" @@ -553,19 +514,13 @@ def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, A thumbnail_image_id = images[thumbnail_idx]["id"] # Create memory ID (use timestamp only) - memory_id = ( - f"mem_date_{date_start.strftime('%Y%m%d')}" - if date_start - else f"mem_date_unknown_{hash(tuple(img['id'] for img in images[:5]))}" - ) + memory_id = f"mem_date_{date_start.strftime('%Y%m%d')}" if date_start else f"mem_date_unknown_{hash(tuple(img['id'] for img in images[:5]))}" # Convert captured_at datetime objects to ISO strings serialized_images = [] for img in images: img_copy = img.copy() - if img_copy.get("captured_at") and isinstance( - img_copy["captured_at"], datetime - ): + if img_copy.get("captured_at") and isinstance(img_copy["captured_at"], datetime): 
img_copy["captured_at"] = img_copy["captured_at"].isoformat() serialized_images.append(img_copy) @@ -583,9 +538,7 @@ def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, A "center_lon": 0.0, # No GPS data } - def _filter_valid_images( - self, images: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: + def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ Filter images that have valid location and datetime data. @@ -611,11 +564,9 @@ def _filter_valid_images( if isinstance(captured_at, str): try: # SQLite returns ISO format: "YYYY-MM-DDTHH:MM:SS" - captured_at = datetime.fromisoformat( - captured_at.replace("Z", "") - ) + captured_at = datetime.fromisoformat(captured_at.replace("Z", "")) img_copy["captured_at"] = captured_at - except Exception as e: + except Exception: # Try alternative formats for fmt in [ "%Y-%m-%d %H:%M:%S", @@ -630,9 +581,7 @@ def _filter_valid_images( continue else: # Could not parse date, but location is still valid - logger.debug( - f"Could not parse date for image {img.get('id')}: {captured_at}" - ) + logger.debug(f"Could not parse date for image {img.get('id')}: {captured_at}") elif isinstance(captured_at, datetime): img_copy["captured_at"] = captured_at @@ -644,9 +593,7 @@ def _filter_valid_images( return valid_images - def _cluster_by_location( - self, images: List[Dict[str, Any]] - ) -> List[List[Dict[str, Any]]]: + def _cluster_by_location(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: """ Cluster images by geographic location using DBSCAN. @@ -690,9 +637,7 @@ def _cluster_by_location( return list(clusters.values()) - def _cluster_by_date( - self, images: List[Dict[str, Any]] - ) -> List[List[Dict[str, Any]]]: + def _cluster_by_date(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: """ Cluster images by date within a location cluster. @@ -790,9 +735,7 @@ def _create_memory(self, images: List[Dict[str, Any]]) -> Dict[str, Any]: serialized_images = [] for img in images: img_copy = img.copy() - if img_copy.get("captured_at") and isinstance( - img_copy["captured_at"], datetime - ): + if img_copy.get("captured_at") and isinstance(img_copy["captured_at"], datetime): img_copy["captured_at"] = img_copy["captured_at"].isoformat() serialized_images.append(img_copy) @@ -827,17 +770,13 @@ def _reverse_geocode(self, latitude: float, longitude: float) -> str: city_name = find_nearest_city(latitude, longitude, max_distance_km=50.0) if city_name: - logger.debug( - f"Mapped coordinates ({latitude:.4f}, {longitude:.4f}) to {city_name}" - ) + logger.debug(f"Mapped coordinates ({latitude:.4f}, {longitude:.4f}) to {city_name}") return city_name # Fallback: Return formatted coordinates return f"{latitude:.4f}°, {longitude:.4f}°" - def _generate_title( - self, location_name: str, date: Optional[datetime], image_count: int - ) -> str: + def _generate_title(self, location_name: str, date: Optional[datetime], image_count: int) -> str: """ Generate a title for the memory. @@ -881,9 +820,7 @@ def _generate_description( else: return f"{image_count} photos" - def _generate_memory_id( - self, latitude: float, longitude: float, date: Optional[datetime] - ) -> str: + def _generate_memory_id(self, latitude: float, longitude: float, date: Optional[datetime]) -> str: """ Generate a unique ID for the memory. 
diff --git a/backend/app/utils/memory_monitor.py b/backend/app/utils/memory_monitor.py index 8078f4b41..00fa3389b 100644 --- a/backend/app/utils/memory_monitor.py +++ b/backend/app/utils/memory_monitor.py @@ -34,13 +34,7 @@ def wrapper(*args, **kwargs): end_time = time.time() # Log memory usage - logger.info( - f"Memory usage for {func.__name__}:\n" - f" Before: {mem_before:.2f}MB\n" - f" After: {mem_after:.2f}MB\n" - f" Difference: {mem_after - mem_before:.2f}MB\n" - f" Execution time: {(end_time - start_time)*1000:.2f}ms" - ) + logger.info(f"Memory usage for {func.__name__}:\n Before: {mem_before:.2f}MB\n After: {mem_after:.2f}MB\n Difference: {mem_after - mem_before:.2f}MB\n Execution time: {(end_time - start_time) * 1000:.2f}ms") return result diff --git a/backend/app/utils/verify_memories_setup.py b/backend/app/utils/verify_memories_setup.py index f7e55ef8f..bb4bb48d8 100644 --- a/backend/app/utils/verify_memories_setup.py +++ b/backend/app/utils/verify_memories_setup.py @@ -7,7 +7,6 @@ """ import sys -import os import sqlite3 import importlib from pathlib import Path @@ -25,9 +24,9 @@ class Colors: def print_header(text): """Print section header""" - print(f"\n{Colors.BOLD}{Colors.BLUE}{'='*60}{Colors.RESET}") + print(f"\n{Colors.BOLD}{Colors.BLUE}{'=' * 60}{Colors.RESET}") print(f"{Colors.BOLD}{Colors.BLUE}{text}{Colors.RESET}") - print(f"{Colors.BOLD}{Colors.BLUE}{'='*60}{Colors.RESET}\n") + print(f"{Colors.BOLD}{Colors.BLUE}{'=' * 60}{Colors.RESET}\n") def print_success(text): @@ -70,9 +69,7 @@ def check_dependencies(): version = getattr(module, "__version__", "Unknown") if expected_version and version != expected_version: - print_warning( - f"{package} installed (v{version}), expected v{expected_version}" - ) + print_warning(f"{package} installed (v{version}), expected v{expected_version}") else: print_success(f"{package} v{version}") except ImportError: @@ -127,9 +124,7 @@ def check_database_schema(): cursor = conn.cursor() # Check if images table exists - cursor.execute( - "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" - ) + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") if not cursor.fetchone(): print_error("Table 'images' does not exist") conn.close() @@ -172,9 +167,7 @@ def check_database_schema(): if index_name in indexes: print_success(f"Index '{index_name}'") else: - print_warning( - f"Index '{index_name}' not found (recommended for performance)" - ) + print_warning(f"Index '{index_name}' not found (recommended for performance)") conn.close() return all_columns_exist @@ -251,28 +244,18 @@ def print_summary(results): for check_name, result in results.items(): status = "✓ PASS" if result else ("⚠ WARNING" if result is None else "✗ FAIL") - color = ( - Colors.GREEN - if result - else (Colors.YELLOW if result is None else Colors.RED) - ) + color = Colors.GREEN if result else (Colors.YELLOW if result is None else Colors.RED) print(f"{color}{status}{Colors.RESET} - {check_name}") print() if all_passed: - print( - f"{Colors.BOLD}{Colors.GREEN}🎉 All checks passed! Memories feature is ready to use.{Colors.RESET}" - ) + print(f"{Colors.BOLD}{Colors.GREEN}🎉 All checks passed! Memories feature is ready to use.{Colors.RESET}") print_info("Next steps:") print_info("1. Start the backend: cd backend && ./run.sh") - print_info( - "2. Run metadata extraction: python -m app.utils.extract_location_metadata" - ) + print_info("2. Run metadata extraction: python -m app.utils.extract_location_metadata") print_info("3. 
Test API endpoints: see MEMORIES_TESTING_GUIDE.md") else: - print( - f"{Colors.BOLD}{Colors.RED}❌ Some checks failed. Please fix the issues above.{Colors.RESET}" - ) + print(f"{Colors.BOLD}{Colors.RED}❌ Some checks failed. Please fix the issues above.{Colors.RESET}") print_info("See MEMORIES_README.md for setup instructions") print() diff --git a/backend/extract_metadata_simple.py b/backend/extract_metadata_simple.py index 2cbccda94..6c60c478b 100644 --- a/backend/extract_metadata_simple.py +++ b/backend/extract_metadata_simple.py @@ -8,97 +8,89 @@ from pathlib import Path # Database path -DB_PATH = Path(__file__).parent / 'app' / 'database' / 'PictoPy.db' +DB_PATH = Path(__file__).parent / "app" / "database" / "PictoPy.db" + def extract_and_update(): """Extract location and datetime from metadata JSON and update database columns.""" - + print("=" * 70) print("Starting metadata extraction...") print("=" * 70) - + conn = sqlite3.connect(DB_PATH) cursor = conn.cursor() - + # Get all images with metadata cursor.execute("SELECT id, metadata FROM images WHERE metadata IS NOT NULL AND metadata != ''") images = cursor.fetchall() - + print(f"\nFound {len(images)} images with metadata") - + updated_count = 0 location_count = 0 datetime_count = 0 both_count = 0 - + for image_id, metadata_str in images: try: # Parse JSON metadata metadata = json.loads(metadata_str) - + # Extract values - latitude = metadata.get('latitude') - longitude = metadata.get('longitude') - date_created = metadata.get('date_created') - + latitude = metadata.get("latitude") + longitude = metadata.get("longitude") + date_created = metadata.get("date_created") + has_location = latitude is not None and longitude is not None has_datetime = date_created is not None - + if has_location or has_datetime: # Update the database if has_location and has_datetime: - cursor.execute( - "UPDATE images SET latitude = ?, longitude = ?, captured_at = ? WHERE id = ?", - (latitude, longitude, date_created, image_id) - ) + cursor.execute("UPDATE images SET latitude = ?, longitude = ?, captured_at = ? WHERE id = ?", (latitude, longitude, date_created, image_id)) both_count += 1 elif has_location: - cursor.execute( - "UPDATE images SET latitude = ?, longitude = ? WHERE id = ?", - (latitude, longitude, image_id) - ) + cursor.execute("UPDATE images SET latitude = ?, longitude = ? WHERE id = ?", (latitude, longitude, image_id)) location_count += 1 elif has_datetime: - cursor.execute( - "UPDATE images SET captured_at = ? WHERE id = ?", - (date_created, image_id) - ) + cursor.execute("UPDATE images SET captured_at = ? 
WHERE id = ?", (date_created, image_id)) datetime_count += 1 - + updated_count += 1 - + # Show progress every 50 images if updated_count % 50 == 0: print(f" Processed {updated_count} images...") - + except Exception as e: print(f" Error processing image {image_id}: {e}") continue - + # Commit changes conn.commit() - + # Get final statistics cursor.execute("SELECT COUNT(*) FROM images WHERE latitude IS NOT NULL") total_with_location = cursor.fetchone()[0] - + cursor.execute("SELECT COUNT(*) FROM images WHERE captured_at IS NOT NULL") total_with_datetime = cursor.fetchone()[0] - + cursor.execute("SELECT COUNT(*) FROM images WHERE latitude IS NOT NULL AND captured_at IS NOT NULL") total_with_both = cursor.fetchone()[0] - + conn.close() - + # Print summary print("\n" + "=" * 70) print("METADATA EXTRACTION SUMMARY") print("=" * 70) print(f"Total images processed: {len(images)}") print(f"Images updated: {updated_count}") - print(f"Images with location data: {total_with_location} ({100*total_with_location/len(images):.1f}%)") - print(f"Images with datetime: {total_with_datetime} ({100*total_with_datetime/len(images):.1f}%)") - print(f"Images with both: {total_with_both} ({100*total_with_both/len(images):.1f}%)") + print(f"Images with location data: {total_with_location} ({100 * total_with_location / len(images):.1f}%)") + print(f"Images with datetime: {total_with_datetime} ({100 * total_with_datetime / len(images):.1f}%)") + print(f"Images with both: {total_with_both} ({100 * total_with_both / len(images):.1f}%)") print(f"Images skipped (no data): {len(images) - updated_count}") print("=" * 70) print("\n✅ Migration completed successfully!") @@ -107,5 +99,6 @@ def extract_and_update(): print(" 2. Test API: curl -X POST 'http://localhost:8000/api/memories/generate'") print() -if __name__ == '__main__': + +if __name__ == "__main__": extract_and_update() diff --git a/backend/main.py b/backend/main.py index 1f4c3f70b..670db5cfa 100644 --- a/backend/main.py +++ b/backend/main.py @@ -46,19 +46,19 @@ async def lifespan(app: FastAPI): generate_openapi_json() db_create_folders_table() db_create_images_table() - + # Only run migrations in the primary process or when explicitly enabled should_run_migrations = os.getenv("RUN_MIGRATIONS", "true").lower() == "true" if should_run_migrations: try: - db_migrate_add_memories_columns() + db_migrate_add_memories_columns() logger.info("Database migrations completed successfully") except Exception as e: logger.error(f"Failed to run database migrations: {e}", exc_info=True) - + else: logger.info("Skipping migrations (RUN_MIGRATIONS not set or false)") - + db_create_YOLO_classes_table() db_create_clusters_table() # Create clusters table first since faces references it db_create_faces_table() @@ -83,9 +83,7 @@ async def lifespan(app: FastAPI): "name": "PictoPy Postman Collection", "url": "https://www.postman.com/aossie-pictopy/pictopy/overview", }, - servers=[ - {"url": "http://localhost:52123", "description": "Local Development server"} - ], + servers=[{"url": "http://localhost:52123", "description": "Local Development server"}], ) @@ -106,9 +104,7 @@ def generate_openapi_json(): openapi_schema["info"]["contact"] = app.contact project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) - openapi_path = os.path.join( - project_root, "docs", "backend", "backend_python", "openapi.json" - ) + openapi_path = os.path.join(project_root, "docs", "backend", "backend_python", "openapi.json") os.makedirs(os.path.dirname(openapi_path), exist_ok=True) @@ -138,17 
+134,10 @@ async def root(): app.include_router(folders_router, prefix="/folders", tags=["Folders"]) app.include_router(albums_router, prefix="/albums", tags=["Albums"]) app.include_router(images_router, prefix="/images", tags=["Images"]) -app.include_router( - face_clusters_router, prefix="/face-clusters", tags=["Face Clusters"] -) -app.include_router( - user_preferences_router, prefix="/user-preferences", tags=["User Preferences"] -) +app.include_router(face_clusters_router, prefix="/face-clusters", tags=["Face Clusters"]) +app.include_router(user_preferences_router, prefix="/user-preferences", tags=["User Preferences"]) app.include_router(memories_router) # Memories router (prefix already defined in router) -logger.info("✅ All routes initialized") -logger.info("✅ Memories feature enabled at /api/memories") - # Entry point for running with: python3 main.py if __name__ == "__main__": diff --git a/backend/migrate_add_memories_columns.py b/backend/migrate_add_memories_columns.py index bd2c47c2c..60cecb97c 100644 --- a/backend/migrate_add_memories_columns.py +++ b/backend/migrate_add_memories_columns.py @@ -4,7 +4,7 @@ This script adds: - latitude (REAL) column -- longitude (REAL) column +- longitude (REAL) column - captured_at (DATETIME) column - Performance indexes for these columns @@ -17,35 +17,42 @@ from pathlib import Path import sys + # ANSI color codes for terminal output class Colors: - GREEN = '\033[92m' - RED = '\033[91m' - YELLOW = '\033[93m' - BLUE = '\033[94m' - BOLD = '\033[1m' - RESET = '\033[0m' + GREEN = "\033[92m" + RED = "\033[91m" + YELLOW = "\033[93m" + BLUE = "\033[94m" + BOLD = "\033[1m" + RESET = "\033[0m" + + +DATABASE_PATH = Path(__file__).parent / "app" / "database" / "PictoPy.db" -DATABASE_PATH = Path(__file__).parent / 'app' / 'database' / 'PictoPy.db' def print_header(text): """Print section header""" - print(f"\n{Colors.BOLD}{Colors.BLUE}{'='*70}{Colors.RESET}") + print(f"\n{Colors.BOLD}{Colors.BLUE}{'=' * 70}{Colors.RESET}") print(f"{Colors.BOLD}{Colors.BLUE}{text}{Colors.RESET}") - print(f"{Colors.BOLD}{Colors.BLUE}{'='*70}{Colors.RESET}\n") + print(f"{Colors.BOLD}{Colors.BLUE}{'=' * 70}{Colors.RESET}\n") + def print_success(text): """Print success message""" print(f"{Colors.GREEN}✓ {text}{Colors.RESET}") + def print_error(text): """Print error message""" print(f"{Colors.RED}✗ {text}{Colors.RESET}") + def print_info(text): """Print info message""" print(f" {text}") + def check_database_exists(): """Check if database file exists""" if not DATABASE_PATH.exists(): @@ -53,10 +60,11 @@ def check_database_exists(): print_info("The database will be created when you first run the app.") print_info("Run this migration script AFTER the database is created.") return False - + print_success(f"Database found at: {DATABASE_PATH}") return True + def check_images_table(cursor): """Check if images table exists""" cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") @@ -64,71 +72,75 @@ def check_images_table(cursor): print_error("Table 'images' does not exist") print_info("Run the app first to create the database schema.") return False - + print_success("Table 'images' exists") return True + def get_existing_columns(cursor): """Get list of existing columns in images table""" cursor.execute("PRAGMA table_info(images)") columns = {row[1]: row[2] for row in cursor.fetchall()} return columns + def add_columns(cursor): """Add new columns if they don't exist""" print_header("Adding Memories Feature Columns") - + columns = 
get_existing_columns(cursor) changes_made = False - + # Add latitude column - if 'latitude' not in columns: + if "latitude" not in columns: print_info("Adding column: latitude (REAL)") cursor.execute("ALTER TABLE images ADD COLUMN latitude REAL") print_success("Column 'latitude' added") changes_made = True else: print_success(f"Column 'latitude' already exists ({columns['latitude']})") - + # Add longitude column - if 'longitude' not in columns: + if "longitude" not in columns: print_info("Adding column: longitude (REAL)") cursor.execute("ALTER TABLE images ADD COLUMN longitude REAL") print_success("Column 'longitude' added") changes_made = True else: print_success(f"Column 'longitude' already exists ({columns['longitude']})") - + # Add captured_at column - if 'captured_at' not in columns: + if "captured_at" not in columns: print_info("Adding column: captured_at (DATETIME)") cursor.execute("ALTER TABLE images ADD COLUMN captured_at DATETIME") print_success("Column 'captured_at' added") changes_made = True else: print_success(f"Column 'captured_at' already exists ({columns['captured_at']})") - + return changes_made + def create_indexes(cursor): """Create indexes for performance""" print_header("Creating Performance Indexes") - + indexes = [ ("ix_images_latitude", "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)"), ("ix_images_longitude", "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)"), ("ix_images_captured_at", "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)"), ("ix_images_favourite_captured_at", "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)"), ] - + for index_name, sql in indexes: cursor.execute(sql) print_success(f"Index '{index_name}' created") + def show_final_schema(cursor): """Display final table schema""" print_header("Final 'images' Table Schema") - + cursor.execute("PRAGMA table_info(images)") print(f"\n{Colors.BOLD}Columns:{Colors.RESET}") for row in cursor.fetchall(): @@ -136,7 +148,7 @@ def show_final_schema(cursor): nullable = "NOT NULL" if not_null else "NULL" primary = " PRIMARY KEY" if pk else "" print(f" {col_name:<20} {col_type:<15} {nullable:<10}{primary}") - + cursor.execute("SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='images'") indexes = cursor.fetchall() print(f"\n{Colors.BOLD}Indexes:{Colors.RESET}") @@ -144,14 +156,15 @@ def show_final_schema(cursor): print(f" - {index[0]}") print() + def migrate(): """Run the migration""" print_header("PictoPy Memories Feature - Database Migration") - + # Check database exists if not check_database_exists(): sys.exit(1) - + conn = None try: # Connect to database @@ -159,23 +172,23 @@ def migrate(): conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() print_success("Connected successfully") - + # Check images table exists if not check_images_table(cursor): sys.exit(1) - + # Add columns changes_made = add_columns(cursor) - + # Create indexes create_indexes(cursor) - + # Commit changes conn.commit() - + # Show final schema show_final_schema(cursor) - + # Summary print_header("Migration Summary") if changes_made: @@ -189,29 +202,30 @@ def migrate(): else: print(f"{Colors.BOLD}{Colors.GREEN}✅ Database is already up to date!{Colors.RESET}\n") print_info("All required columns and indexes already exist.") - + print(f"\n{Colors.BOLD}Next Steps:{Colors.RESET}") print_info("1. Run metadata extraction: python -m app.utils.extract_location_metadata") print_info("2. 
Verify setup: python -m app.utils.verify_memories_setup") print_info("3. Start the backend: ./run.sh") print() - + except sqlite3.Error as e: print_error(f"SQLite error: {e}") if conn: conn.rollback() sys.exit(1) - + except Exception as e: print_error(f"Unexpected error: {e}") if conn: conn.rollback() sys.exit(1) - + finally: if conn: conn.close() print_info("Database connection closed") -if __name__ == '__main__': + +if __name__ == "__main__": migrate() diff --git a/backend/test_auto_gps_extraction.py b/backend/test_auto_gps_extraction.py index 08f43db0c..ba63c364c 100644 --- a/backend/test_auto_gps_extraction.py +++ b/backend/test_auto_gps_extraction.py @@ -3,7 +3,7 @@ This script simulates adding a new image and verifies that: 1. GPS coordinates are automatically extracted -2. Capture datetime is automatically extracted +2. Capture datetime is automatically extracted 3. Data is properly saved to the database Usage: @@ -19,56 +19,51 @@ from app.utils.extract_location_metadata import MetadataExtractor + def test_gps_extraction(): """Test the GPS extraction functionality.""" print("=" * 70) print("Testing Automatic GPS Extraction") print("=" * 70) - + extractor = MetadataExtractor() - + # Test case 1: Sample metadata with GPS - sample_metadata = { - "latitude": 28.6139, - "longitude": 77.2090, - "CreateDate": "2024:11:15 14:30:00" - } - + sample_metadata = {"latitude": 28.6139, "longitude": 77.2090, "CreateDate": "2024:11:15 14:30:00"} + metadata_json = json.dumps(sample_metadata) lat, lon, captured_at = extractor.extract_all(metadata_json) - + print("\nTest Case 1: Metadata with GPS") print(f"Input: {sample_metadata}") - print(f"Extracted:") + print("Extracted:") print(f" - Latitude: {lat}") print(f" - Longitude: {lon}") print(f" - Captured At: {captured_at}") - + if lat and lon: print("✅ GPS extraction working!") else: print("❌ GPS extraction failed") - + # Test case 2: Metadata without GPS - sample_metadata_no_gps = { - "CreateDate": "2024:11:15 14:30:00" - } - + sample_metadata_no_gps = {"CreateDate": "2024:11:15 14:30:00"} + metadata_json_no_gps = json.dumps(sample_metadata_no_gps) lat2, lon2, captured_at2 = extractor.extract_all(metadata_json_no_gps) - + print("\nTest Case 2: Metadata without GPS") print(f"Input: {sample_metadata_no_gps}") - print(f"Extracted:") + print("Extracted:") print(f" - Latitude: {lat2}") print(f" - Longitude: {lon2}") print(f" - Captured At: {captured_at2}") - + if lat2 is None and lon2 is None and captured_at2: print("✅ Correctly handles images without GPS") else: print("❌ Unexpected behavior for images without GPS") - + print("\n" + "=" * 70) print("INTEGRATION STATUS:") print("=" * 70) @@ -81,5 +76,6 @@ def test_gps_extraction(): print("3. 
View Memories page to see the new images appear") print("=" * 70) + if __name__ == "__main__": test_gps_extraction() diff --git a/backend/test_memories_api.py b/backend/test_memories_api.py index aca15eeb3..257481e42 100644 --- a/backend/test_memories_api.py +++ b/backend/test_memories_api.py @@ -9,47 +9,39 @@ import requests import json -from typing import Dict, Any BASE_URL = "http://localhost:8000/api/memories" def print_response(endpoint: str, response: requests.Response): """Pretty print API response.""" - print("\n" + "="*70) - print(f"🔍 Testing: {endpoint}") - print("="*70) + print("\n" + "=" * 70) + print(f"Testing: {endpoint}") + print("=" * 70) print(f"Status Code: {response.status_code}") - + if response.status_code == 200: - print("✅ SUCCESS") + print("SUCCESS") data = response.json() print("\nResponse Preview:") print(json.dumps(data, indent=2)[:500] + "...") else: - print("❌ FAILED") + print("FAILED") print(f"Error: {response.text}") - print("="*70) + print("=" * 70) def test_generate_memories(): """Test POST /api/memories/generate""" - print("\n🚀 Testing: Generate Memories") - - response = requests.post( - f"{BASE_URL}/generate", - params={ - "location_radius_km": 5.0, - "date_tolerance_days": 3, - "min_images": 2 - } - ) - + print("\nTesting: Generate Memories") + + response = requests.post(f"{BASE_URL}/generate", params={"location_radius_km": 5.0, "date_tolerance_days": 3, "min_images": 2}) + print_response("POST /api/memories/generate", response) - + if response.status_code == 200: data = response.json() - print(f"\n📊 Summary:") + print("\nSummary:") print(f" - Memory Count: {data.get('memory_count', 0)}") print(f" - Image Count: {data.get('image_count', 0)}") print(f" - Message: {data.get('message', 'N/A')}") @@ -57,37 +49,30 @@ def test_generate_memories(): def test_timeline(): """Test GET /api/memories/timeline""" - print("\n🚀 Testing: Timeline") - - response = requests.get( - f"{BASE_URL}/timeline", - params={ - "days": 30, - "location_radius_km": 5.0, - "date_tolerance_days": 3 - } - ) - + print("\nTesting: Timeline") + + response = requests.get(f"{BASE_URL}/timeline", params={"days": 30, "location_radius_km": 5.0, "date_tolerance_days": 3}) + print_response("GET /api/memories/timeline", response) - + if response.status_code == 200: data = response.json() - print(f"\n📊 Summary:") + print("\nSummary:") print(f" - Memory Count: {data.get('memory_count', 0)}") print(f" - Date Range: {data.get('date_range', {})}") def test_on_this_day(): """Test GET /api/memories/on-this-day""" - print("\n🚀 Testing: On This Day") - + print("\nTesting: On This Day") + response = requests.get(f"{BASE_URL}/on-this-day") - + print_response("GET /api/memories/on-this-day", response) - + if response.status_code == 200: data = response.json() - print(f"\n📊 Summary:") + print("\nSummary:") print(f" - Today: {data.get('today', 'N/A')}") print(f" - Years Found: {data.get('years', [])}") print(f" - Image Count: {data.get('image_count', 0)}") @@ -95,23 +80,17 @@ def test_on_this_day(): def test_locations(): """Test GET /api/memories/locations""" - print("\n🚀 Testing: Locations") - - response = requests.get( - f"{BASE_URL}/locations", - params={ - "location_radius_km": 5.0, - "max_sample_images": 3 - } - ) - + print("\nTesting: Locations") + + response = requests.get(f"{BASE_URL}/locations", params={"location_radius_km": 5.0, "max_sample_images": 3}) + print_response("GET /api/memories/locations", response) - + if response.status_code == 200: data = response.json() - print(f"\n📊 Summary:") + 
print("\nSummary:") print(f" - Location Count: {data.get('location_count', 0)}") - if data.get('locations'): + if data.get("locations"): print(f" - Top Location: {data['locations'][0].get('location_name', 'N/A')}") print(f" - Photos at Top Location: {data['locations'][0].get('image_count', 0)}") @@ -121,14 +100,14 @@ def check_server(): try: response = requests.get("http://localhost:8000/health", timeout=2) if response.status_code == 200: - print("✅ Server is running!") + print("Server is running") return True else: - print("⚠️ Server responded but with unexpected status") + print("Server responded but with unexpected status") return False except requests.exceptions.ConnectionError: - print("❌ Server is not running!") - print("\n💡 Start the server with:") + print("Server is not running") + print("\nStart the server with:") print(" cd /Users/harshit/Code/pictopy/PictoPy/backend") print(" python main.py") return False @@ -136,30 +115,31 @@ def check_server(): def main(): """Run all tests.""" - print("\n" + "🎯 " * 20) - print(" MEMORIES API TEST SUITE") - print("🎯 " * 20 + "\n") - + print("\n" + "=" * 70) + print("MEMORIES API TEST SUITE") + print("=" * 70 + "\n") + # Check if server is running if not check_server(): return - - print("\n⏳ Running all tests...\n") - + + print("\nRunning all tests...\n") + try: # Run all tests test_generate_memories() test_timeline() test_on_this_day() test_locations() - - print("\n" + "✅ " * 20) - print(" ALL TESTS COMPLETED!") - print("✅ " * 20 + "\n") - + + print("\n" + "=" * 70) + print("ALL TESTS COMPLETED") + print("=" * 70 + "\n") + except Exception as e: - print(f"\n❌ Test failed with error: {e}") + print(f"\nTest failed with error: {e}") import traceback + traceback.print_exc() diff --git a/backend/tests/test_albums.py b/backend/tests/test_albums.py index cec9f670e..50997cdd1 100644 --- a/backend/tests/test_albums.py +++ b/backend/tests/test_albums.py @@ -68,9 +68,7 @@ class TestAlbumRoutes: ], ) def test_create_album_variants(self, album_data): - with patch("app.routes.albums.db_get_album_by_name") as mock_get_by_name, patch( - "app.routes.albums.db_insert_album" - ) as mock_insert: + with patch("app.routes.albums.db_get_album_by_name") as mock_get_by_name, patch("app.routes.albums.db_insert_album") as mock_insert: mock_get_by_name.return_value = None # No existing album mock_insert.return_value = None @@ -133,13 +131,8 @@ def test_get_all_albums_public_only(self, mock_db_album): assert isinstance(json_response["albums"], list) assert len(json_response["albums"]) == 1 assert json_response["albums"][0]["album_id"] == mock_db_album["album_id"] - assert ( - json_response["albums"][0]["album_name"] == mock_db_album["album_name"] - ) - assert ( - json_response["albums"][0]["description"] - == mock_db_album["description"] - ) + assert json_response["albums"][0]["album_name"] == mock_db_album["album_name"] + assert json_response["albums"][0]["description"] == mock_db_album["description"] assert json_response["albums"][0]["is_hidden"] == mock_db_album["is_hidden"] mock_get_all.assert_called_once_with(False) @@ -291,14 +284,8 @@ def test_get_album_by_id_not_found(self): ), ], ) - def test_update_album( - self, album_data, request_data, verify_password_return, expected_status - ): - with patch("app.routes.albums.db_get_album") as mock_get_album, patch( - "app.routes.albums.db_update_album" - ) as mock_update_album, patch( - "app.routes.albums.verify_album_password" - ) as mock_verify: + def test_update_album(self, album_data, request_data, 
verify_password_return, expected_status): + with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_update_album") as mock_update_album, patch("app.routes.albums.verify_album_password") as mock_verify: mock_get_album.return_value = album_data mock_verify.return_value = verify_password_return @@ -325,9 +312,7 @@ def test_delete_album_success(self, mock_db_album): mock_db_album["password_hash"], ) - with patch("app.routes.albums.db_get_album") as mock_get_album, patch( - "app.routes.albums.db_delete_album" - ) as mock_delete_album: + with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_delete_album") as mock_delete_album: mock_get_album.return_value = album_tuple mock_delete_album.return_value = None @@ -366,9 +351,7 @@ def test_add_images_to_album_success(self, mock_db_album): mock_db_album["password_hash"], ) - with patch("app.routes.albums.db_get_album") as mock_get_album, patch( - "app.routes.albums.db_add_images_to_album" - ) as mock_add_images: + with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_add_images_to_album") as mock_add_images: mock_get_album.return_value = album_tuple mock_add_images.return_value = None @@ -401,9 +384,7 @@ def test_get_album_images_success(self, mock_db_album): mock_db_album["password_hash"], ) - with patch("app.routes.albums.db_get_album") as mock_get_album, patch( - "app.routes.albums.db_get_album_images" - ) as mock_get_images: + with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_get_album_images") as mock_get_images: mock_get_album.return_value = album_tuple mock_get_images.return_value = expected_image_ids @@ -433,9 +414,7 @@ def test_remove_image_from_album_success(self, mock_db_album): mock_db_album["password_hash"], ) - with patch("app.routes.albums.db_get_album") as mock_get_album, patch( - "app.routes.albums.db_remove_image_from_album" - ) as mock_remove: + with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_remove_image_from_album") as mock_remove: mock_get_album.return_value = album_tuple mock_remove.return_value = None @@ -457,18 +436,12 @@ def test_remove_multiple_images_from_album(self, mock_db_album): album_id = mock_db_album["album_id"] image_ids_to_remove = {"image_ids": [str(uuid.uuid4()), str(uuid.uuid4())]} - with patch("app.routes.albums.db_get_album") as mock_get, patch( - "app.routes.albums.db_remove_images_from_album" - ) as mock_remove_bulk: + with patch("app.routes.albums.db_get_album") as mock_get, patch("app.routes.albums.db_remove_images_from_album") as mock_remove_bulk: mock_get.return_value = tuple(mock_db_album.values()) - response = client.request( - "DELETE", f"/albums/{album_id}/images", json=image_ids_to_remove - ) + response = client.request("DELETE", f"/albums/{album_id}/images", json=image_ids_to_remove) assert response.status_code == 200 json_response = response.json() assert json_response["success"] is True assert str(len(image_ids_to_remove["image_ids"])) in json_response["msg"] mock_get.assert_called_once_with(album_id) - mock_remove_bulk.assert_called_once_with( - album_id, image_ids_to_remove["image_ids"] - ) + mock_remove_bulk.assert_called_once_with(album_id, image_ids_to_remove["image_ids"]) diff --git a/backend/tests/test_face_clusters.py b/backend/tests/test_face_clusters.py index 1e6f7c398..e2bae970e 100644 --- a/backend/tests/test_face_clusters.py +++ b/backend/tests/test_face_clusters.py @@ -95,9 +95,7 @@ class 
TestFaceClustersAPI: @patch("app.routes.face_clusters.db_update_cluster") @patch("app.routes.face_clusters.db_get_cluster_by_id") - def test_rename_cluster_success( - self, mock_get_cluster, mock_update_cluster, sample_rename_request - ): + def test_rename_cluster_success(self, mock_get_cluster, mock_update_cluster, sample_rename_request): """Test successfully renaming a cluster.""" cluster_id = "cluster_123" mock_get_cluster.return_value = { @@ -106,9 +104,7 @@ def test_rename_cluster_success( } mock_update_cluster.return_value = True - response = client.put( - f"/face_clusters/{cluster_id}", json=sample_rename_request - ) + response = client.put(f"/face_clusters/{cluster_id}", json=sample_rename_request) assert response.status_code == 200 data = response.json() @@ -118,9 +114,7 @@ def test_rename_cluster_success( assert data["data"]["cluster_name"] == sample_rename_request["cluster_name"] mock_get_cluster.assert_called_once_with(cluster_id) - mock_update_cluster.assert_called_once_with( - cluster_id=cluster_id, cluster_name=sample_rename_request["cluster_name"] - ) + mock_update_cluster.assert_called_once_with(cluster_id=cluster_id, cluster_name=sample_rename_request["cluster_name"]) @patch("app.routes.face_clusters.db_get_cluster_by_id") def test_rename_cluster_not_found(self, mock_get_cluster): @@ -197,16 +191,12 @@ def test_rename_cluster_database_error(self, mock_get_cluster): @patch("app.routes.face_clusters.db_update_cluster") @patch("app.routes.face_clusters.db_get_cluster_by_id") - def test_rename_cluster_name_whitespace_trimming( - self, mock_get_cluster, mock_update_cluster, sample_cluster_data - ): + def test_rename_cluster_name_whitespace_trimming(self, mock_get_cluster, mock_update_cluster, sample_cluster_data): """Test that cluster names are properly trimmed of whitespace.""" mock_get_cluster.return_value = sample_cluster_data mock_update_cluster.return_value = True - response = client.put( - "/face_clusters/cluster_123", json={"cluster_name": " John Doe "} - ) + response = client.put("/face_clusters/cluster_123", json={"cluster_name": " John Doe "}) assert response.status_code == 200 response_data = response.json() @@ -222,9 +212,7 @@ def test_rename_cluster_name_whitespace_trimming( # ============================================================================ @patch("app.routes.face_clusters.db_get_all_clusters_with_face_counts") - def test_get_all_clusters_success( - self, mock_get_clusters, sample_clusters_with_counts - ): + def test_get_all_clusters_success(self, mock_get_clusters, sample_clusters_with_counts): """Test successfully retrieving all clusters.""" mock_get_clusters.return_value = sample_clusters_with_counts @@ -272,9 +260,7 @@ def test_get_all_clusters_database_error(self, mock_get_clusters): def test_get_all_clusters_response_structure(self, sample_clusters_with_counts): """Test that get all clusters returns correct response structure.""" - with patch( - "app.routes.face_clusters.db_get_all_clusters_with_face_counts" - ) as mock_get: + with patch("app.routes.face_clusters.db_get_all_clusters_with_face_counts") as mock_get: mock_get.return_value = sample_clusters_with_counts response = client.get("/face_clusters/") @@ -300,9 +286,7 @@ def test_get_all_clusters_response_structure(self, sample_clusters_with_counts): @patch("app.routes.face_clusters.db_get_images_by_cluster_id") @patch("app.routes.face_clusters.db_get_cluster_by_id") - def test_get_cluster_images_success( - self, mock_get_cluster, mock_get_images, sample_cluster_images - ): + def 
test_get_cluster_images_success(self, mock_get_cluster, mock_get_images, sample_cluster_images): """Test successfully retrieving images for a cluster.""" cluster_id = "cluster_123" mock_get_cluster.return_value = { @@ -390,9 +374,7 @@ def test_rename_cluster_missing_request_body(self): def test_rename_cluster_invalid_json(self): """Test rename cluster with invalid JSON structure.""" - response = client.put( - "/face_clusters/cluster_123", json={"invalid_field": "value"} - ) + response = client.put("/face_clusters/cluster_123", json={"invalid_field": "value"}) assert response.status_code == 422 diff --git a/backend/tests/test_folders.py b/backend/tests/test_folders.py index a0d26f0e5..46f220be8 100644 --- a/backend/tests/test_folders.py +++ b/backend/tests/test_folders.py @@ -163,9 +163,7 @@ def test_add_folder_success( mock_add_folder_tree.assert_called_once() @patch("app.routes.folders.db_folder_exists") - def test_add_folder_already_exists( - self, mock_folder_exists, client, temp_folder_structure - ): + def test_add_folder_already_exists(self, mock_folder_exists, client, temp_folder_structure): """Test adding folder that already exists in database.""" mock_folder_exists.return_value = True @@ -233,9 +231,7 @@ def test_add_folder_with_parent_id( """Test adding folder with specified parent_folder_id.""" mock_folder_exists.return_value = False - mock_find_parent.return_value = ( - None # Should not be called when parent_id provided - ) + mock_find_parent.return_value = None # Should not be called when parent_id provided mock_add_folder_tree.return_value = ("child-folder-id", {}) mock_update_parent_ids.return_value = None @@ -258,9 +254,7 @@ def test_add_folder_with_parent_id( @patch("app.routes.folders.folder_util_add_folder_tree") @patch("app.routes.folders.db_folder_exists") - def test_add_folder_database_error( - self, mock_folder_exists, mock_add_folder_tree, client, temp_folder_structure - ): + def test_add_folder_database_error(self, mock_folder_exists, mock_add_folder_tree, client, temp_folder_structure): """Test handling database errors during folder addition.""" mock_folder_exists.return_value = False mock_add_folder_tree.side_effect = Exception("Database connection failed") @@ -394,9 +388,7 @@ def test_enable_ai_tagging_database_error(self, mock_enable_batch, client): assert data["detail"]["error"] == "Internal server error" @patch("app.routes.folders.db_enable_ai_tagging_batch") - def test_enable_ai_tagging_background_processing_called( - self, mock_enable_batch, client - ): + def test_enable_ai_tagging_background_processing_called(self, mock_enable_batch, client): """Test that background processing is triggered after enabling AI tagging.""" mock_enable_batch.return_value = 2 @@ -419,9 +411,7 @@ def test_disable_ai_tagging_success(self, mock_disable_batch, client): """Test successfully disabling AI tagging for folders.""" mock_disable_batch.return_value = 5 # 5 folders updated - request_data = { - "folder_ids": ["folder-1", "folder-2", "folder-3", "folder-4", "folder-5"] - } + request_data = {"folder_ids": ["folder-1", "folder-2", "folder-3", "folder-4", "folder-5"]} response = client.post("/folders/disable-ai-tagging", json=request_data) @@ -438,9 +428,7 @@ def test_disable_ai_tagging_success(self, mock_disable_batch, client): "folder-5", ] - mock_disable_batch.assert_called_once_with( - ["folder-1", "folder-2", "folder-3", "folder-4", "folder-5"] - ) + mock_disable_batch.assert_called_once_with(["folder-1", "folder-2", "folder-3", "folder-4", "folder-5"]) 
@patch("app.routes.folders.db_disable_ai_tagging_batch") def test_disable_ai_tagging_single_folder(self, mock_disable_batch, client): @@ -491,9 +479,7 @@ def test_disable_ai_tagging_database_error(self, mock_disable_batch, client): assert data["detail"]["error"] == "Internal server error" @patch("app.routes.folders.db_disable_ai_tagging_batch") - def test_disable_ai_tagging_no_background_processing( - self, mock_disable_batch, client - ): + def test_disable_ai_tagging_no_background_processing(self, mock_disable_batch, client): """Test that no background processing is triggered when disabling AI tagging.""" mock_disable_batch.return_value = 2 @@ -586,9 +572,7 @@ def test_delete_folders_database_error(self, mock_delete_batch, client): # ============================================================================ @patch("app.routes.folders.db_get_all_folder_details") - def test_get_all_folders_success( - self, mock_get_all_folders, client, sample_folder_details - ): + def test_get_all_folders_success(self, mock_get_all_folders, client, sample_folder_details): """Test successfully retrieving all folders.""" mock_get_all_folders.return_value = sample_folder_details @@ -676,9 +660,7 @@ def test_disable_ai_tagging_no_folders_updated(self, mock_disable_batch, client) """Test disabling AI tagging when no folders are actually updated.""" mock_disable_batch.return_value = 0 - request_data = { - "folder_ids": ["non-existent-folder-1", "non-existent-folder-2"] - } + request_data = {"folder_ids": ["non-existent-folder-1", "non-existent-folder-2"]} response = client.post("/folders/disable-ai-tagging", json=request_data) @@ -737,9 +719,7 @@ def test_complete_folder_workflow( @patch("app.routes.folders.db_enable_ai_tagging_batch") @patch("app.routes.folders.db_disable_ai_tagging_batch") - def test_ai_tagging_toggle_workflow( - self, mock_disable_batch, mock_enable_batch, client - ): + def test_ai_tagging_toggle_workflow(self, mock_disable_batch, mock_enable_batch, client): """Test toggling AI tagging on and off for folders.""" folder_ids = ["folder-1", "folder-2"] @@ -752,9 +732,7 @@ def test_ai_tagging_toggle_workflow( assert enable_response.json()["data"]["updated_count"] == 2 disable_request = {"folder_ids": folder_ids} - disable_response = client.post( - "/folders/disable-ai-tagging", json=disable_request - ) + disable_response = client.post("/folders/disable-ai-tagging", json=disable_request) assert disable_response.status_code == 200 assert disable_response.json()["data"]["updated_count"] == 2 @@ -808,9 +786,7 @@ def mock_find_parent_side_effect(folder_path): @patch("app.routes.folders.db_delete_folders_batch") @patch("app.routes.folders.db_enable_ai_tagging_batch") - def test_complete_folder_lifecycle( - self, mock_enable_batch, mock_delete_batch, client - ): + def test_complete_folder_lifecycle(self, mock_enable_batch, mock_delete_batch, client): """Test complete folder lifecycle: enable AI -> delete.""" folder_ids = ["folder-1", "folder-2"] diff --git a/backend/tests/test_user_preferences.py b/backend/tests/test_user_preferences.py index 8a70c4b34..edeb4803f 100644 --- a/backend/tests/test_user_preferences.py +++ b/backend/tests/test_user_preferences.py @@ -56,9 +56,7 @@ class TestUserPreferencesAPI: """Test class for User Preferences API endpoints.""" @patch("app.routes.user_preferences.db_get_metadata") - def test_get_user_preferences_with_existing_data( - self, mock_get_metadata, sample_metadata_with_preferences - ): + def test_get_user_preferences_with_existing_data(self, mock_get_metadata, 
sample_metadata_with_preferences): """Test successful retrieval of user preferences when data exists.""" mock_get_metadata.return_value = sample_metadata_with_preferences @@ -78,9 +76,7 @@ def test_get_user_preferences_with_existing_data( mock_get_metadata.assert_called_once() @patch("app.routes.user_preferences.db_get_metadata") - def test_get_user_preferences_with_defaults( - self, mock_get_metadata, sample_metadata_without_preferences - ): + def test_get_user_preferences_with_defaults(self, mock_get_metadata, sample_metadata_without_preferences): """Test retrieval of user preferences with default values when no preferences exist.""" mock_get_metadata.return_value = sample_metadata_without_preferences @@ -133,9 +129,7 @@ def test_get_user_preferences_null_metadata(self, mock_get_metadata): def test_get_user_preferences_partial_data(self, mock_get_metadata): """Test retrieval when only some preference fields exist.""" - mock_get_metadata.return_value = { - "user_preferences": {"YOLO_model_size": "medium"} - } + mock_get_metadata.return_value = {"user_preferences": {"YOLO_model_size": "medium"}} response = client.get("/user_preferences/") @@ -201,9 +195,7 @@ def test_update_user_preferences_full_update( @patch("app.routes.user_preferences.db_update_metadata") @patch("app.routes.user_preferences.db_get_metadata") - def test_update_user_preferences_partial_update( - self, mock_get_metadata, mock_update_metadata, sample_metadata_with_preferences - ): + def test_update_user_preferences_partial_update(self, mock_get_metadata, mock_update_metadata, sample_metadata_with_preferences): """Test successful partial update of user preferences.""" mock_get_metadata.return_value = sample_metadata_with_preferences @@ -222,9 +214,7 @@ def test_update_user_preferences_partial_update( @patch("app.routes.user_preferences.db_update_metadata") @patch("app.routes.user_preferences.db_get_metadata") - def test_update_user_preferences_new_metadata( - self, mock_get_metadata, mock_update_metadata - ): + def test_update_user_preferences_new_metadata(self, mock_get_metadata, mock_update_metadata): """Test update when no existing metadata exists.""" mock_get_metadata.return_value = None @@ -253,9 +243,7 @@ def test_update_user_preferences_new_metadata( ) @patch("app.routes.user_preferences.db_update_metadata") @patch("app.routes.user_preferences.db_get_metadata") - def test_update_user_preferences_various_combinations( - self, mock_get_metadata, mock_update_metadata, yolo_size, gpu_accel - ): + def test_update_user_preferences_various_combinations(self, mock_get_metadata, mock_update_metadata, yolo_size, gpu_accel): """Test update with various parameter combinations.""" mock_get_metadata.return_value = {} @@ -283,17 +271,11 @@ def test_update_user_preferences_no_fields_provided(self): if "detail" in response_data: assert response_data["detail"]["success"] is False assert response_data["detail"]["error"] == "Validation Error" - assert ( - "At least one preference field must be provided" - in response_data["detail"]["message"] - ) + assert "At least one preference field must be provided" in response_data["detail"]["message"] else: assert response_data["success"] is False assert response_data["error"] == "Validation Error" - assert ( - "At least one preference field must be provided" - in response_data["message"] - ) + assert "At least one preference field must be provided" in response_data["message"] def test_update_user_preferences_all_none_fields(self): """Test update with all fields explicitly set to None.""" @@ 
-308,23 +290,15 @@ def test_update_user_preferences_all_none_fields(self): if "detail" in response_data: assert response_data["detail"]["success"] is False assert response_data["detail"]["error"] == "Validation Error" - assert ( - "At least one preference field must be provided" - in response_data["detail"]["message"] - ) + assert "At least one preference field must be provided" in response_data["detail"]["message"] else: assert response_data["success"] is False assert response_data["error"] == "Validation Error" - assert ( - "At least one preference field must be provided" - in response_data["message"] - ) + assert "At least one preference field must be provided" in response_data["message"] @patch("app.routes.user_preferences.db_update_metadata") @patch("app.routes.user_preferences.db_get_metadata") - def test_update_user_preferences_database_update_failed( - self, mock_get_metadata, mock_update_metadata - ): + def test_update_user_preferences_database_update_failed(self, mock_get_metadata, mock_update_metadata): """Test update when database update fails.""" mock_get_metadata.return_value = {} @@ -338,10 +312,7 @@ def test_update_user_preferences_database_update_failed( if "detail" in response_data: assert response_data["detail"]["success"] is False assert response_data["detail"]["error"] == "Update Failed" - assert ( - "Failed to update user preferences" - in response_data["detail"]["message"] - ) + assert "Failed to update user preferences" in response_data["detail"]["message"] else: assert response_data["success"] is False assert response_data["error"] == "Update Failed" @@ -369,9 +340,7 @@ def test_update_user_preferences_database_get_exception(self, mock_get_metadata) @patch("app.routes.user_preferences.db_update_metadata") @patch("app.routes.user_preferences.db_get_metadata") - def test_update_user_preferences_database_update_exception( - self, mock_get_metadata, mock_update_metadata - ): + def test_update_user_preferences_database_update_exception(self, mock_get_metadata, mock_update_metadata): """Test update when database update raises an exception.""" mock_get_metadata.return_value = {} @@ -413,16 +382,11 @@ def test_get_user_preferences_response_structure(self): def test_update_user_preferences_response_structure(self): """Test that update user preferences returns correct response structure.""" - with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch( - "app.routes.user_preferences.db_update_metadata" - ) as mock_update: - + with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch("app.routes.user_preferences.db_update_metadata") as mock_update: mock_get.return_value = {} mock_update.return_value = True - response = client.put( - "/user_preferences/", json={"YOLO_model_size": "medium"} - ) + response = client.put("/user_preferences/", json={"YOLO_model_size": "medium"}) assert response.status_code == 200 response_data = response.json() @@ -438,10 +402,7 @@ def test_update_user_preferences_response_structure(self): def test_update_user_preferences_preserves_other_metadata(self): """Test that updating preferences preserves other metadata fields.""" - with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch( - "app.routes.user_preferences.db_update_metadata" - ) as mock_update: - + with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch("app.routes.user_preferences.db_update_metadata") as mock_update: existing_metadata = { "user_preferences": {"YOLO_model_size": "small"}, "other_field": 
"should_be_preserved", @@ -467,9 +428,7 @@ def test_update_user_preferences_missing_request_body(self): def test_update_user_preferences_invalid_yolo_size(self): """Test update with invalid YOLO model size.""" - response = client.put( - "/user_preferences/", json={"YOLO_model_size": "invalid_size"} - ) + response = client.put("/user_preferences/", json={"YOLO_model_size": "invalid_size"}) assert response.status_code == 422 response_data = response.json() @@ -484,15 +443,9 @@ def test_update_user_preferences_invalid_json_structure(self): response_data = response.json() if "detail" in response_data: - assert ( - "At least one preference field must be provided" - in response_data["detail"]["message"] - ) + assert "At least one preference field must be provided" in response_data["detail"]["message"] else: - assert ( - "At least one preference field must be provided" - in response_data["message"] - ) + assert "At least one preference field must be provided" in response_data["message"] @pytest.mark.parametrize( "method,endpoint", diff --git a/frontend/src/components/Media/MediaView.tsx b/frontend/src/components/Media/MediaView.tsx index 53a4ccbf0..9d1154002 100644 --- a/frontend/src/components/Media/MediaView.tsx +++ b/frontend/src/components/Media/MediaView.tsx @@ -117,7 +117,13 @@ export function MediaView({ } if (location.pathname === ROUTES.FAVOURITES) handleClose(); } - }, [currentImage, toggleFavourite, onToggleFavorite, location.pathname, handleClose]); + }, [ + currentImage, + toggleFavourite, + onToggleFavorite, + location.pathname, + handleClose, + ]); const handleZoomIn = useCallback(() => { imageViewerRef.current?.zoomIn(); diff --git a/frontend/src/components/Memories/MemoriesPage.tsx b/frontend/src/components/Memories/MemoriesPage.tsx index a1e7a4e5c..e9dec3429 100644 --- a/frontend/src/components/Memories/MemoriesPage.tsx +++ b/frontend/src/components/Memories/MemoriesPage.tsx @@ -153,19 +153,23 @@ export const MemoriesPage: React.FC = () => { // Calculate counts const locationCount = allMemories.filter( - (m) => m.center_lat !== 0 || m.center_lon !== 0, + (m) => m.center_lat != null && m.center_lon != null, ).length; const dateCount = allMemories.filter( - (m) => m.center_lat === 0 && m.center_lon === 0, + (m) => m.center_lat == null || m.center_lon == null, ).length; // Simple filter function const applyFilter = (memories: Memory[]) => { if (filter === 'location') { - return memories.filter((m) => m.center_lat !== 0 || m.center_lon !== 0); + return memories.filter( + (m) => m.center_lat != null && m.center_lon != null, + ); } if (filter === 'date') { - return memories.filter((m) => m.center_lat === 0 && m.center_lon === 0); + return memories.filter( + (m) => m.center_lat == null || m.center_lon == null, + ); } return memories; // 'all' }; @@ -199,8 +203,8 @@ export const MemoriesPage: React.FC = () => { image_count: onThisDayImages.length, images: onThisDayImages, thumbnail_image_id: onThisDayImages[0]?.id || '', - center_lat: onThisDayImages[0]?.latitude || 0, - center_lon: onThisDayImages[0]?.longitude || 0, + center_lat: onThisDayImages[0]?.latitude ?? null, + center_lon: onThisDayImages[0]?.longitude ?? 
null, }; dispatch(setSelectedMemory(tempMemory)); } diff --git a/frontend/src/components/Memories/MemoryCard.tsx b/frontend/src/components/Memories/MemoryCard.tsx index 61bd22a0e..21e06a8d2 100644 --- a/frontend/src/components/Memories/MemoryCard.tsx +++ b/frontend/src/components/Memories/MemoryCard.tsx @@ -37,7 +37,7 @@ export const MemoryCard = React.memo(({ memory, onClick }) => { // Determine memory type // Backend uses 0,0 as sentinel for date-based memories (no GPS data) - const isDateBased = memory.center_lat == null || memory.center_lon == null; + const isDateBased = memory.center_lat == null || memory.center_lon == null; // Format title based on memory type let displayTitle = memory.title || 'Untitled Memory'; diff --git a/frontend/src/components/Memories/MemoryViewer.tsx b/frontend/src/components/Memories/MemoryViewer.tsx index 80b1df3c2..12fb41da7 100644 --- a/frontend/src/components/Memories/MemoryViewer.tsx +++ b/frontend/src/components/Memories/MemoryViewer.tsx @@ -14,6 +14,7 @@ import { toggleImageFavorite, } from '@/store/slices/memoriesSlice'; import { setCurrentViewIndex, setImages } from '@/features/imageSlice'; +import { showInfoDialog } from '@/features/infoDialogSlice'; import { MediaView } from '@/components/Media/MediaView'; import { formatDateRangeRelative, @@ -23,6 +24,7 @@ import { formatLocationName, } from '@/services/memoriesApi'; import { togglefav } from '@/api/api-functions/togglefav'; +import { getErrorMessage } from '@/lib/utils'; /** * Memory Viewer Modal Component @@ -40,12 +42,24 @@ export const MemoryViewer: React.FC = () => { // Handle favorite toggle - update both API and Redux state const handleToggleFavorite = useCallback( async (imageId: string) => { + // Optimistic update - toggle UI immediately + dispatch(toggleImageFavorite(imageId)); + try { // Call API to toggle favorite in database await togglefav(imageId); - // Update Redux state to reflect the change immediately - dispatch(toggleImageFavorite(imageId)); } catch (error) { + // Revert the optimistic change on failure + dispatch(toggleImageFavorite(imageId)); + + // Show error dialog to user + dispatch( + showInfoDialog({ + title: 'Failed to Update Favorite', + message: getErrorMessage(error), + variant: 'error', + }), + ); console.error('Failed to toggle favorite:', error); } }, diff --git a/frontend/src/services/memoriesApi.ts b/frontend/src/services/memoriesApi.ts index 07ee6e6a0..2d18de9f6 100644 --- a/frontend/src/services/memoriesApi.ts +++ b/frontend/src/services/memoriesApi.ts @@ -1,6 +1,6 @@ /** * Memories API Service - * + * * Handles all HTTP requests to the memories backend endpoints. * Provides type-safe interfaces and error handling. 
*/ @@ -41,8 +41,8 @@ export interface Memory { image_count: number; images: MemoryImage[]; thumbnail_image_id: string; - center_lat: number; - center_lon: number; + center_lat: number | null; + center_lon: number | null; } /** @@ -115,7 +115,7 @@ export interface ApiError { /** * Generate all memories from images with location data - * + * * @param options - Clustering parameters * @returns Generated memories */ @@ -126,12 +126,21 @@ export const generateMemories = async (options?: { }): Promise => { try { const params = new URLSearchParams(); - if (options?.location_radius_km) params.append('location_radius_km', options.location_radius_km.toString()); - if (options?.date_tolerance_days) params.append('date_tolerance_days', options.date_tolerance_days.toString()); - if (options?.min_images) params.append('min_images', options.min_images.toString()); + if (options?.location_radius_km) + params.append( + 'location_radius_km', + options.location_radius_km.toString(), + ); + if (options?.date_tolerance_days) + params.append( + 'date_tolerance_days', + options.date_tolerance_days.toString(), + ); + if (options?.min_images) + params.append('min_images', options.min_images.toString()); const response = await axios.post( - `${API_BASE_URL}/generate${params.toString() ? '?' + params.toString() : ''}` + `${API_BASE_URL}/generate${params.toString() ? '?' + params.toString() : ''}`, ); return response.data; @@ -142,7 +151,7 @@ export const generateMemories = async (options?: { /** * Get memories from the past N days as a timeline - * + * * @param days - Number of days to look back (default: 365) * @param options - Clustering parameters * @returns Timeline memories @@ -152,16 +161,24 @@ export const getTimeline = async ( options?: { location_radius_km?: number; date_tolerance_days?: number; - } + }, ): Promise => { try { const params = new URLSearchParams(); params.append('days', days.toString()); - if (options?.location_radius_km) params.append('location_radius_km', options.location_radius_km.toString()); - if (options?.date_tolerance_days) params.append('date_tolerance_days', options.date_tolerance_days.toString()); + if (options?.location_radius_km) + params.append( + 'location_radius_km', + options.location_radius_km.toString(), + ); + if (options?.date_tolerance_days) + params.append( + 'date_tolerance_days', + options.date_tolerance_days.toString(), + ); const response = await axios.get( - `${API_BASE_URL}/timeline?${params.toString()}` + `${API_BASE_URL}/timeline?${params.toString()}`, ); return response.data; @@ -172,13 +189,13 @@ export const getTimeline = async ( /** * Get photos taken on this date in previous years - * + * * @returns On This Day images */ export const getOnThisDay = async (): Promise => { try { const response = await axios.get( - `${API_BASE_URL}/on-this-day` + `${API_BASE_URL}/on-this-day`, ); return response.data; @@ -189,7 +206,7 @@ export const getOnThisDay = async (): Promise => { /** * Get all unique locations where photos were taken - * + * * @param options - Clustering and sampling parameters * @returns Location clusters */ @@ -199,11 +216,16 @@ export const getLocations = async (options?: { }): Promise => { try { const params = new URLSearchParams(); - if (options?.location_radius_km) params.append('location_radius_km', options.location_radius_km.toString()); - if (options?.max_sample_images) params.append('max_sample_images', options.max_sample_images.toString()); + if (options?.location_radius_km) + params.append( + 'location_radius_km', + 
options.location_radius_km.toString(), + ); + if (options?.max_sample_images) + params.append('max_sample_images', options.max_sample_images.toString()); const response = await axios.get( - `${API_BASE_URL}/locations${params.toString() ? '?' + params.toString() : ''}` + `${API_BASE_URL}/locations${params.toString() ? '?' + params.toString() : ''}`, ); return response.data; @@ -221,26 +243,30 @@ export const getLocations = async (options?: { */ const handleApiError = (error: unknown): ApiError => { if (axios.isAxiosError(error)) { - const axiosError = error as AxiosError<{ detail?: string; message?: string }>; - + const axiosError = error as AxiosError<{ + detail?: string; + message?: string; + }>; + return { - message: axiosError.response?.data?.message || - axiosError.response?.data?.detail || - axiosError.message || - 'An unknown error occurred', + message: + axiosError.response?.data?.message || + axiosError.response?.data?.detail || + axiosError.message || + 'An unknown error occurred', status: axiosError.response?.status, - details: axiosError.response?.statusText + details: axiosError.response?.statusText, }; } if (error instanceof Error) { return { - message: error.message + message: error.message, }; } return { - message: 'An unexpected error occurred' + message: 'An unexpected error occurred', }; }; @@ -250,7 +276,7 @@ const handleApiError = (error: unknown): ApiError => { /** * Format a date string to human-readable format - * + * * @param isoDate - ISO 8601 date string * @returns Formatted date (e.g., "November 25, 2025") */ @@ -262,7 +288,7 @@ export const formatMemoryDate = (isoDate: string | null): string => { return date.toLocaleDateString('en-US', { year: 'numeric', month: 'long', - day: 'numeric' + day: 'numeric', }); } catch { return 'Invalid date'; @@ -271,12 +297,15 @@ export const formatMemoryDate = (isoDate: string | null): string => { /** * Format date range for memory display - * + * * @param startDate - Start date ISO string * @param endDate - End date ISO string * @returns Formatted range (e.g., "Nov 25 - Nov 27, 2025") */ -export const formatDateRange = (startDate: string | null, endDate: string | null): string => { +export const formatDateRange = ( + startDate: string | null, + endDate: string | null, +): string => { if (!startDate || !endDate) return 'Unknown date'; try { @@ -288,19 +317,32 @@ export const formatDateRange = (startDate: string | null, endDate: string | null return start.toLocaleDateString('en-US', { year: 'numeric', month: 'long', - day: 'numeric' + day: 'numeric', }); } // Same month and year - if (start.getMonth() === end.getMonth() && start.getFullYear() === end.getFullYear()) { - const monthYear = start.toLocaleDateString('en-US', { month: 'long', year: 'numeric' }); + if ( + start.getMonth() === end.getMonth() && + start.getFullYear() === end.getFullYear() + ) { + const monthYear = start.toLocaleDateString('en-US', { + month: 'long', + year: 'numeric', + }); return `${start.getDate()} - ${end.getDate()}, ${monthYear}`; } // Different months or years - const startFormatted = start.toLocaleDateString('en-US', { month: 'short', day: 'numeric' }); - const endFormatted = end.toLocaleDateString('en-US', { month: 'short', day: 'numeric', year: 'numeric' }); + const startFormatted = start.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + }); + const endFormatted = end.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric', + }); return `${startFormatted} - ${endFormatted}`; } catch { return 'Invalid 
date range'; @@ -309,7 +351,7 @@ export const formatDateRange = (startDate: string | null, endDate: string | null /** * Calculate years ago from a date - * + * * @param isoDate - ISO date string * @returns Number of years ago */ @@ -325,7 +367,7 @@ export const calculateYearsAgo = (isoDate: string): number => { /** * Format photo count - * + * * @param count - Number of photos * @returns Formatted string (e.g., "1 photo" or "5 photos") */ @@ -335,56 +377,64 @@ export const formatPhotoCount = (count: number): string => { /** * Format date range with relative time for recent dates - * + * * @param startDate - Start date ISO string * @param endDate - End date ISO string * @returns Formatted range with relative dates like "Yesterday", "Last week", "2 months ago" */ -export const formatDateRangeRelative = (startDate: string | null, endDate: string | null): string => { +export const formatDateRangeRelative = ( + startDate: string | null, + endDate: string | null, +): string => { if (!startDate || !endDate) return 'Unknown date'; try { const start = new Date(startDate); const end = new Date(endDate); const now = new Date(); - + // Calculate days difference from end date - const daysDiff = Math.floor((now.getTime() - end.getTime()) / (1000 * 60 * 60 * 24)); - + const daysDiff = Math.floor( + (now.getTime() - end.getTime()) / (1000 * 60 * 60 * 24), + ); + // Today if (daysDiff === 0) { return 'Today'; } - + // Yesterday if (daysDiff === 1) { return 'Yesterday'; } - + // This week (2-6 days ago) if (daysDiff >= 2 && daysDiff <= 6) { return `${daysDiff} days ago`; } - + // Last week if (daysDiff >= 7 && daysDiff <= 13) { return 'Last week'; } - + // This month (2-4 weeks ago) if (daysDiff >= 14 && daysDiff <= 30) { const weeks = Math.floor(daysDiff / 7); return `${weeks} weeks ago`; } - + // Recent months (1-12 months ago) const monthsDiff = Math.floor(daysDiff / 30); if (monthsDiff >= 1 && monthsDiff <= 11) { return monthsDiff === 1 ? 
'Last month' : `${monthsDiff} months ago`; } - + // Over a year ago - show month and year - return start.toLocaleDateString('en-US', { month: 'short', year: 'numeric' }); + return start.toLocaleDateString('en-US', { + month: 'short', + year: 'numeric', + }); } catch { return formatDateRange(startDate, endDate); } @@ -393,25 +443,25 @@ export const formatDateRangeRelative = (startDate: string | null, endDate: strin /** * Generate a human-readable title from location and date * Improves ugly coordinate-based titles like "26.9333°, 75.9228° - November 2025" - * + * * @param memory - Memory object with location and date info * @returns Better title like "Weekend in Jaipur", "Jaipur Trip", or "December 2024" */ export const generateMemoryTitle = (memory: Memory): string => { const location = memory.location_name; const imageCount = memory.image_count; - + // Check if it's a date-based memory (no GPS data) if (location === 'Date-Based Memory') { // Use the title from backend which is already well-formatted for date-only memories return memory.title; } - + // If location doesn't look like coordinates, use it if (!location.includes('°') && !location.match(/^-?\d+\.\d+/)) { // Parse city name from location (e.g., "Jaipur, Rajasthan" -> "Jaipur") const cityName = location.split(',')[0].trim(); - + // Add descriptive word based on image count if (imageCount >= 50) { return `${cityName} Adventure`; @@ -423,21 +473,24 @@ export const generateMemoryTitle = (memory: Memory): string => { return `${cityName} Memories`; } } - + // Fallback: coordinates - try to make it cleaner if (memory.date_start) { const date = new Date(memory.date_start); - const monthYear = date.toLocaleDateString('en-US', { month: 'long', year: 'numeric' }); + const monthYear = date.toLocaleDateString('en-US', { + month: 'long', + year: 'numeric', + }); return `Memories from ${monthYear}`; } - + // Last resort return memory.title || 'Photo Collection'; }; /** * Format location name by removing coordinates if present - * + * * @param locationName - Raw location name from API * @returns Cleaned location name or empty string if only coordinates or date-based */ @@ -446,18 +499,21 @@ export const formatLocationName = (locationName: string): string => { if (locationName === 'Date-Based Memory') { return ''; } - + // If it looks like coordinates (contains ° or is a number pattern), hide it - if (locationName.includes('°') || locationName.match(/^-?\d+\.\d+.*-?\d+\.\d+/)) { + if ( + locationName.includes('°') || + locationName.match(/^-?\d+\.\d+.*-?\d+\.\d+/) + ) { return ''; // Hide ugly coordinates } - + return locationName; }; /** * Get thumbnail URL with fallback - * + * * @param image - Memory image object * @returns Thumbnail URL or placeholder */ @@ -466,7 +522,7 @@ export const getThumbnailUrl = (image: MemoryImage): string => { if (image.thumbnailPath) { return convertFileSrc(image.thumbnailPath); } - + // Fallback to placeholder return '/photo.png'; }; diff --git a/frontend/src/store/hooks.ts b/frontend/src/store/hooks.ts index 440bcf3f8..96fc8a456 100644 --- a/frontend/src/store/hooks.ts +++ b/frontend/src/store/hooks.ts @@ -1,6 +1,6 @@ /** * Redux Hooks - * + * * Typed hooks for use throughout the application. * These hooks ensure type safety when using Redux with TypeScript. 
*/ diff --git a/frontend/src/store/slices/memoriesSlice.ts b/frontend/src/store/slices/memoriesSlice.ts index f3d5b509b..472255be6 100644 --- a/frontend/src/store/slices/memoriesSlice.ts +++ b/frontend/src/store/slices/memoriesSlice.ts @@ -1,6 +1,6 @@ /** * Memories Redux Slice - * + * * Manages state for the Memories feature including: * - All memories (generated from all photos) * - Recent memories (last 30 days) @@ -16,7 +16,7 @@ import { getOnThisDay, Memory, MemoryImage, - ApiError + ApiError, } from '@/services/memoriesApi'; // ============================================================================ @@ -33,10 +33,10 @@ interface MemoriesState { today: string; years: number[]; } | null; - + // Selected memory for viewer modal selectedMemory: Memory | null; - + // Loading states for each section loading: { all: boolean; @@ -44,7 +44,7 @@ interface MemoriesState { year: boolean; onThisDay: boolean; }; - + // Error states error: { all: string | null; @@ -52,7 +52,7 @@ interface MemoriesState { year: string | null; onThisDay: string | null; }; - + // Metadata lastFetched: number | null; } @@ -72,15 +72,15 @@ const initialState: MemoriesState = { all: false, recent: false, year: false, - onThisDay: false + onThisDay: false, }, error: { all: null, recent: null, year: null, - onThisDay: null + onThisDay: null, }, - lastFetched: null + lastFetched: null, }; // ============================================================================ @@ -94,18 +94,15 @@ export const fetchAllMemories = createAsyncThunk< Memory[], void, { rejectValue: string } ->( - 'memories/fetchAll', - async (_, { rejectWithValue }) => { - try { - const response = await generateMemories(); - return response.memories; - } catch (error) { - const apiError = error as ApiError; - return rejectWithValue(apiError.message); - } +>('memories/fetchAll', async (_, { rejectWithValue }) => { + try { + const response = await generateMemories(); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); } -); +}); /** * Fetch recent memories (last 30 days) @@ -114,18 +111,15 @@ export const fetchRecentMemories = createAsyncThunk< Memory[], number, { rejectValue: string } ->( - 'memories/fetchRecent', - async (days = 30, { rejectWithValue }) => { - try { - const response = await getTimeline(days); - return response.memories; - } catch (error) { - const apiError = error as ApiError; - return rejectWithValue(apiError.message); - } +>('memories/fetchRecent', async (days = 30, { rejectWithValue }) => { + try { + const response = await getTimeline(days); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); } -); +}); /** * Fetch memories from current year @@ -134,18 +128,15 @@ export const fetchYearMemories = createAsyncThunk< Memory[], number, { rejectValue: string } ->( - 'memories/fetchYear', - async (days = 365, { rejectWithValue }) => { - try { - const response = await getTimeline(days); - return response.memories; - } catch (error) { - const apiError = error as ApiError; - return rejectWithValue(apiError.message); - } +>('memories/fetchYear', async (days = 365, { rejectWithValue }) => { + try { + const response = await getTimeline(days); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); } -); +}); /** * Fetch "On This Day" images @@ -154,22 +145,19 @@ export const fetchOnThisDay = createAsyncThunk< { images: 
MemoryImage[]; today: string; years: number[] }, void, { rejectValue: string } ->( - 'memories/fetchOnThisDay', - async (_, { rejectWithValue }) => { - try { - const response = await getOnThisDay(); - return { - images: response.images, - today: response.today, - years: response.years - }; - } catch (error) { - const apiError = error as ApiError; - return rejectWithValue(apiError.message); - } +>('memories/fetchOnThisDay', async (_, { rejectWithValue }) => { + try { + const response = await getOnThisDay(); + return { + images: response.images, + today: response.today, + years: response.years, + }; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); } -); +}); /** * Fetch all memories data at once (parallel requests) @@ -178,22 +166,19 @@ export const fetchAllMemoriesData = createAsyncThunk< void, void, { rejectValue: string } ->( - 'memories/fetchAllData', - async (_, { dispatch, rejectWithValue }) => { - try { - await Promise.all([ - dispatch(fetchOnThisDay()), - dispatch(fetchRecentMemories(30)), - dispatch(fetchYearMemories(365)), - dispatch(fetchAllMemories()) - ]); - } catch (error) { - const apiError = error as ApiError; - return rejectWithValue(apiError.message); - } +>('memories/fetchAllData', async (_, { dispatch, rejectWithValue }) => { + try { + await Promise.all([ + dispatch(fetchOnThisDay()), + dispatch(fetchRecentMemories(30)), + dispatch(fetchYearMemories(365)), + dispatch(fetchAllMemories()), + ]); + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); } -); +}); // ============================================================================ // Slice @@ -209,46 +194,46 @@ const memoriesSlice = createSlice({ setSelectedMemory: (state, action: PayloadAction) => { state.selectedMemory = action.payload; }, - + /** * Toggle favorite status of an image across all memories */ toggleImageFavorite: (state, action: PayloadAction) => { const imageId = action.payload; - + // Helper function to update image in a memory array const updateMemoriesArray = (memories: Memory[]) => { - memories.forEach(memory => { - memory.images.forEach(image => { + memories.forEach((memory) => { + memory.images.forEach((image) => { if (image.id === imageId) { image.isFavourite = !image.isFavourite; } }); }); }; - + // Update across all memory collections updateMemoriesArray(state.allMemories); updateMemoriesArray(state.recentMemories); updateMemoriesArray(state.yearMemories); - + // Update onThisDay images - state.onThisDayImages.forEach(image => { + state.onThisDayImages.forEach((image) => { if (image.id === imageId) { image.isFavourite = !image.isFavourite; } }); - + // Update selected memory if it exists if (state.selectedMemory) { - state.selectedMemory.images.forEach(image => { + state.selectedMemory.images.forEach((image) => { if (image.id === imageId) { image.isFavourite = !image.isFavourite; } }); } }, - + /** * Clear all errors */ @@ -257,16 +242,16 @@ const memoriesSlice = createSlice({ all: null, recent: null, year: null, - onThisDay: null + onThisDay: null, }; }, - + /** * Reset memories state */ resetMemories: () => { return initialState; - } + }, }, extraReducers: (builder) => { // ======================================================================== @@ -301,7 +286,8 @@ const memoriesSlice = createSlice({ }) .addCase(fetchRecentMemories.rejected, (state, action) => { state.loading.recent = false; - state.error.recent = action.payload || 'Failed to fetch recent memories'; + state.error.recent = + 
action.payload || 'Failed to fetch recent memories'; }); // ======================================================================== @@ -334,14 +320,14 @@ const memoriesSlice = createSlice({ state.onThisDayImages = action.payload.images; state.onThisDayMeta = { today: action.payload.today, - years: action.payload.years + years: action.payload.years, }; }) .addCase(fetchOnThisDay.rejected, (state, action) => { state.loading.onThisDay = false; state.error.onThisDay = action.payload || 'Failed to fetch On This Day'; }); - } + }, }); // ============================================================================ @@ -352,7 +338,7 @@ export const { setSelectedMemory, toggleImageFavorite, clearErrors, - resetMemories + resetMemories, } = memoriesSlice.actions; export default memoriesSlice.reducer; @@ -361,15 +347,24 @@ export default memoriesSlice.reducer; // Selectors // ============================================================================ -export const selectAllMemories = (state: { memories: MemoriesState }) => state.memories.allMemories; -export const selectRecentMemories = (state: { memories: MemoriesState }) => state.memories.recentMemories; -export const selectYearMemories = (state: { memories: MemoriesState }) => state.memories.yearMemories; -export const selectOnThisDayImages = (state: { memories: MemoriesState }) => state.memories.onThisDayImages; -export const selectOnThisDayMeta = (state: { memories: MemoriesState }) => state.memories.onThisDayMeta; -export const selectSelectedMemory = (state: { memories: MemoriesState }) => state.memories.selectedMemory; -export const selectMemoriesLoading = (state: { memories: MemoriesState }) => state.memories.loading; -export const selectMemoriesError = (state: { memories: MemoriesState }) => state.memories.error; -export const selectLastFetched = (state: { memories: MemoriesState }) => state.memories.lastFetched; +export const selectAllMemories = (state: { memories: MemoriesState }) => + state.memories.allMemories; +export const selectRecentMemories = (state: { memories: MemoriesState }) => + state.memories.recentMemories; +export const selectYearMemories = (state: { memories: MemoriesState }) => + state.memories.yearMemories; +export const selectOnThisDayImages = (state: { memories: MemoriesState }) => + state.memories.onThisDayImages; +export const selectOnThisDayMeta = (state: { memories: MemoriesState }) => + state.memories.onThisDayMeta; +export const selectSelectedMemory = (state: { memories: MemoriesState }) => + state.memories.selectedMemory; +export const selectMemoriesLoading = (state: { memories: MemoriesState }) => + state.memories.loading; +export const selectMemoriesError = (state: { memories: MemoriesState }) => + state.memories.error; +export const selectLastFetched = (state: { memories: MemoriesState }) => + state.memories.lastFetched; /** * Select total memory count across all sections From a1815ef700e5b2663a85baf56767f3ab457c56e7 Mon Sep 17 00:00:00 2001 From: harshit Date: Tue, 27 Jan 2026 01:02:55 +0530 Subject: [PATCH 12/22] fix: minor tweaks and port update to 52123 --- backend/app/routes/memories.py | 15 +++++++++++---- backend/app/utils/memory_clustering.py | 19 +++++++++++-------- backend/test_memories_api.py | 6 +++--- 3 files changed, 25 insertions(+), 15 deletions(-) diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index d1632952a..8e48c30bf 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -296,10 +296,17 @@ async def get_on_this_day(): continue # 
Sort by year (most recent first) - all_images.sort( - key=lambda x: (datetime.fromisoformat(x["captured_at"]) if x.get("captured_at") else datetime.min), - reverse=True, - ) + def parse_captured_at(img): + """Safely parse captured_at date, return datetime.min on failure.""" + captured_at = img.get("captured_at") + if not captured_at: + return datetime.min + try: + return datetime.fromisoformat(captured_at) + except (ValueError, TypeError): + return datetime.min + + all_images.sort(key=parse_captured_at, reverse=True) return OnThisDayResponse( success=True, diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py index a8f942eaa..9cf19ab10 100644 --- a/backend/app/utils/memory_clustering.py +++ b/backend/app/utils/memory_clustering.py @@ -163,7 +163,7 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] skipped_count = 0 for img in images: - has_gps = img.get("latitude") and img.get("longitude") + has_gps = img.get("latitude") is not None and img.get("longitude") is not None has_date = img.get("captured_at") if has_gps: @@ -191,7 +191,7 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] memories.extend(date_memories) # Sort by date descending - memories.sort(key=lambda m: m.get("date_start", ""), reverse=True) + memories.sort(key=lambda m: m.get("date_start") or "", reverse=True) logger.info(f"Generated {len(memories)} total memories") return memories @@ -217,7 +217,8 @@ def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[st for temp_cluster in temporal_clusters: if len(temp_cluster) >= self.min_images_per_memory: memory = self._create_simple_memory(temp_cluster, memory_type="location") - memories.append(memory) + if memory is not None: + memories.append(memory) return memories except Exception as e: @@ -298,8 +299,8 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = # Simple titles if memory_type == "location": # Calculate center first - lats = [img["latitude"] for img in images if img.get("latitude")] - lons = [img["longitude"] for img in images if img.get("longitude")] + lats = [img["latitude"] for img in images if img.get("latitude") is not None] + lons = [img["longitude"] for img in images if img.get("longitude") is not None] center_lat = np.mean(lats) if lats else 0 center_lon = np.mean(lons) if lons else 0 @@ -330,8 +331,8 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = center_lat = 0 center_lon = 0 - # Create memory - memory_id = f"{memory_type}_{int(date_obj.timestamp())}_{len(images)}" + # Create memory - use _generate_memory_id for unique IDs + memory_id = self._generate_memory_id(center_lat, center_lon, date_obj) return { "memory_id": memory_id, @@ -553,7 +554,7 @@ def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, A for img in images: try: # Check for required fields - if not img.get("latitude") or not img.get("longitude"): + if img.get("latitude") is None or img.get("longitude") is None: continue # Parse captured_at if it's a string @@ -582,6 +583,8 @@ def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, A else: # Could not parse date, but location is still valid logger.debug(f"Could not parse date for image {img.get('id')}: {captured_at}") + # Clear the unparseable string to prevent downstream errors + img_copy["captured_at"] = None elif isinstance(captured_at, datetime): img_copy["captured_at"] = captured_at diff --git 
a/backend/test_memories_api.py b/backend/test_memories_api.py index 257481e42..778cc32a5 100644 --- a/backend/test_memories_api.py +++ b/backend/test_memories_api.py @@ -10,7 +10,7 @@ import requests import json -BASE_URL = "http://localhost:8000/api/memories" +BASE_URL = "http://localhost:52123/api/memories" def print_response(endpoint: str, response: requests.Response): @@ -98,7 +98,7 @@ def test_locations(): def check_server(): """Check if the server is running.""" try: - response = requests.get("http://localhost:8000/health", timeout=2) + response = requests.get("http://localhost:52123/health", timeout=2) if response.status_code == 200: print("Server is running") return True @@ -108,7 +108,7 @@ def check_server(): except requests.exceptions.ConnectionError: print("Server is not running") print("\nStart the server with:") - print(" cd /Users/harshit/Code/pictopy/PictoPy/backend") + print(" cd backend") print(" python main.py") return False From b65de085a19341f003ac2899a946f2e51b5da3c1 Mon Sep 17 00:00:00 2001 From: harshit Date: Tue, 27 Jan 2026 07:01:12 +0530 Subject: [PATCH 13/22] fix:improved date handling and added timeouts --- backend/app/routes/memories.py | 21 +++++++++++++++++++-- backend/app/utils/memory_clustering.py | 7 +++++-- backend/test_memories_api.py | 21 +++++++++++++++++---- 3 files changed, 41 insertions(+), 8 deletions(-) diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index 8e48c30bf..2d9085c50 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -283,8 +283,25 @@ async def get_on_this_day(): try: images = db_get_images_by_year_month(target_year, current_month) - # Filter to specific day - day_images = [img for img in images if img.get("captured_at") and datetime.fromisoformat(img["captured_at"]).day == current_day] + # Filter to specific day - parse each image defensively + day_images = [] + for img in images: + captured_at_str = img.get("captured_at") + if not captured_at_str: + continue + + try: + # Strip trailing Z and parse ISO format + captured_at_str = captured_at_str.rstrip("Z") + captured_dt = datetime.fromisoformat(captured_at_str) + + # Only include if day matches + if captured_dt.day == current_day: + day_images.append(img) + except (ValueError, TypeError, AttributeError): + # Skip images with malformed dates + logger.debug(f"Skipping image with invalid date: {captured_at_str}") + continue if day_images: all_images.extend(day_images) diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py index 9cf19ab10..5058d3976 100644 --- a/backend/app/utils/memory_clustering.py +++ b/backend/app/utils/memory_clustering.py @@ -294,7 +294,7 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = date_obj = min(dates) else: date_start = date_end = None - date_obj = datetime.now() + date_obj = None # Simple titles if memory_type == "location": @@ -326,7 +326,10 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = title = location_name else: # Date-based: "Month Year" - title = date_obj.strftime("%B %Y") + if date_obj: + title = date_obj.strftime("%B %Y") + else: + title = "Undated Photos" location_name = "" center_lat = 0 center_lon = 0 diff --git a/backend/test_memories_api.py b/backend/test_memories_api.py index 778cc32a5..2cd6fcd13 100644 --- a/backend/test_memories_api.py +++ b/backend/test_memories_api.py @@ -11,6 +11,7 @@ import json BASE_URL = "http://localhost:52123/api/memories" +REQUEST_TIMEOUT = 10 # 
seconds def print_response(endpoint: str, response: requests.Response): @@ -35,7 +36,11 @@ def test_generate_memories(): """Test POST /api/memories/generate""" print("\nTesting: Generate Memories") - response = requests.post(f"{BASE_URL}/generate", params={"location_radius_km": 5.0, "date_tolerance_days": 3, "min_images": 2}) + response = requests.post( + f"{BASE_URL}/generate", + params={"location_radius_km": 5.0, "date_tolerance_days": 3, "min_images": 2}, + timeout=REQUEST_TIMEOUT, + ) print_response("POST /api/memories/generate", response) @@ -51,7 +56,11 @@ def test_timeline(): """Test GET /api/memories/timeline""" print("\nTesting: Timeline") - response = requests.get(f"{BASE_URL}/timeline", params={"days": 30, "location_radius_km": 5.0, "date_tolerance_days": 3}) + response = requests.get( + f"{BASE_URL}/timeline", + params={"days": 30, "location_radius_km": 5.0, "date_tolerance_days": 3}, + timeout=REQUEST_TIMEOUT, + ) print_response("GET /api/memories/timeline", response) @@ -66,7 +75,7 @@ def test_on_this_day(): """Test GET /api/memories/on-this-day""" print("\nTesting: On This Day") - response = requests.get(f"{BASE_URL}/on-this-day") + response = requests.get(f"{BASE_URL}/on-this-day", timeout=REQUEST_TIMEOUT) print_response("GET /api/memories/on-this-day", response) @@ -82,7 +91,11 @@ def test_locations(): """Test GET /api/memories/locations""" print("\nTesting: Locations") - response = requests.get(f"{BASE_URL}/locations", params={"location_radius_km": 5.0, "max_sample_images": 3}) + response = requests.get( + f"{BASE_URL}/locations", + params={"location_radius_km": 5.0, "max_sample_images": 3}, + timeout=REQUEST_TIMEOUT, + ) print_response("GET /api/memories/locations", response) From 38d30d829f05c354d3e6880dccc04f55f7130ca5 Mon Sep 17 00:00:00 2001 From: harshit Date: Tue, 27 Jan 2026 14:33:10 +0530 Subject: [PATCH 14/22] fix:improved zsuffix handling --- backend/app/routes/memories.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index 2d9085c50..b082b36f9 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -319,8 +319,11 @@ def parse_captured_at(img): if not captured_at: return datetime.min try: + + if isinstance(captured_at, str): + captured_at = captured_at.rstrip("Z") return datetime.fromisoformat(captured_at) - except (ValueError, TypeError): + except (ValueError, TypeError, AttributeError): return datetime.min all_images.sort(key=parse_captured_at, reverse=True) From b999b8075053c65492ab14a11d4c6556eace1847 Mon Sep 17 00:00:00 2001 From: harshit Date: Tue, 27 Jan 2026 18:35:29 +0530 Subject: [PATCH 15/22] fix:changed async def to def --- backend/app/routes/memories.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index b082b36f9..8e44a4c12 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -33,9 +33,9 @@ logger = get_logger(__name__) -# ============================================================================ + # Response Models -# ============================================================================ + class MemoryImage(BaseModel): @@ -113,13 +113,13 @@ class LocationsResponse(BaseModel): locations: List[LocationCluster] -# ============================================================================ + # API Endpoints -# 
============================================================================ + @router.post("/generate", response_model=GenerateMemoriesResponse) -async def generate_memories( +def generate_memories( location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days"), min_images: int = Query(2, ge=1, le=10, description="Minimum images per memory"), @@ -174,12 +174,12 @@ async def generate_memories( ) except Exception as e: - logger.error(f"Error generating memories: {e}", exc_info=True) - raise HTTPException(status_code=500, detail=str(e)) + logger.error("Error generating memories", exc_info=True) + raise HTTPException(status_code=500, detail="Failed to generate memories") @router.get("/timeline", response_model=TimelineResponse) -async def get_timeline( +def get_timeline( days: int = Query(365, ge=1, le=3650, description="Number of days to look back"), location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days"), @@ -244,12 +244,12 @@ async def get_timeline( ) except Exception as e: - logger.error(f"Error getting timeline: {e}") - raise HTTPException(status_code=500, detail=f"Failed to get timeline: {str(e)}") + logger.error("Error getting timeline", exc_info=True) + raise HTTPException(status_code=500, detail="Failed to get timeline") @router.get("/on-this-day", response_model=OnThisDayResponse) -async def get_on_this_day(): +def get_on_this_day(): """ Get photos taken on this date in previous years. @@ -337,12 +337,12 @@ def parse_captured_at(img): ) except Exception as e: - logger.error(f"Error getting 'On This Day': {e}") - raise HTTPException(status_code=500, detail=f"Failed to get 'On This Day': {str(e)}") + logger.error("Error getting 'On This Day'", exc_info=True) + raise HTTPException(status_code=500, detail="Failed to get 'On This Day'") @router.get("/locations", response_model=LocationsResponse) -async def get_locations( +def get_locations( location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), max_sample_images: int = Query(5, ge=1, le=20, description="Max sample images per location"), ): @@ -418,5 +418,5 @@ async def get_locations( return LocationsResponse(success=True, location_count=len(locations), locations=locations) except Exception as e: - logger.error(f"Error getting locations: {e}") - raise HTTPException(status_code=500, detail=f"Failed to get locations: {str(e)}") + logger.error("Error getting locations", exc_info=True) + raise HTTPException(status_code=500, detail="Failed to get locations") From 665357a1fc9563ec1284d59d362ebef177ea7bef Mon Sep 17 00:00:00 2001 From: harshit Date: Tue, 27 Jan 2026 18:52:24 +0530 Subject: [PATCH 16/22] fix:improved encapsulation and changed the date tolerance --- backend/app/routes/memories.py | 6 +++--- backend/app/utils/memory_clustering.py | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index 8e44a4c12..ad3dde5fa 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -379,12 +379,12 @@ def get_locations( # Cluster by location only (no date clustering) clustering = MemoryClustering( location_radius_km=location_radius_km, - date_tolerance_days=999999, # Large number to group all dates together + 
date_tolerance_days=3,
             min_images_per_memory=1,
         )
 
-        # Use internal method to get location clusters
-        location_clusters = clustering._cluster_by_location(clustering._filter_valid_images(images))
+        # Get location clusters using public API
+        location_clusters = clustering.cluster_by_location_only(images)
 
         # Create location cluster objects
         locations = []
diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py
index 5058d3976..f57f589f8 100644
--- a/backend/app/utils/memory_clustering.py
+++ b/backend/app/utils/memory_clustering.py
@@ -200,6 +200,26 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]
             logger.error(f"Clustering failed: {e}", exc_info=True)
             return []
 
+    def cluster_by_location_only(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]:
+        """
+        Public API: Cluster images by location only, without temporal grouping.
+
+        Args:
+            images: List of image dictionaries with GPS coordinates
+
+        Returns:
+            List of location clusters (each cluster is a list of images)
+        """
+        try:
+            valid_images = self._filter_valid_images(images)
+            if not valid_images:
+                return []
+
+            return self._cluster_by_location(valid_images)
+        except Exception as e:
+            logger.error(f"Location-only clustering failed: {e}", exc_info=True)
+            return []
+
     def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
         """
         SIMPLIFIED: Use existing DBSCAN clustering for GPS images.

From ab28916d83f2364e2d47f66b3dc6b40837861a05 Mon Sep 17 00:00:00 2001
From: harshit
Date: Tue, 27 Jan 2026 19:03:04 +0530
Subject: [PATCH 17/22] fix: docstring fix for consistency

---
 backend/app/utils/memory_clustering.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py
index f57f589f8..bf9625db0 100644
--- a/backend/app/utils/memory_clustering.py
+++ b/backend/app/utils/memory_clustering.py
@@ -139,7 +139,7 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]
         FLEXIBLE: Cluster ALL images into memories.
         - Has GPS + Date: Cluster by location using DBSCAN, then by date within each location
         - Has GPS only: Cluster by location using DBSCAN
-        - Has Date only: Group by month (if ≥5 photos per month)
+        - Has Date only: Group by month (if ≥ min_images_per_memory photos per month; default 2)
         - Has neither: Skip (can't create meaningful memory)
 
         Images work with EITHER date OR location - not both required!
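
The cluster_by_location_only wrapper added above gives callers such as the /locations route a public entry point instead of chaining the private _filter_valid_images and _cluster_by_location helpers. The sketch below is illustrative only: the image dicts and coordinate values are invented, and the import path assumes the backend package root is on sys.path, as elsewhere in this series.

from app.utils.memory_clustering import MemoryClustering

# Hypothetical rows shaped like the image dicts used throughout this patch set.
images = [
    {"id": "img-1", "latitude": 26.91, "longitude": 75.79, "captured_at": "2025-11-25T10:00:00"},
    {"id": "img-2", "latitude": 26.92, "longitude": 75.80, "captured_at": "2025-11-25T14:30:00"},
    {"id": "img-3", "latitude": 48.86, "longitude": 2.35, "captured_at": "2025-06-01T09:00:00"},
]

clustering = MemoryClustering(
    location_radius_km=5.0,
    date_tolerance_days=3,  # not used by location-only clustering
    min_images_per_memory=1,
)

# Each returned cluster is a list of image dicts grouped within roughly
# location_radius_km of each other; images without usable GPS data are filtered out.
for cluster in clustering.cluster_by_location_only(images):
    print(len(cluster), "image(s) near", cluster[0]["latitude"], cluster[0]["longitude"])

Keeping the DBSCAN details behind one public method also means the route code no longer breaks if the private helpers are renamed or reworked.
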
From 8e924b5d5097fb7fa2716551f68825c143a60813 Mon Sep 17 00:00:00 2001
From: harshit
Date: Tue, 27 Jan 2026 19:14:18 +0530
Subject: [PATCH 18/22] fix: prevent type error when sorting images by capture date

---
 backend/app/utils/memory_clustering.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py
index bf9625db0..a12ff34a6 100644
--- a/backend/app/utils/memory_clustering.py
+++ b/backend/app/utils/memory_clustering.py
@@ -302,7 +302,7 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str =
             cleaned_images.append(img_copy)
 
         # Sort by date
-        sorted_images = sorted(cleaned_images, key=lambda x: x.get("captured_at", ""))
+        sorted_images = sorted(cleaned_images, key=lambda x: x.get("captured_at") or "")
 
         # Get date range
         dates = [img.get("captured_at") for img in sorted_images if img.get("captured_at")]

From 65572388339b3825440dc07c480de0901ce4320a Mon Sep 17 00:00:00 2001
From: harshit
Date: Tue, 27 Jan 2026 19:27:25 +0530
Subject: [PATCH 19/22] fix: use hashlib for deterministic memory_id generation

---
 backend/app/utils/memory_clustering.py | 23 ++++++++++++++++++-----
 1 file changed, 18 insertions(+), 5 deletions(-)

diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py
index a12ff34a6..973f4c2b3 100644
--- a/backend/app/utils/memory_clustering.py
+++ b/backend/app/utils/memory_clustering.py
@@ -14,6 +14,7 @@
 from datetime import datetime
 from typing import List, Dict, Any, Optional
 from collections import defaultdict
+import hashlib
 
 import numpy as np
 from sklearn.cluster import DBSCAN
@@ -538,7 +539,13 @@ def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, A
         thumbnail_image_id = images[thumbnail_idx]["id"]
 
         # Create memory ID (use timestamp only)
-        memory_id = f"mem_date_{date_start.strftime('%Y%m%d')}" if date_start else f"mem_date_unknown_{hash(tuple(img['id'] for img in images[:5]))}"
+        if date_start:
+            memory_id = f"mem_date_{date_start.strftime('%Y%m%d')}"
+        else:
+            # Deterministic hash of first 5 image IDs
+            image_ids = "|".join(img["id"] for img in images[:5])
+            hash_digest = hashlib.sha256(image_ids.encode()).hexdigest()[:8]
+            memory_id = f"mem_date_unknown_{hash_digest}"
 
         # Convert captured_at datetime objects to ISO strings
         serialized_images = []
@@ -858,10 +865,16 @@ def _generate_memory_id(self, latitude: float, longitude: float, date: Optional[
         Returns:
             Unique memory ID
         """
-        # Create hash from location and date
-        location_hash = hash((round(latitude, 2), round(longitude, 2)))
+        # Create deterministic hash from location and date
+        lat_rounded = round(latitude, 2)
+        lon_rounded = round(longitude, 2)
+
         if date:
             date_str = date.strftime("%Y%m%d")
-            return f"mem_{date_str}_{abs(location_hash)}"
+            hash_input = f"lat:{lat_rounded}|lon:{lon_rounded}|date:{date_str}"
+            hash_digest = hashlib.sha256(hash_input.encode()).hexdigest()[:8]
+            return f"mem_{date_str}_{hash_digest}"
         else:
-            return f"mem_nodate_{abs(location_hash)}"
+            hash_input = f"lat:{lat_rounded}|lon:{lon_rounded}"
+            hash_digest = hashlib.sha256(hash_input.encode()).hexdigest()[:8]
+            return f"mem_nodate_{hash_digest}"

From 99e0effad027844cf2f3e542a2ba4d6b916bb01d Mon Sep 17 00:00:00 2001
From: harshit
Date: Tue, 3 Feb 2026 22:12:43 +0530
Subject: [PATCH 20/22] fix: remove invalid ignoreDeprecations from tsconfig.json

---
 frontend/tsconfig.json | 1 -
 1 file changed, 1 deletion(-)

diff --git
a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -22,7 +22,6 @@ "noFallthroughCasesInSwitch": true, /* Type checking */ - "ignoreDeprecations": "6.0", "baseUrl": ".", "paths": { "@/*": ["./src/*"] From 3bb68953ba7647aa5a7296321c59a18896f1ff57 Mon Sep 17 00:00:00 2001 From: harshit Date: Tue, 3 Feb 2026 22:21:31 +0530 Subject: [PATCH 21/22] style: apply black and ruff formatting to backend --- backend/app/database/albums.py | 32 ++-- backend/app/database/face_clusters.py | 34 +++-- backend/app/database/faces.py | 54 ++++--- backend/app/database/folders.py | 40 +++-- backend/app/database/images.py | 78 ++++++---- backend/app/database/metadata.py | 10 +- backend/app/database/yolo_mapping.py | 6 +- backend/app/logging/setup_logging.py | 53 +++++-- backend/app/models/FaceDetector.py | 4 +- backend/app/models/FaceNet.py | 8 +- backend/app/models/ObjectClassifier.py | 4 +- backend/app/models/YOLO.py | 20 ++- backend/app/routes/albums.py | 56 +++++-- backend/app/routes/face_clusters.py | 15 +- backend/app/routes/folders.py | 48 ++++-- backend/app/routes/images.py | 12 +- backend/app/routes/memories.py | 68 ++++++--- backend/app/routes/user_preferences.py | 1 - backend/app/utils/API.py | 4 +- backend/app/utils/FaceNet.py | 4 +- backend/app/utils/YOLO.py | 8 +- .../app/utils/extract_location_metadata.py | 35 ++++- backend/app/utils/faceSearch.py | 4 +- backend/app/utils/face_clusters.py | 94 +++++++++--- backend/app/utils/folders.py | 22 ++- backend/app/utils/image_metadata.py | 20 ++- backend/app/utils/images.py | 72 +++++++-- backend/app/utils/memory_clustering.py | 138 +++++++++++++----- backend/app/utils/memory_monitor.py | 4 +- backend/app/utils/verify_memories_setup.py | 30 +++- backend/extract_metadata_simple.py | 35 ++++- backend/main.py | 13 +- backend/migrate_add_memories_columns.py | 40 +++-- backend/test_auto_gps_extraction.py | 6 +- backend/test_memories_api.py | 8 +- backend/tests/test_albums.py | 51 +++++-- backend/tests/test_face_clusters.py | 37 +++-- backend/tests/test_folders.py | 49 +++++-- backend/tests/test_user_preferences.py | 83 ++++++++--- 39 files changed, 935 insertions(+), 365 deletions(-) diff --git a/backend/app/database/albums.py b/backend/app/database/albums.py index f3259025b..261dfe045 100644 --- a/backend/app/database/albums.py +++ b/backend/app/database/albums.py @@ -9,8 +9,7 @@ def db_create_albums_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS albums ( album_id TEXT PRIMARY KEY, album_name TEXT UNIQUE, @@ -18,8 +17,7 @@ def db_create_albums_table() -> None: is_hidden BOOLEAN DEFAULT 0, password_hash TEXT ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -31,8 +29,7 @@ def db_create_album_images_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS album_images ( album_id TEXT, image_id TEXT, @@ -40,8 +37,7 @@ def db_create_album_images_table() -> None: FOREIGN KEY (album_id) REFERENCES albums(album_id) ON DELETE CASCADE, FOREIGN KEY (image_id) REFERENCES images(id) ON DELETE CASCADE ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -96,7 +92,9 @@ def db_insert_album( try: password_hash = None if password: - password_hash = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8") + password_hash = bcrypt.hashpw( + password.encode("utf-8"), bcrypt.gensalt() + ).decode("utf-8") cursor.execute( """ 
INSERT INTO albums (album_id, album_name, description, is_hidden, password_hash) @@ -121,7 +119,9 @@ def db_update_album( try: if password is not None: # Update with new password - password_hash = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8") + password_hash = bcrypt.hashpw( + password.encode("utf-8"), bcrypt.gensalt() + ).decode("utf-8") cursor.execute( """ UPDATE albums @@ -155,7 +155,9 @@ def db_get_album_images(album_id: str): conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() try: - cursor.execute("SELECT image_id FROM album_images WHERE album_id = ?", (album_id,)) + cursor.execute( + "SELECT image_id FROM album_images WHERE album_id = ?", (album_id,) + ) images = cursor.fetchall() return [img[0] for img in images] finally: @@ -166,7 +168,9 @@ def db_add_images_to_album(album_id: str, image_ids: list[str]): with get_db_connection() as conn: cursor = conn.cursor() - query = f"SELECT id FROM images WHERE id IN ({','.join('?' for _ in image_ids)})" + query = ( + f"SELECT id FROM images WHERE id IN ({','.join('?' for _ in image_ids)})" + ) cursor.execute(query, image_ids) valid_images = [row[0] for row in cursor.fetchall()] @@ -215,7 +219,9 @@ def verify_album_password(album_id: str, password: str) -> bool: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() try: - cursor.execute("SELECT password_hash FROM albums WHERE album_id = ?", (album_id,)) + cursor.execute( + "SELECT password_hash FROM albums WHERE album_id = ?", (album_id,) + ) row = cursor.fetchone() if not row or not row[0]: return False diff --git a/backend/app/database/face_clusters.py b/backend/app/database/face_clusters.py index 8666af258..dd21804ae 100644 --- a/backend/app/database/face_clusters.py +++ b/backend/app/database/face_clusters.py @@ -24,15 +24,13 @@ def db_create_clusters_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS face_clusters ( cluster_id TEXT PRIMARY KEY, cluster_name TEXT, face_image_base64 TEXT ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -70,7 +68,9 @@ def db_delete_all_clusters(cursor: Optional[sqlite3.Cursor] = None) -> int: conn.close() -def db_insert_clusters_batch(clusters: List[ClusterData], cursor: Optional[sqlite3.Cursor] = None) -> List[ClusterId]: +def db_insert_clusters_batch( + clusters: List[ClusterData], cursor: Optional[sqlite3.Cursor] = None +) -> List[ClusterId]: """ Insert multiple clusters into the database in batch. 
@@ -143,7 +143,9 @@ def db_get_cluster_by_id(cluster_id: ClusterId) -> Optional[ClusterData]: row = cursor.fetchone() if row: - return ClusterData(cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2]) + return ClusterData( + cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2] + ) return None finally: conn.close() @@ -160,13 +162,19 @@ def db_get_all_clusters() -> List[ClusterData]: cursor = conn.cursor() try: - cursor.execute("SELECT cluster_id, cluster_name, face_image_base64 FROM face_clusters ORDER BY cluster_id") + cursor.execute( + "SELECT cluster_id, cluster_name, face_image_base64 FROM face_clusters ORDER BY cluster_id" + ) rows = cursor.fetchall() clusters = [] for row in rows: - clusters.append(ClusterData(cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2])) + clusters.append( + ClusterData( + cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2] + ) + ) return clusters finally: @@ -222,7 +230,9 @@ def db_update_cluster( conn.close() -def db_get_all_clusters_with_face_counts() -> List[Dict[str, Union[str, Optional[str], int]]]: +def db_get_all_clusters_with_face_counts() -> ( + List[Dict[str, Union[str, Optional[str], int]]] +): """ Retrieve all clusters with their face counts and stored face images. @@ -233,8 +243,7 @@ def db_get_all_clusters_with_face_counts() -> List[Dict[str, Union[str, Optional cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT fc.cluster_id, fc.cluster_name, @@ -244,8 +253,7 @@ def db_get_all_clusters_with_face_counts() -> List[Dict[str, Union[str, Optional LEFT JOIN faces f ON fc.cluster_id = f.cluster_id GROUP BY fc.cluster_id, fc.cluster_name, fc.face_image_base64 ORDER BY fc.cluster_id - """ - ) + """) rows = cursor.fetchall() diff --git a/backend/app/database/faces.py b/backend/app/database/faces.py index 291edfc57..07144acfa 100644 --- a/backend/app/database/faces.py +++ b/backend/app/database/faces.py @@ -32,8 +32,7 @@ def db_create_faces_table() -> None: conn = sqlite3.connect(DATABASE_PATH) conn.execute("PRAGMA foreign_keys = ON") cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS faces ( face_id INTEGER PRIMARY KEY AUTOINCREMENT, image_id TEXT, @@ -44,8 +43,7 @@ def db_create_faces_table() -> None: FOREIGN KEY (image_id) REFERENCES images(id) ON DELETE CASCADE, FOREIGN KEY (cluster_id) REFERENCES face_clusters(cluster_id) ON DELETE SET NULL ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -113,18 +111,32 @@ def db_insert_face_embeddings_by_image_id( """ # Handle multiple faces in one image - if isinstance(embeddings, list) and len(embeddings) > 0 and isinstance(embeddings[0], np.ndarray): + if ( + isinstance(embeddings, list) + and len(embeddings) > 0 + and isinstance(embeddings[0], np.ndarray) + ): face_ids = [] for i, emb in enumerate(embeddings): - conf = confidence[i] if isinstance(confidence, list) and i < len(confidence) else confidence + conf = ( + confidence[i] + if isinstance(confidence, list) and i < len(confidence) + else confidence + ) bb = bbox[i] if isinstance(bbox, list) and i < len(bbox) else bbox - cid = cluster_id[i] if isinstance(cluster_id, list) and i < len(cluster_id) else cluster_id + cid = ( + cluster_id[i] + if isinstance(cluster_id, list) and i < len(cluster_id) + else cluster_id + ) face_id = db_insert_face_embeddings(image_id, emb, conf, bb, cid) face_ids.append(face_id) return face_ids else: # Single face - return db_insert_face_embeddings(image_id, embeddings, confidence, bbox, 
cluster_id) + return db_insert_face_embeddings( + image_id, embeddings, confidence, bbox, cluster_id + ) def get_all_face_embeddings(): @@ -132,8 +144,7 @@ def get_all_face_embeddings(): cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT f.embeddings, f.bbox, @@ -148,8 +159,7 @@ def get_all_face_embeddings(): JOIN images i ON f.image_id=i.id LEFT JOIN image_classes ic ON i.id = ic.image_id LEFT JOIN mappings m ON ic.class_id = m.class_id - """ - ) + """) results = cursor.fetchall() from app.utils.images import image_util_parse_metadata @@ -229,7 +239,9 @@ def db_get_faces_unassigned_clusters() -> List[Dict[str, Union[FaceId, FaceEmbed conn.close() -def db_get_all_faces_with_cluster_names() -> List[Dict[str, Union[FaceId, FaceEmbedding, Optional[str]]]]: +def db_get_all_faces_with_cluster_names() -> ( + List[Dict[str, Union[FaceId, FaceEmbedding, Optional[str]]]] +): """ Get all faces with their corresponding cluster names. @@ -240,14 +252,12 @@ def db_get_all_faces_with_cluster_names() -> List[Dict[str, Union[FaceId, FaceEm cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT f.face_id, f.embeddings, fc.cluster_name FROM faces f LEFT JOIN face_clusters fc ON f.cluster_id = fc.cluster_id ORDER BY f.face_id - """ - ) + """) rows = cursor.fetchall() @@ -337,14 +347,12 @@ def db_get_cluster_mean_embeddings() -> List[Dict[str, Union[str, FaceEmbedding] cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT f.cluster_id, f.embeddings FROM faces f WHERE f.cluster_id IS NOT NULL ORDER BY f.cluster_id - """ - ) + """) rows = cursor.fetchall() @@ -369,7 +377,9 @@ def db_get_cluster_mean_embeddings() -> List[Dict[str, Union[str, FaceEmbedding] stacked_embeddings = np.stack(embeddings_list) mean_embedding = np.mean(stacked_embeddings, axis=0) - cluster_means.append({"cluster_id": cluster_id, "mean_embedding": mean_embedding}) + cluster_means.append( + {"cluster_id": cluster_id, "mean_embedding": mean_embedding} + ) return cluster_means finally: diff --git a/backend/app/database/folders.py b/backend/app/database/folders.py index b12d1f912..60bca782f 100644 --- a/backend/app/database/folders.py +++ b/backend/app/database/folders.py @@ -17,8 +17,7 @@ def db_create_folders_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS folders ( folder_id TEXT PRIMARY KEY, parent_folder_id TEXT, @@ -28,8 +27,7 @@ def db_create_folders_table() -> None: taggingCompleted BOOLEAN, FOREIGN KEY (parent_folder_id) REFERENCES folders(folder_id) ON DELETE CASCADE ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -192,7 +190,9 @@ def db_delete_folder(folder_path: FolderPath) -> None: cursor = conn.cursor() try: abs_folder_path = os.path.abspath(folder_path) - cursor.execute("PRAGMA foreign_keys = ON;") # Important for deleting rows in image_id_mapping and images table because they reference this folder_id + cursor.execute( + "PRAGMA foreign_keys = ON;" + ) # Important for deleting rows in image_id_mapping and images table because they reference this folder_id conn.commit() cursor.execute( "SELECT folder_id FROM folders WHERE folder_path = ?", @@ -201,7 +201,9 @@ def db_delete_folder(folder_path: FolderPath) -> None: existing_folder = cursor.fetchone() if not existing_folder: - raise ValueError(f"Error: Folder '{folder_path}' does not exist in the database.") + raise ValueError( + f"Error: Folder '{folder_path}' does not exist in 
the database." + ) cursor.execute( "DELETE FROM folders WHERE folder_path = ?", @@ -213,7 +215,9 @@ def db_delete_folder(folder_path: FolderPath) -> None: conn.close() -def db_update_parent_ids_for_subtree(root_folder_path: FolderPath, folder_map: FolderMap) -> None: +def db_update_parent_ids_for_subtree( + root_folder_path: FolderPath, folder_map: FolderMap +) -> None: """ Update parent_folder_id for all folders in the subtree rooted at root_folder_path. Only updates folders whose parent_folder_id is NULL. @@ -246,7 +250,9 @@ def db_folder_exists(folder_path: FolderPath) -> bool: cursor = conn.cursor() try: abs_path = os.path.abspath(folder_path) - cursor.execute("SELECT folder_id FROM folders WHERE folder_path = ?", (abs_path,)) + cursor.execute( + "SELECT folder_id FROM folders WHERE folder_path = ?", (abs_path,) + ) result = cursor.fetchone() return bool(result) finally: @@ -265,14 +271,18 @@ def db_find_parent_folder_id(folder_path: FolderPath) -> Optional[FolderId]: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() try: - cursor.execute("SELECT folder_id FROM folders WHERE folder_path = ?", (parent_path,)) + cursor.execute( + "SELECT folder_id FROM folders WHERE folder_path = ?", (parent_path,) + ) result = cursor.fetchone() return result[0] if result else None finally: conn.close() -def db_update_ai_tagging_batch(folder_ids: List[FolderId], ai_tagging_enabled: bool) -> int: +def db_update_ai_tagging_batch( + folder_ids: List[FolderId], ai_tagging_enabled: bool +) -> int: """ Update AI_Tagging status for multiple folders in a single transaction. folder_ids: list of folder IDs to update @@ -382,7 +392,9 @@ def db_get_folder_ids_by_paths( conn.close() -def db_get_all_folder_details() -> List[Tuple[str, str, Optional[str], int, bool, Optional[bool]]]: +def db_get_all_folder_details() -> ( + List[Tuple[str, str, Optional[str], int, bool, Optional[bool]]] +): """ Get all folder details including folder_id, folder_path, parent_folder_id, last_modified_time, AI_Tagging, and taggingCompleted. 
@@ -392,13 +404,11 @@ def db_get_all_folder_details() -> List[Tuple[str, str, Optional[str], int, bool cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT folder_id, folder_path, parent_folder_id, last_modified_time, AI_Tagging, taggingCompleted FROM folders ORDER BY folder_path - """ - ) + """) return cursor.fetchall() finally: conn.close() diff --git a/backend/app/database/images.py b/backend/app/database/images.py index 742fb0987..de9e33e47 100644 --- a/backend/app/database/images.py +++ b/backend/app/database/images.py @@ -60,8 +60,7 @@ def db_create_images_table() -> None: cursor = conn.cursor() # Create new images table with merged fields including Memories feature columns - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS images ( id TEXT PRIMARY KEY, path VARCHAR UNIQUE, @@ -75,18 +74,22 @@ def db_create_images_table() -> None: captured_at DATETIME, FOREIGN KEY (folder_id) REFERENCES folders(folder_id) ON DELETE CASCADE ) - """ - ) + """) # Create indexes for Memories feature queries cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)") + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)" + ) # Create new image_classes junction table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS image_classes ( image_id TEXT, class_id INTEGER, @@ -94,8 +97,7 @@ def db_create_images_table() -> None: FOREIGN KEY (image_id) REFERENCES images(id) ON DELETE CASCADE, FOREIGN KEY (class_id) REFERENCES mappings(class_id) ON DELETE CASCADE ) - """ - ) + """) conn.commit() conn.close() @@ -111,9 +113,13 @@ def db_migrate_add_memories_columns() -> None: try: # Check if images table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" + ) if not cursor.fetchone(): - logger.info("Images table does not exist yet, will be created by db_create_images_table()") + logger.info( + "Images table does not exist yet, will be created by db_create_images_table()" + ) conn.close() return @@ -140,10 +146,18 @@ def db_migrate_add_memories_columns() -> None: changes_made = True # Create indexes - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)") - cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)") + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, 
captured_at)" + ) if changes_made: logger.info("Memories feature columns migration completed") @@ -270,7 +284,9 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: "isFavourite": bool(is_favourite), "latitude": latitude, "longitude": longitude, - "captured_at": (captured_at if captured_at else None), # SQLite returns string + "captured_at": ( + captured_at if captured_at else None + ), # SQLite returns string "tags": [], } @@ -311,15 +327,13 @@ def db_get_untagged_images() -> List[UntaggedImageRecord]: cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT i.id, i.path, i.folder_id, i.thumbnailPath, i.metadata FROM images i JOIN folders f ON i.folder_id = f.folder_id WHERE f.AI_Tagging = TRUE AND i.isTagged = FALSE - """ - ) + """) results = cursor.fetchall() @@ -509,7 +523,9 @@ def db_toggle_image_favourite_status(image_id: str) -> bool: # ============================================================================ -def db_get_images_by_date_range(start_date: datetime, end_date: datetime, include_favorites_only: bool = False) -> List[dict]: +def db_get_images_by_date_range( + start_date: datetime, end_date: datetime, include_favorites_only: bool = False +) -> List[dict]: """ Get images captured within a date range for Memories timeline. @@ -586,7 +602,9 @@ def db_get_images_by_date_range(start_date: datetime, end_date: datetime, includ conn.close() -def db_get_images_near_location(latitude: float, longitude: float, radius_km: float = 5.0) -> List[dict]: +def db_get_images_near_location( + latitude: float, longitude: float, radius_km: float = 5.0 +) -> List[dict]: """ Get images near a location within radius_km using bounding box approximation. @@ -761,8 +779,7 @@ def db_get_images_with_location() -> List[dict]: cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT i.id, i.path, @@ -782,8 +799,7 @@ def db_get_images_with_location() -> List[dict]: AND i.longitude IS NOT NULL GROUP BY i.id ORDER BY i.captured_at DESC - """ - ) + """) results = cursor.fetchall() @@ -828,8 +844,7 @@ def db_get_all_images_for_memories() -> List[dict]: cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT i.id, i.path, @@ -847,8 +862,7 @@ def db_get_all_images_for_memories() -> List[dict]: LEFT JOIN mappings m ON ic.class_id = m.class_id GROUP BY i.id ORDER BY i.captured_at DESC - """ - ) + """) results = cursor.fetchall() diff --git a/backend/app/database/metadata.py b/backend/app/database/metadata.py index 573cb1697..a86b64cb2 100644 --- a/backend/app/database/metadata.py +++ b/backend/app/database/metadata.py @@ -11,13 +11,11 @@ def db_create_metadata_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS metadata ( metadata TEXT ) - """ - ) + """) # Insert initial row if table is empty cursor.execute("SELECT COUNT(*) FROM metadata") @@ -55,7 +53,9 @@ def db_get_metadata() -> Optional[Dict[str, Any]]: conn.close() -def db_update_metadata(metadata: Dict[str, Any], cursor: Optional[sqlite3.Cursor] = None) -> bool: +def db_update_metadata( + metadata: Dict[str, Any], cursor: Optional[sqlite3.Cursor] = None +) -> bool: """ Update the metadata in the database. 
diff --git a/backend/app/database/yolo_mapping.py b/backend/app/database/yolo_mapping.py index af5c18927..fe8402dd2 100644 --- a/backend/app/database/yolo_mapping.py +++ b/backend/app/database/yolo_mapping.py @@ -12,14 +12,12 @@ def db_create_YOLO_classes_table(): try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS mappings ( class_id INTEGER PRIMARY KEY, name VARCHAR NOT NULL ) - """ - ) + """) for class_id, name in enumerate(class_names): cursor.execute( "INSERT OR REPLACE INTO mappings (class_id, name) VALUES (?, ?)", diff --git a/backend/app/logging/setup_logging.py b/backend/app/logging/setup_logging.py index 3fed7a4ce..0eedecaa9 100644 --- a/backend/app/logging/setup_logging.py +++ b/backend/app/logging/setup_logging.py @@ -78,7 +78,13 @@ def format(self, record: logging.LogRecord) -> str: component_start = formatted_message.find(f"[{component_prefix}]") if component_start >= 0: component_end = component_start + len(f"[{component_prefix}]") - formatted_message = formatted_message[:component_start] + self.COLORS[component_color] + formatted_message[component_start:component_end] + self.COLORS["reset"] + formatted_message[component_end:] + formatted_message = ( + formatted_message[:component_start] + + self.COLORS[component_color] + + formatted_message[component_start:component_end] + + self.COLORS["reset"] + + formatted_message[component_end:] + ) # Add color to the log level level_color = self.level_colors.get(record.levelname, "") @@ -93,7 +99,13 @@ def format(self, record: logging.LogRecord) -> str: level_start = formatted_message.find(f" {record.levelname} ") if level_start >= 0: level_end = level_start + len(f" {record.levelname} ") - formatted_message = formatted_message[:level_start] + color_codes + formatted_message[level_start:level_end] + self.COLORS["reset"] + formatted_message[level_end:] + formatted_message = ( + formatted_message[:level_start] + + color_codes + + formatted_message[level_start:level_end] + + self.COLORS["reset"] + + formatted_message[level_end:] + ) return formatted_message @@ -105,7 +117,12 @@ def load_config() -> Dict[str, Any]: Returns: Dict containing the logging configuration """ - config_path = Path(__file__).parent.parent.parent.parent / "utils" / "logging" / "logging_config.json" + config_path = ( + Path(__file__).parent.parent.parent.parent + / "utils" + / "logging" + / "logging_config.json" + ) try: with open(config_path, "r") as f: return json.load(f) @@ -125,12 +142,16 @@ def setup_logging(component_name: str, environment: Optional[str] = None) -> Non """ config = load_config() if not config: - print("No logging configuration found. Using default settings.", file=sys.stderr) + print( + "No logging configuration found. 
Using default settings.", file=sys.stderr + ) return # Get environment settings if not environment: - environment = os.environ.get("ENV", config.get("default_environment", "development")) + environment = os.environ.get( + "ENV", config.get("default_environment", "development") + ) env_settings = config.get("environments", {}).get(environment, {}) log_level = getattr(logging, env_settings.get("level", "INFO"), logging.INFO) @@ -138,7 +159,9 @@ def setup_logging(component_name: str, environment: Optional[str] = None) -> Non console_logging = env_settings.get("console_logging", True) # Get component configuration - component_config = config.get("components", {}).get(component_name, {"prefix": component_name.upper(), "color": "white"}) + component_config = config.get("components", {}).get( + component_name, {"prefix": component_name.upper(), "color": "white"} + ) # Configure root logger root_logger = logging.getLogger() @@ -161,8 +184,14 @@ def setup_logging(component_name: str, environment: Optional[str] = None) -> Non console_handler.setLevel(log_level) # Create formatter with component and color information - fmt = config.get("formatters", {}).get("default", {}).get("format", "[%(component)s] | %(levelname)s | %(message)s") - formatter = ColorFormatter(fmt, component_config, config.get("colors", {}), use_colors) + fmt = ( + config.get("formatters", {}) + .get("default", {}) + .get("format", "[%(component)s] | %(levelname)s | %(message)s") + ) + formatter = ColorFormatter( + fmt, component_config, config.get("colors", {}), use_colors + ) console_handler.setFormatter(formatter) root_logger.addHandler(console_handler) @@ -240,9 +269,13 @@ def configure_uvicorn_logging(component_name: str) -> None: # Make sure the handler uses our ColorFormatter config = load_config() - component_config = config.get("components", {}).get(component_name, {"prefix": component_name.upper(), "color": "white"}) + component_config = config.get("components", {}).get( + component_name, {"prefix": component_name.upper(), "color": "white"} + ) level_colors = config.get("colors", {}) - env_settings = config.get("environments", {}).get(os.environ.get("ENV", config.get("default_environment", "development")), {}) + env_settings = config.get("environments", {}).get( + os.environ.get("ENV", config.get("default_environment", "development")), {} + ) use_colors = env_settings.get("colored_output", True) fmt = "[%(component)s] | %(module)s | %(levelname)s | %(message)s" diff --git a/backend/app/models/FaceDetector.py b/backend/app/models/FaceDetector.py index 407e81d58..9e10fd5fc 100644 --- a/backend/app/models/FaceDetector.py +++ b/backend/app/models/FaceDetector.py @@ -56,7 +56,9 @@ def detect_faces(self, image_id: str, image_path: str, forSearch: bool = False): embeddings.append(embedding) if not forSearch and embeddings: - db_insert_face_embeddings_by_image_id(image_id, embeddings, confidence=confidences, bbox=bboxes) + db_insert_face_embeddings_by_image_id( + image_id, embeddings, confidence=confidences, bbox=bboxes + ) return { "ids": f"{class_ids}", diff --git a/backend/app/models/FaceNet.py b/backend/app/models/FaceNet.py index de6ddd34f..df17d3a77 100644 --- a/backend/app/models/FaceNet.py +++ b/backend/app/models/FaceNet.py @@ -11,12 +11,16 @@ class FaceNet: def __init__(self, model_path): - self.session = onnxruntime.InferenceSession(model_path, providers=ONNX_util_get_execution_providers()) + self.session = onnxruntime.InferenceSession( + model_path, providers=ONNX_util_get_execution_providers() + ) 
self.input_tensor_name = self.session.get_inputs()[0].name self.output_tensor_name = self.session.get_outputs()[0].name def get_embedding(self, preprocessed_image): - result = self.session.run([self.output_tensor_name], {self.input_tensor_name: preprocessed_image})[0] + result = self.session.run( + [self.output_tensor_name], {self.input_tensor_name: preprocessed_image} + )[0] embedding = result[0] return FaceNet_util_normalize_embedding(embedding) diff --git a/backend/app/models/ObjectClassifier.py b/backend/app/models/ObjectClassifier.py index bc2dd5174..1371705b1 100644 --- a/backend/app/models/ObjectClassifier.py +++ b/backend/app/models/ObjectClassifier.py @@ -8,7 +8,9 @@ class ObjectClassifier: def __init__(self): - self.yolo_classifier = YOLO(YOLO_util_get_model_path("object"), conf_threshold=0.4, iou_threshold=0.5) + self.yolo_classifier = YOLO( + YOLO_util_get_model_path("object"), conf_threshold=0.4, iou_threshold=0.5 + ) def get_classes(self, img_path) -> list[int] | None: img = cv2.imread(img_path) diff --git a/backend/app/models/YOLO.py b/backend/app/models/YOLO.py index aaca82d11..66e55d377 100644 --- a/backend/app/models/YOLO.py +++ b/backend/app/models/YOLO.py @@ -20,7 +20,9 @@ def __init__(self, path, conf_threshold=0.7, iou_threshold=0.5): self.conf_threshold = conf_threshold self.iou_threshold = iou_threshold # Create ONNX session once and reuse it - self.session = onnxruntime.InferenceSession(self.model_path, providers=ONNX_util_get_execution_providers()) + self.session = onnxruntime.InferenceSession( + self.model_path, providers=ONNX_util_get_execution_providers() + ) # Initialize model info self.get_input_details() @@ -42,7 +44,9 @@ def detect_objects(self, image): def inference(self, input_tensor): time.perf_counter() - outputs = self.session.run(self.output_names, {self.input_names[0]: input_tensor}) + outputs = self.session.run( + self.output_names, {self.input_names[0]: input_tensor} + ) return outputs def get_input_details(self): @@ -87,10 +91,16 @@ def extract_boxes(self, predictions): return boxes def rescale_boxes(self, boxes): - input_shape = np.array([self.input_width, self.input_height, self.input_width, self.input_height]) + input_shape = np.array( + [self.input_width, self.input_height, self.input_width, self.input_height] + ) boxes = np.divide(boxes, input_shape, dtype=np.float32) - boxes *= np.array([self.img_width, self.img_height, self.img_width, self.img_height]) + boxes *= np.array( + [self.img_width, self.img_height, self.img_width, self.img_height] + ) return boxes def draw_detections(self, image, draw_scores=True, mask_alpha=0.4): - return YOLO_util_draw_detections(image, self.boxes, self.scores, self.class_ids, mask_alpha) + return YOLO_util_draw_detections( + image, self.boxes, self.scores, self.class_ids, mask_alpha + ) diff --git a/backend/app/routes/albums.py b/backend/app/routes/albums.py index 20a58c2a0..ae0408613 100644 --- a/backend/app/routes/albums.py +++ b/backend/app/routes/albums.py @@ -63,7 +63,9 @@ def create_album(body: CreateAlbumRequest): album_id = str(uuid.uuid4()) try: - db_insert_album(album_id, body.name, body.description, body.is_hidden, body.password) + db_insert_album( + album_id, body.name, body.description, body.is_hidden, body.password + ) return CreateAlbumResponse(success=True, album_id=album_id) except Exception as e: raise HTTPException( @@ -83,7 +85,9 @@ def get_album(album_id: str = Path(...)): if not album: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorResponse(success=False, 
error="Album Not Found", message="Album not found").model_dump(), + detail=ErrorResponse( + success=False, error="Album Not Found", message="Album not found" + ).model_dump(), ) try: @@ -149,12 +153,16 @@ def update_album(album_id: str = Path(...), body: UpdateAlbumRequest = Body(...) ) try: - db_update_album(album_id, body.name, body.description, body.is_hidden, body.password) + db_update_album( + album_id, body.name, body.description, body.is_hidden, body.password + ) return SuccessResponse(success=True, msg="Album updated successfully") except Exception as e: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorResponse(success=False, error="Failed to Update Album", message=str(e)).model_dump(), + detail=ErrorResponse( + success=False, error="Failed to Update Album", message=str(e) + ).model_dump(), ) @@ -178,7 +186,9 @@ def delete_album(album_id: str = Path(...)): except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse(success=False, error="Failed to Delete Album", message=str(e)).model_dump(), + detail=ErrorResponse( + success=False, error="Failed to Delete Album", message=str(e) + ).model_dump(), ) @@ -187,7 +197,9 @@ def delete_album(album_id: str = Path(...)): # GET requests do not accept a body by default. # Since we need to send a password securely, switching this to POST -- necessary. # Open to suggestions if better approach possible. -def get_album_images(album_id: str = Path(...), body: GetAlbumImagesRequest = Body(...)): +def get_album_images( + album_id: str = Path(...), body: GetAlbumImagesRequest = Body(...) +): album = db_get_album(album_id) if not album: raise HTTPException( @@ -233,7 +245,9 @@ def get_album_images(album_id: str = Path(...), body: GetAlbumImagesRequest = Bo except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse(success=False, error="Failed to Retrieve Images", message=str(e)).model_dump(), + detail=ErrorResponse( + success=False, error="Failed to Retrieve Images", message=str(e) + ).model_dump(), ) @@ -263,11 +277,15 @@ def add_images_to_album(album_id: str = Path(...), body: ImageIdsRequest = Body( try: db_add_images_to_album(album_id, body.image_ids) - return SuccessResponse(success=True, msg=f"Added {len(body.image_ids)} images to album") + return SuccessResponse( + success=True, msg=f"Added {len(body.image_ids)} images to album" + ) except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse(success=False, error="Failed to Add Images", message=str(e)).model_dump(), + detail=ErrorResponse( + success=False, error="Failed to Add Images", message=str(e) + ).model_dump(), ) @@ -287,17 +305,23 @@ def remove_image_from_album(album_id: str = Path(...), image_id: str = Path(...) 
try: db_remove_image_from_album(album_id, image_id) - return SuccessResponse(success=True, msg="Image removed from album successfully") + return SuccessResponse( + success=True, msg="Image removed from album successfully" + ) except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse(success=False, error="Failed to Remove Image", message=str(e)).model_dump(), + detail=ErrorResponse( + success=False, error="Failed to Remove Image", message=str(e) + ).model_dump(), ) # DELETE /albums/{album_id}/images - Remove multiple images from album @router.delete("/{album_id}/images", response_model=SuccessResponse) -def remove_images_from_album(album_id: str = Path(...), body: ImageIdsRequest = Body(...)): +def remove_images_from_album( + album_id: str = Path(...), body: ImageIdsRequest = Body(...) +): album = db_get_album(album_id) if not album: raise HTTPException( @@ -321,9 +345,13 @@ def remove_images_from_album(album_id: str = Path(...), body: ImageIdsRequest = try: db_remove_images_from_album(album_id, body.image_ids) - return SuccessResponse(success=True, msg=f"Removed {len(body.image_ids)} images from album") + return SuccessResponse( + success=True, msg=f"Removed {len(body.image_ids)} images from album" + ) except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ErrorResponse(success=False, error="Failed to Remove Images", message=str(e)).model_dump(), + detail=ErrorResponse( + success=False, error="Failed to Remove Images", message=str(e) + ).model_dump(), ) diff --git a/backend/app/routes/face_clusters.py b/backend/app/routes/face_clusters.py index 00282a3a3..78394df79 100644 --- a/backend/app/routes/face_clusters.py +++ b/backend/app/routes/face_clusters.py @@ -28,7 +28,6 @@ from app.schemas.images import FaceSearchRequest, InputType from app.utils.faceSearch import perform_face_search - logger = logging.getLogger(__name__) router = APIRouter() @@ -213,7 +212,9 @@ def get_cluster_images(cluster_id: str): ) def face_tagging( payload: FaceSearchRequest, - input_type: Annotated[InputType, Query(description="Choose input type: 'path' or 'base64'")] = InputType.path, + input_type: Annotated[ + InputType, Query(description="Choose input type: 'path' or 'base64'") + ] = InputType.path, ): image_path = None @@ -274,8 +275,14 @@ def face_tagging( ).model_dump(), ) - format_match = base64_data.split(";")[0].split("/")[-1] if ";" in base64_data else "jpeg" - extension = format_match if format_match in ["jpeg", "jpg", "png", "gif", "webp"] else "jpeg" + format_match = ( + base64_data.split(";")[0].split("/")[-1] if ";" in base64_data else "jpeg" + ) + extension = ( + format_match + if format_match in ["jpeg", "jpg", "png", "gif", "webp"] + else "jpeg" + ) image_id = str(uuid.uuid4())[:8] temp_dir = "temp_uploads" os.makedirs(temp_dir, exist_ok=True) diff --git a/backend/app/routes/folders.py b/backend/app/routes/folders.py index 563078316..a66cca27c 100644 --- a/backend/app/routes/folders.py +++ b/backend/app/routes/folders.py @@ -74,7 +74,9 @@ def post_folder_add_sequence(folder_path: str, folder_id: int): API_util_restart_sync_microservice_watcher() except Exception as e: - logger.error(f"Error in post processing after folder {folder_path} was added: {e}") + logger.error( + f"Error in post processing after folder {folder_path} was added: {e}" + ) return False return True @@ -94,7 +96,9 @@ def post_AI_tagging_enabled_sequence(): return True -def post_sync_folder_sequence(folder_path: str, 
folder_id: int, added_folders: List[Tuple[str, str]]): +def post_sync_folder_sequence( + folder_path: str, folder_id: int, added_folders: List[Tuple[str, str]] +): """ Post-sync sequence for a folder. This function is called after a folder is synced. @@ -118,7 +122,9 @@ def post_sync_folder_sequence(folder_path: str, folder_id: int, added_folders: L # Restart sync microservice watcher after processing images API_util_restart_sync_microservice_watcher() except Exception as e: - logger.error(f"Error in post processing after folder {folder_path} was synced: {e}") + logger.error( + f"Error in post processing after folder {folder_path} was synced: {e}" + ) return False return True @@ -137,7 +143,9 @@ def add_folder(request: AddFolderRequest, app_state=Depends(get_state)): # Step 1: Data Validation if not os.path.isdir(request.folder_path): - raise ValueError(f"Error: '{request.folder_path}' is not a valid directory.") + raise ValueError( + f"Error: '{request.folder_path}' is not a valid directory." + ) if ( not os.access(request.folder_path, os.R_OK) @@ -188,7 +196,9 @@ def add_folder(request: AddFolderRequest, app_state=Depends(get_state)): executor.submit(post_folder_add_sequence, request.folder_path, root_folder_id) return AddFolderResponse( - data=AddFolderData(folder_id=root_folder_id, folder_path=request.folder_path), + data=AddFolderData( + folder_id=root_folder_id, folder_path=request.folder_path + ), success=True, message=f"Successfully added folder tree starting at: {request.folder_path}", ) @@ -232,7 +242,9 @@ def enable_ai_tagging(request: UpdateAITaggingRequest, app_state=Depends(get_sta executor.submit(post_AI_tagging_enabled_sequence) return UpdateAITaggingResponse( - data=UpdateAITaggingData(updated_count=updated_count, folder_ids=request.folder_ids), + data=UpdateAITaggingData( + updated_count=updated_count, folder_ids=request.folder_ids + ), success=True, message=f"Successfully enabled AI tagging for {updated_count} folder(s)", ) @@ -271,7 +283,9 @@ def disable_ai_tagging(request: UpdateAITaggingRequest): updated_count = db_disable_ai_tagging_batch(request.folder_ids) return UpdateAITaggingResponse( - data=UpdateAITaggingData(updated_count=updated_count, folder_ids=request.folder_ids), + data=UpdateAITaggingData( + updated_count=updated_count, folder_ids=request.folder_ids + ), success=True, message=f"Successfully disabled AI tagging for {updated_count} folder(s)", ) @@ -310,7 +324,9 @@ def delete_folders(request: DeleteFoldersRequest): deleted_count = db_delete_folders_batch(request.folder_ids) return DeleteFoldersResponse( - data=DeleteFoldersData(deleted_count=deleted_count, folder_ids=request.folder_ids), + data=DeleteFoldersData( + deleted_count=deleted_count, folder_ids=request.folder_ids + ), success=True, message=f"Successfully deleted {deleted_count} folder(s)", ) @@ -345,7 +361,9 @@ def sync_folder(request: SyncFolderRequest, app_state=Depends(get_state)): try: # Step 1: Get current state from both sources db_child_folders = db_get_direct_child_folders(request.folder_id) - filesystem_folders = folder_util_get_filesystem_direct_child_folders(request.folder_path) + filesystem_folders = folder_util_get_filesystem_direct_child_folders( + request.folder_path + ) # Step 2: Compare and identify differences filesystem_folder_set = set(filesystem_folders) @@ -355,11 +373,17 @@ def sync_folder(request: SyncFolderRequest, app_state=Depends(get_state)): folders_to_add = filesystem_folder_set - db_folder_paths # Step 3: Perform synchronization operations - deleted_count, 
deleted_folders = folder_util_delete_obsolete_folders(db_child_folders, folders_to_delete) - added_count, added_folders_with_ids = folder_util_add_multiple_folder_trees(folders_to_add, request.folder_id) + deleted_count, deleted_folders = folder_util_delete_obsolete_folders( + db_child_folders, folders_to_delete + ) + added_count, added_folders_with_ids = folder_util_add_multiple_folder_trees( + folders_to_add, request.folder_id + ) # Extract just the paths for the API response - added_folders = [folder_path for folder_id, folder_path in added_folders_with_ids] + added_folders = [ + folder_path for folder_id, folder_path in added_folders_with_ids + ] executor: ProcessPoolExecutor = app_state.executor executor.submit( diff --git a/backend/app/routes/images.py b/backend/app/routes/images.py index eafb3afdb..2e40cd825 100644 --- a/backend/app/routes/images.py +++ b/backend/app/routes/images.py @@ -48,7 +48,9 @@ class GetAllImagesResponse(BaseModel): response_model=GetAllImagesResponse, responses={500: {"model": ErrorResponse}}, ) -def get_all_images(tagged: Optional[bool] = Query(None, description="Filter images by tagged status")): +def get_all_images( + tagged: Optional[bool] = Query(None, description="Filter images by tagged status") +): """Get all images from the database.""" try: # Get all images with tags from database (single query with optional filter) @@ -99,9 +101,13 @@ def toggle_favourite(req: ToggleFavouriteRequest): try: success = db_toggle_image_favourite_status(image_id) if not success: - raise HTTPException(status_code=404, detail="Image not found or failed to toggle") + raise HTTPException( + status_code=404, detail="Image not found or failed to toggle" + ) # Fetch updated status to return - image = next((img for img in db_get_all_images() if img["id"] == image_id), None) + image = next( + (img for img in db_get_all_images() if img["id"] == image_id), None + ) return { "success": True, "image_id": image_id, diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py index ad3dde5fa..65f3edbc6 100644 --- a/backend/app/routes/memories.py +++ b/backend/app/routes/memories.py @@ -33,11 +33,9 @@ logger = get_logger(__name__) - # Response Models - class MemoryImage(BaseModel): """Image within a memory.""" @@ -113,15 +111,17 @@ class LocationsResponse(BaseModel): locations: List[LocationCluster] - # API Endpoints - @router.post("/generate", response_model=GenerateMemoriesResponse) def generate_memories( - location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), - date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days"), + location_radius_km: float = Query( + 5.0, ge=0.1, le=100, description="Location clustering radius in km" + ), + date_tolerance_days: int = Query( + 3, ge=1, le=30, description="Date tolerance in days" + ), min_images: int = Query(2, ge=1, le=10, description="Minimum images per memory"), ): """ @@ -132,7 +132,9 @@ def generate_memories( Returns simple breakdown: {location_count, date_count, total} """ try: - logger.info(f"Generating memories: radius={location_radius_km}km, date_tolerance={date_tolerance_days}days, min_images={min_images}") + logger.info( + f"Generating memories: radius={location_radius_km}km, date_tolerance={date_tolerance_days}days, min_images={min_images}" + ) # Fetch ALL images from app.database.images import db_get_all_images_for_memories @@ -163,7 +165,9 @@ def generate_memories( location_count = sum(1 for m in memories if m.get("type") == "location") 
date_count = sum(1 for m in memories if m.get("type") == "date") - logger.info(f"Generated {len(memories)} memories (location: {location_count}, date: {date_count})") + logger.info( + f"Generated {len(memories)} memories (location: {location_count}, date: {date_count})" + ) return GenerateMemoriesResponse( success=True, @@ -173,7 +177,7 @@ def generate_memories( memories=memories, ) - except Exception as e: + except Exception: logger.error("Error generating memories", exc_info=True) raise HTTPException(status_code=500, detail="Failed to generate memories") @@ -181,8 +185,12 @@ def generate_memories( @router.get("/timeline", response_model=TimelineResponse) def get_timeline( days: int = Query(365, ge=1, le=3650, description="Number of days to look back"), - location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), - date_tolerance_days: int = Query(3, ge=1, le=30, description="Date tolerance in days"), + location_radius_km: float = Query( + 5.0, ge=0.1, le=100, description="Location clustering radius in km" + ), + date_tolerance_days: int = Query( + 3, ge=1, le=30, description="Date tolerance in days" + ), ): """ Get memories from the past N days as a timeline. @@ -243,7 +251,7 @@ def get_timeline( memories=memories, ) - except Exception as e: + except Exception: logger.error("Error getting timeline", exc_info=True) raise HTTPException(status_code=500, detail="Failed to get timeline") @@ -289,18 +297,20 @@ def get_on_this_day(): captured_at_str = img.get("captured_at") if not captured_at_str: continue - + try: # Strip trailing Z and parse ISO format captured_at_str = captured_at_str.rstrip("Z") captured_dt = datetime.fromisoformat(captured_at_str) - + # Only include if day matches if captured_dt.day == current_day: day_images.append(img) except (ValueError, TypeError, AttributeError): # Skip images with malformed dates - logger.debug(f"Skipping image with invalid date: {captured_at_str}") + logger.debug( + f"Skipping image with invalid date: {captured_at_str}" + ) continue if day_images: @@ -319,7 +329,7 @@ def parse_captured_at(img): if not captured_at: return datetime.min try: - + if isinstance(captured_at, str): captured_at = captured_at.rstrip("Z") return datetime.fromisoformat(captured_at) @@ -336,15 +346,19 @@ def parse_captured_at(img): images=all_images, ) - except Exception as e: + except Exception: logger.error("Error getting 'On This Day'", exc_info=True) raise HTTPException(status_code=500, detail="Failed to get 'On This Day'") @router.get("/locations", response_model=LocationsResponse) def get_locations( - location_radius_km: float = Query(5.0, ge=0.1, le=100, description="Location clustering radius in km"), - max_sample_images: int = Query(5, ge=1, le=20, description="Max sample images per location"), + location_radius_km: float = Query( + 5.0, ge=0.1, le=100, description="Location clustering radius in km" + ), + max_sample_images: int = Query( + 5, ge=1, le=20, description="Max sample images per location" + ), ): """ Get all unique locations where photos were taken. 
@@ -379,7 +393,7 @@ def get_locations( # Cluster by location only (no date clustering) clustering = MemoryClustering( location_radius_km=location_radius_km, - date_tolerance_days=3, + date_tolerance_days=3, min_images_per_memory=1, ) @@ -393,8 +407,12 @@ def get_locations( continue # Calculate center - center_lat = sum(img["latitude"] for img in cluster_images) / len(cluster_images) - center_lon = sum(img["longitude"] for img in cluster_images) / len(cluster_images) + center_lat = sum(img["latitude"] for img in cluster_images) / len( + cluster_images + ) + center_lon = sum(img["longitude"] for img in cluster_images) / len( + cluster_images + ) # Get location name location_name = clustering._reverse_geocode(center_lat, center_lon) @@ -415,8 +433,10 @@ def get_locations( # Sort by image count (most photos first) locations.sort(key=lambda loc: loc.image_count, reverse=True) - return LocationsResponse(success=True, location_count=len(locations), locations=locations) + return LocationsResponse( + success=True, location_count=len(locations), locations=locations + ) - except Exception as e: + except Exception: logger.error("Error getting locations", exc_info=True) raise HTTPException(status_code=500, detail="Failed to get locations") diff --git a/backend/app/routes/user_preferences.py b/backend/app/routes/user_preferences.py index 3a80d4464..678e8cfc0 100644 --- a/backend/app/routes/user_preferences.py +++ b/backend/app/routes/user_preferences.py @@ -8,7 +8,6 @@ ErrorResponse, ) - router = APIRouter() diff --git a/backend/app/utils/API.py b/backend/app/utils/API.py index c734c83a5..32bb9a0fa 100644 --- a/backend/app/utils/API.py +++ b/backend/app/utils/API.py @@ -20,7 +20,9 @@ def API_util_restart_sync_microservice_watcher(): logger.info("Successfully restarted sync microservice watcher") return True else: - logger.warning(f"Failed to restart sync microservice watcher. Status code: {response.status_code}") + logger.warning( + f"Failed to restart sync microservice watcher. 
Status code: {response.status_code}" + ) return False except requests.exceptions.RequestException as e: diff --git a/backend/app/utils/FaceNet.py b/backend/app/utils/FaceNet.py index 7c49be31c..8a541bd21 100644 --- a/backend/app/utils/FaceNet.py +++ b/backend/app/utils/FaceNet.py @@ -18,7 +18,9 @@ def FaceNet_util_normalize_embedding(embedding): def FaceNet_util_cosine_similarity(embedding1, embedding2): - return np.dot(embedding1, embedding2) / (np.linalg.norm(embedding1) * np.linalg.norm(embedding2)) + return np.dot(embedding1, embedding2) / ( + np.linalg.norm(embedding1) * np.linalg.norm(embedding2) + ) def FaceNet_util_get_model_path(): diff --git a/backend/app/utils/YOLO.py b/backend/app/utils/YOLO.py index 056d199d4..cf63d41dc 100644 --- a/backend/app/utils/YOLO.py +++ b/backend/app/utils/YOLO.py @@ -158,7 +158,9 @@ def YOLO_util_xywh2xyxy(x): return y -def YOLO_util_draw_detections(image, boxes, scores, class_ids, mask_alpha=0.3, confidence_threshold=0.3): +def YOLO_util_draw_detections( + image, boxes, scores, class_ids, mask_alpha=0.3, confidence_threshold=0.3 +): det_img = image.copy() img_height, img_width = image.shape[:2] @@ -223,7 +225,9 @@ def YOLO_util_draw_text( ) -def YOLO_util_draw_masks(image: np.ndarray, boxes: np.ndarray, classes: np.ndarray, mask_alpha: float = 0.3) -> np.ndarray: +def YOLO_util_draw_masks( + image: np.ndarray, boxes: np.ndarray, classes: np.ndarray, mask_alpha: float = 0.3 +) -> np.ndarray: mask_img = image.copy() # Draw bounding boxes and labels of detections diff --git a/backend/app/utils/extract_location_metadata.py b/backend/app/utils/extract_location_metadata.py index 39ed04c3f..2eede996b 100644 --- a/backend/app/utils/extract_location_metadata.py +++ b/backend/app/utils/extract_location_metadata.py @@ -45,7 +45,9 @@ def __init__(self): "errors": 0, } - def extract_gps_coordinates(self, metadata: Dict[str, Any]) -> Tuple[Optional[float], Optional[float]]: + def extract_gps_coordinates( + self, metadata: Dict[str, Any] + ) -> Tuple[Optional[float], Optional[float]]: """ Extract GPS coordinates from metadata dictionary. 
@@ -100,7 +102,9 @@ def extract_gps_coordinates(self, metadata: Dict[str, Any]) -> Tuple[Optional[fl latitude = lat longitude = lon else: - logger.warning(f"Invalid coordinate range: lat={lat}, lon={lon}") + logger.warning( + f"Invalid coordinate range: lat={lat}, lon={lon}" + ) except (ValueError, TypeError) as e: logger.warning(f"Could not convert coordinates to float: {e}") @@ -147,7 +151,12 @@ def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: if not date_str: exif = metadata.get("exif", {}) if isinstance(exif, dict): - date_str = exif.get("datetime") or exif.get("DateTime") or exif.get("DateTimeOriginal") or exif.get("DateTimeDigitized") + date_str = ( + exif.get("datetime") + or exif.get("DateTime") + or exif.get("DateTimeOriginal") + or exif.get("DateTimeDigitized") + ) # Parse datetime string if date_str: @@ -170,7 +179,9 @@ def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: if "T" in date_str: try: # Remove timezone suffix for simpler parsing - date_str_clean = date_str.replace("Z", "").split("+")[0].split("-") + date_str_clean = ( + date_str.replace("Z", "").split("+")[0].split("-") + ) # Rejoin only date-time parts (not timezone) if len(date_str_clean) >= 3: date_str_clean = "-".join(date_str_clean[:3]) @@ -195,7 +206,9 @@ def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: return captured_at - def extract_all(self, metadata_json: str) -> Tuple[Optional[float], Optional[float], Optional[datetime]]: + def extract_all( + self, metadata_json: str + ) -> Tuple[Optional[float], Optional[float], Optional[datetime]]: """ Extract GPS coordinates and datetime from metadata JSON string. @@ -348,9 +361,15 @@ def _print_summary(self): logger.info("=" * 70) logger.info(f"Total images processed: {self.stats['total']}") logger.info(f"Images updated: {self.stats['updated']}") - logger.info(f"Images with location data: {self.stats['with_location']} ({self._percentage('with_location')}%)") - logger.info(f"Images with datetime: {self.stats['with_datetime']} ({self._percentage('with_datetime')}%)") - logger.info(f"Images with both: {self.stats['with_both']} ({self._percentage('with_both')}%)") + logger.info( + f"Images with location data: {self.stats['with_location']} ({self._percentage('with_location')}%)" + ) + logger.info( + f"Images with datetime: {self.stats['with_datetime']} ({self._percentage('with_datetime')}%)" + ) + logger.info( + f"Images with both: {self.stats['with_both']} ({self._percentage('with_both')}%)" + ) logger.info(f"Images skipped (no data): {self.stats['skipped']}") logger.info(f"Errors encountered: {self.stats['errors']}") logger.info("=" * 70) diff --git a/backend/app/utils/faceSearch.py b/backend/app/utils/faceSearch.py index 3222ba768..385cce908 100644 --- a/backend/app/utils/faceSearch.py +++ b/backend/app/utils/faceSearch.py @@ -76,7 +76,9 @@ def perform_face_search(image_path: str) -> GetAllImagesResponse: ) for image in images: - similarity = FaceNet_util_cosine_similarity(new_embedding, image["embeddings"]) + similarity = FaceNet_util_cosine_similarity( + new_embedding, image["embeddings"] + ) if similarity >= CONFIDENCE_PERCENT: matches.append( ImageData( diff --git a/backend/app/utils/face_clusters.py b/backend/app/utils/face_clusters.py index 78281a5d8..4c373c981 100644 --- a/backend/app/utils/face_clusters.py +++ b/backend/app/utils/face_clusters.py @@ -141,9 +141,13 @@ def cluster_util_face_clusters_sync(force_full_reclustering: bool = False): face_image_base64 = 
_generate_cluster_face_image(cluster_id, cursor) if face_image_base64: # Update the cluster with the generated face image - success = _update_cluster_face_image(cluster_id, face_image_base64, cursor) + success = _update_cluster_face_image( + cluster_id, face_image_base64, cursor + ) if not success: - raise RuntimeError(f"Failed to update face image for cluster {cluster_id}") + raise RuntimeError( + f"Failed to update face image for cluster {cluster_id}" + ) # Update metadata with new reclustering time, preserving other values current_metadata = metadata or {} @@ -221,7 +225,9 @@ def cluster_util_cluster_all_face_embeddings( existing_cluster_names.append(face["cluster_name"]) else: invalid_count += 1 - logger.warning(f"Skipping invalid embedding for face_id {face['face_id']} (NaN or zero vector)") + logger.warning( + f"Skipping invalid embedding for face_id {face['face_id']} (NaN or zero vector)" + ) if invalid_count > 0: logger.warning(f"Filtered out {invalid_count} invalid embeddings") @@ -240,14 +246,18 @@ def cluster_util_cluster_all_face_embeddings( # Guard against NaN distances (shouldn't happen after validation, but double-check) if not np.isfinite(distances).all(): - logger.error("NaN or infinite values detected in distance matrix after validation") + logger.error( + "NaN or infinite values detected in distance matrix after validation" + ) # Replace NaN/inf with max distance (1.0) distances = np.nan_to_num(distances, nan=1.0, posinf=1.0, neginf=1.0) # Apply similarity threshold - mark dissimilar faces as completely different max_distance = 1 - similarity_threshold # Convert similarity to distance distances[distances > max_distance] = 1.0 # Mark as completely different - logger.info(f"Applied similarity threshold: {similarity_threshold} (max_distance: {max_distance:.3f})") + logger.info( + f"Applied similarity threshold: {similarity_threshold} (max_distance: {max_distance:.3f})" + ) # Perform DBSCAN clustering with precomputed distances dbscan = DBSCAN( @@ -258,7 +268,9 @@ def cluster_util_cluster_all_face_embeddings( ) cluster_labels = dbscan.fit_predict(distances) - logger.info(f"DBSCAN found {len(set(cluster_labels)) - (1 if -1 in cluster_labels else 0)} clusters") + logger.info( + f"DBSCAN found {len(set(cluster_labels)) - (1 if -1 in cluster_labels else 0)} clusters" + ) # Group faces by cluster labels clusters = defaultdict(list) @@ -296,7 +308,9 @@ def cluster_util_cluster_all_face_embeddings( # Post-clustering merge: merge similar clusters based on representative faces # Use similarity_threshold if merge_threshold not explicitly provided effective_merge_threshold = merge_threshold if merge_threshold is not None else 0.7 - results = _merge_similar_clusters(results, merge_threshold=effective_merge_threshold) + results = _merge_similar_clusters( + results, merge_threshold=effective_merge_threshold + ) return results @@ -347,7 +361,9 @@ def cluster_util_assign_cluster_to_faces_without_clusterId( mean_embeddings.append(mean_emb) else: invalid_clusters += 1 - logger.warning(f"Skipping invalid cluster mean for cluster_id {cluster_data['cluster_id']}") + logger.warning( + f"Skipping invalid cluster mean for cluster_id {cluster_data['cluster_id']}" + ) if invalid_clusters > 0: logger.warning(f"Filtered out {invalid_clusters} invalid cluster means") @@ -390,15 +406,21 @@ def cluster_util_assign_cluster_to_faces_without_clusterId( nearest_cluster_idx = np.argmin(distances) nearest_cluster_id = cluster_ids[nearest_cluster_idx] - face_cluster_mappings.append({"face_id": face_id, 
"cluster_id": nearest_cluster_id}) + face_cluster_mappings.append( + {"face_id": face_id, "cluster_id": nearest_cluster_id} + ) if skipped_invalid > 0: - logger.warning(f"Skipped {skipped_invalid} faces with invalid embeddings during assignment") + logger.warning( + f"Skipped {skipped_invalid} faces with invalid embeddings during assignment" + ) return face_cluster_mappings -def _merge_similar_clusters(results: List[ClusterResult], merge_threshold: float = 0.85) -> List[ClusterResult]: +def _merge_similar_clusters( + results: List[ClusterResult], merge_threshold: float = 0.85 +) -> List[ClusterResult]: """ Merge clusters that are too similar based on their mean embeddings. @@ -433,7 +455,9 @@ def _merge_similar_clusters(results: List[ClusterResult], merge_threshold: float cluster_means[cluster_uuid] = mean_embedding else: invalid_clusters.append(cluster_uuid) - logger.warning(f"Cluster {cluster_uuid} has invalid mean embedding, excluding from merge") + logger.warning( + f"Cluster {cluster_uuid} has invalid mean embedding, excluding from merge" + ) # Remove invalid clusters from consideration for invalid_uuid in invalid_clusters: @@ -463,13 +487,17 @@ def _merge_similar_clusters(results: List[ClusterResult], merge_threshold: float # Guard against NaN similarity if not np.isfinite(similarity): - logger.warning(f"NaN similarity between clusters {uuid1} and {uuid2}, skipping merge") + logger.warning( + f"NaN similarity between clusters {uuid1} and {uuid2}, skipping merge" + ) continue # If very similar, merge cluster2 into cluster1 if similarity >= merge_threshold: merge_mapping[uuid2] = uuid1 - logger.info(f"Merging cluster {uuid2} into {uuid1} (similarity: {similarity:.3f})") + logger.info( + f"Merging cluster {uuid2} into {uuid1} (similarity: {similarity:.3f})" + ) # Apply merges if merge_mapping: @@ -517,13 +545,17 @@ def resolve_final_cluster(uuid): for result in merged_results: result.cluster_name = final_cluster_names.get(result.cluster_uuid) - logger.info(f"Merged {len(merge_mapping)} clusters. Final count: {len(set(r.cluster_uuid for r in merged_results))}") + logger.info( + f"Merged {len(merge_mapping)} clusters. Final count: {len(set(r.cluster_uuid for r in merged_results))}" + ) return merged_results return results -def _calculate_cosine_distances(face_embedding: NDArray, cluster_means: NDArray) -> NDArray: +def _calculate_cosine_distances( + face_embedding: NDArray, cluster_means: NDArray +) -> NDArray: """ Calculate cosine distances between a face embedding and cluster means. Handles edge cases with zero vectors and ensures finite results. @@ -544,7 +576,9 @@ def _calculate_cosine_distances(face_embedding: NDArray, cluster_means: NDArray) # Normalize cluster means with safe division cluster_norm_values = np.linalg.norm(cluster_means, axis=1, keepdims=True) - cluster_norm_values = np.maximum(cluster_norm_values, 1e-6) # Prevent division by zero + cluster_norm_values = np.maximum( + cluster_norm_values, 1e-6 + ) # Prevent division by zero cluster_norms = cluster_means / cluster_norm_values # Calculate cosine similarities (dot product of normalized vectors) @@ -559,7 +593,9 @@ def _calculate_cosine_distances(face_embedding: NDArray, cluster_means: NDArray) return cosine_distances -def _update_cluster_face_image(cluster_id: str, face_image_base64: str, cursor: Optional[sqlite3.Cursor] = None) -> bool: +def _update_cluster_face_image( + cluster_id: str, face_image_base64: str, cursor: Optional[sqlite3.Cursor] = None +) -> bool: """ Update the face image for a specific cluster. 
@@ -597,7 +633,9 @@ def _update_cluster_face_image(cluster_id: str, face_image_base64: str, cursor: conn.close() -def _get_cluster_face_data(cluster_uuid: str, cursor: sqlite3.Cursor) -> Optional[tuple]: +def _get_cluster_face_data( + cluster_uuid: str, cursor: sqlite3.Cursor +) -> Optional[tuple]: """ Get the image path and bounding box for the first face in a cluster. @@ -640,7 +678,9 @@ def _get_cluster_face_data(cluster_uuid: str, cursor: sqlite3.Cursor) -> Optiona return None -def _calculate_square_crop_bounds(bbox: Dict, img_shape: tuple, padding: int = 50) -> tuple: +def _calculate_square_crop_bounds( + bbox: Dict, img_shape: tuple, padding: int = 50 +) -> tuple: """ Calculate square crop bounds centered on a face bounding box. @@ -703,7 +743,9 @@ def _calculate_square_crop_bounds(bbox: Dict, img_shape: tuple, padding: int = 5 return (square_x_start, square_y_start, square_x_end, square_y_end) -def _crop_and_resize_face(img: np.ndarray, crop_bounds: tuple, target_size: int = 300) -> Optional[np.ndarray]: +def _crop_and_resize_face( + img: np.ndarray, crop_bounds: tuple, target_size: int = 300 +) -> Optional[np.ndarray]: """ Crop and resize a face region from an image. @@ -753,7 +795,9 @@ def _encode_image_to_base64(img: np.ndarray, format: str = ".jpg") -> Optional[s return None -def _generate_cluster_face_image(cluster_uuid: str, cursor: sqlite3.Cursor) -> Optional[str]: +def _generate_cluster_face_image( + cluster_uuid: str, cursor: sqlite3.Cursor +) -> Optional[str]: """ Generate a base64 encoded face image for a cluster. @@ -804,7 +848,11 @@ def _determine_cluster_name(faces_in_cluster: List[Dict]) -> Optional[str]: Most common non-null cluster name, or None if no named clusters exist """ # Extract non-null cluster names - existing_names = [face["existing_cluster_name"] for face in faces_in_cluster if face["existing_cluster_name"] is not None] + existing_names = [ + face["existing_cluster_name"] + for face in faces_in_cluster + if face["existing_cluster_name"] is not None + ] if not existing_names: return None diff --git a/backend/app/utils/folders.py b/backend/app/utils/folders.py index b4596887c..ec014f479 100644 --- a/backend/app/utils/folders.py +++ b/backend/app/utils/folders.py @@ -14,7 +14,9 @@ logger = get_logger(__name__) -def folder_util_add_folder_tree(root_path, parent_folder_id=None, AI_Tagging=False, taggingCompleted=None): +def folder_util_add_folder_tree( + root_path, parent_folder_id=None, AI_Tagging=False, taggingCompleted=None +): """ Recursively collect folder data and insert all folders in a single database transaction. All folders are initially inserted with NULL parent_id, which is updated after insertion. 
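The folder-tree docstring above describes one walk that assigns ids up front and resolves each parent from a path map; a compact sketch of that idea follows, with a hypothetical helper name and uuid4 ids assumed. The real helper additionally inserts rows with NULL parent_id first and patches parents after the batch insert.

import os
import uuid

def collect_folder_rows(root_path: str):
    # os.walk is top-down, so a parent directory is always mapped before its children.
    folder_map = {}   # dirpath -> folder_id
    rows = []         # (folder_id, dirpath, parent_id) tuples ready for a bulk insert
    for dirpath, _dirnames, _filenames in os.walk(root_path):
        folder_id = str(uuid.uuid4())
        parent_id = folder_map.get(os.path.dirname(dirpath))  # None for the root itself
        folder_map[dirpath] = folder_id
        rows.append((folder_id, dirpath, parent_id))
    return rows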
@@ -33,7 +35,9 @@ def folder_util_add_folder_tree(root_path, parent_folder_id=None, AI_Tagging=Fal parent_id = parent_folder_id else: parent_path = os.path.dirname(dirpath) - parent_id = folder_map[parent_path][0] if parent_path in folder_map else None + parent_id = ( + folder_map[parent_path][0] if parent_path in folder_map else None + ) # Store both folder_id and parent_id in the map folder_map[dirpath] = (this_folder_id, parent_id) @@ -99,7 +103,9 @@ def folder_util_get_filesystem_direct_child_folders(folder_path: str) -> List[st ) -def folder_util_delete_obsolete_folders(db_child_folders: List[Tuple[str, str]], folders_to_delete: set) -> Tuple[int, List[str]]: +def folder_util_delete_obsolete_folders( + db_child_folders: List[Tuple[str, str]], folders_to_delete: set +) -> Tuple[int, List[str]]: """ Delete folders from the database that are no longer present in the filesystem. @@ -114,7 +120,11 @@ def folder_util_delete_obsolete_folders(db_child_folders: List[Tuple[str, str]], return 0, [] # Get the folder IDs for the folders to delete - folder_ids_to_delete = [folder_id for folder_id, folder_path in db_child_folders if folder_path in folders_to_delete] + folder_ids_to_delete = [ + folder_id + for folder_id, folder_path in db_child_folders + if folder_path in folders_to_delete + ] if folder_ids_to_delete: deleted_count = db_delete_folders_batch(folder_ids_to_delete) @@ -123,7 +133,9 @@ def folder_util_delete_obsolete_folders(db_child_folders: List[Tuple[str, str]], return 0, [] -def folder_util_add_multiple_folder_trees(folders_to_add: set, parent_folder_id: str) -> Tuple[int, List[Tuple[str, str]]]: +def folder_util_add_multiple_folder_trees( + folders_to_add: set, parent_folder_id: str +) -> Tuple[int, List[Tuple[str, str]]]: """ Add multiple folder trees with same parent to the database. diff --git a/backend/app/utils/image_metadata.py b/backend/app/utils/image_metadata.py index b62ffa3ec..c5a91d3e6 100644 --- a/backend/app/utils/image_metadata.py +++ b/backend/app/utils/image_metadata.py @@ -32,7 +32,9 @@ def extract_metadata(image_path): tag = TAGS.get(tag_id, tag_id) data = exifdata.get(tag_id) if isinstance(data, (tuple, list)): - data = [float(d) if isinstance(d, IFDRational) else d for d in data] + data = [ + float(d) if isinstance(d, IFDRational) else d for d in data + ] elif isinstance(data, IFDRational): data = float(data) @@ -44,7 +46,9 @@ def extract_metadata(image_path): metadata[str(tag).lower().replace(" ", "_")] = data except Exception as exif_error: - logger.warning(f"Failed to extract EXIF data from {image_path}. Error: {exif_error}") + logger.warning( + f"Failed to extract EXIF data from {image_path}. Error: {exif_error}" + ) except FileNotFoundError: raise # Re-raise if file is not found @@ -59,12 +63,18 @@ def extract_metadata(image_path): try: metadata["file_size"] = os.path.getsize(image_path) except OSError as file_error: - logger.warning(f"Could not retrieve file size for {image_path}. Error: {file_error}") + logger.warning( + f"Could not retrieve file size for {image_path}. Error: {file_error}" + ) # Image creation date try: creation_time = os.path.getctime(image_path) - metadata["creation_date"] = datetime.fromtimestamp(creation_time).strftime("%Y-%m-%d %H:%M:%S") + metadata["creation_date"] = datetime.fromtimestamp(creation_time).strftime( + "%Y-%m-%d %H:%M:%S" + ) except OSError as time_error: - logger.warning(f"Could not retrieve creation date for {image_path}. Error: {time_error}") + logger.warning( + f"Could not retrieve creation date for {image_path}. 
Error: {time_error}" + ) return metadata diff --git a/backend/app/utils/images.py b/backend/app/utils/images.py index 6f10d5ab6..ccf65cdf8 100644 --- a/backend/app/utils/images.py +++ b/backend/app/utils/images.py @@ -62,7 +62,9 @@ def image_util_process_folder_images(folder_data: List[Tuple[str, int, bool]]) - folder_path_to_id = {os.path.abspath(folder_path): folder_id} # Step 3: Prepare image records for this folder - folder_image_records = image_util_prepare_image_records(image_files, folder_path_to_id) + folder_image_records = image_util_prepare_image_records( + image_files, folder_path_to_id + ) all_image_records.extend(folder_image_records) except Exception as e: @@ -135,7 +137,9 @@ def image_util_classify_and_face_detect_images( face_detector.close() -def image_util_prepare_image_records(image_files: List[str], folder_path_to_id: Dict[str, int]) -> List[Dict]: +def image_util_prepare_image_records( + image_files: List[str], folder_path_to_id: Dict[str, int] +) -> List[Dict]: """ Prepare image records with thumbnails for database insertion. Automatically extracts GPS coordinates and capture datetime from metadata. @@ -158,7 +162,9 @@ def image_util_prepare_image_records(image_files: List[str], folder_path_to_id: image_id = str(uuid.uuid4()) thumbnail_name = f"thumbnail_{image_id}.jpg" - thumbnail_path = os.path.abspath(os.path.join(THUMBNAIL_IMAGES_PATH, thumbnail_name)) + thumbnail_path = os.path.abspath( + os.path.join(THUMBNAIL_IMAGES_PATH, thumbnail_name) + ) # Generate thumbnail if image_util_generate_thumbnail(image_path, thumbnail_path): @@ -175,11 +181,17 @@ def image_util_prepare_image_records(image_files: List[str], folder_path_to_id: # Log GPS extraction results if latitude and longitude: - logger.info(f"GPS extracted for {os.path.basename(image_path)}: ({latitude}, {longitude})") + logger.info( + f"GPS extracted for {os.path.basename(image_path)}: ({latitude}, {longitude})" + ) if captured_at: - logger.debug(f"Date extracted for {os.path.basename(image_path)}: {captured_at}") + logger.debug( + f"Date extracted for {os.path.basename(image_path)}: {captured_at}" + ) except Exception as e: - logger.warning(f"GPS extraction failed for {os.path.basename(image_path)}: {e}") + logger.warning( + f"GPS extraction failed for {os.path.basename(image_path)}: {e}" + ) # Continue without GPS - don't fail the upload # Build image record with GPS data @@ -194,7 +206,11 @@ def image_util_prepare_image_records(image_files: List[str], folder_path_to_id: "isTagged": False, "latitude": latitude, # Can be None "longitude": longitude, # Can be None - "captured_at": (captured_at.isoformat() if isinstance(captured_at, datetime.datetime) and captured_at else captured_at), # Can be None + "captured_at": ( + captured_at.isoformat() + if isinstance(captured_at, datetime.datetime) and captured_at + else captured_at + ), # Can be None } image_records.append(image_record) @@ -202,7 +218,9 @@ def image_util_prepare_image_records(image_files: List[str], folder_path_to_id: return image_records -def image_util_get_images_from_folder(folder_path: str, recursive: bool = True) -> List[str]: +def image_util_get_images_from_folder( + folder_path: str, recursive: bool = True +) -> List[str]: """Get all image files from a folder. 
Args: @@ -234,7 +252,9 @@ def image_util_get_images_from_folder(folder_path: str, recursive: bool = True) return image_files -def image_util_generate_thumbnail(image_path: str, thumbnail_path: str, size: Tuple[int, int] = (600, 600)) -> bool: +def image_util_generate_thumbnail( + image_path: str, thumbnail_path: str, size: Tuple[int, int] = (600, 600) +) -> bool: """Generate thumbnail for a single image.""" try: with Image.open(image_path) as img: @@ -301,7 +321,9 @@ def image_util_create_folder_path_mapping( return folder_path_to_id -def image_util_find_folder_id_for_image(image_path: str, folder_path_to_id: Dict[str, int]) -> int: +def image_util_find_folder_id_for_image( + image_path: str, folder_path_to_id: Dict[str, int] +) -> int: """ Find the most specific folder ID for a given image path. @@ -345,7 +367,11 @@ def _convert_to_degrees(value): """Converts a GPS coordinate value from DMS to decimal degrees.""" def to_float(v): - return float(v.numerator) / float(v.denominator) if hasattr(v, "numerator") else float(v) + return ( + float(v.numerator) / float(v.denominator) + if hasattr(v, "numerator") + else float(v) + ) d, m, s = (to_float(v) for v in value[:3]) return d + (m / 60.0) + (s / 3600.0) @@ -426,7 +452,11 @@ def image_util_extract_metadata(image_path: str) -> dict: # Robust EXIF extraction with safe fallback try: - exif_data = img.getexif() if hasattr(img, "getexif") else getattr(img, "_getexif", lambda: None)() + exif_data = ( + img.getexif() + if hasattr(img, "getexif") + else getattr(img, "_getexif", lambda: None)() + ) except Exception: exif_data = None @@ -436,7 +466,11 @@ def image_util_extract_metadata(image_path: str) -> dict: for k, v in exif.items(): if ExifTags.TAGS.get(k) == "DateTimeOriginal": - dt_original = v.decode("utf-8", "ignore") if isinstance(v, (bytes, bytearray)) else str(v) + dt_original = ( + v.decode("utf-8", "ignore") + if isinstance(v, (bytes, bytearray)) + else str(v) + ) break # Safe parse; fall back to mtime without losing width/height @@ -447,9 +481,13 @@ def image_util_extract_metadata(image_path: str) -> dict: "%Y:%m:%d %H:%M:%S", ).isoformat() except ValueError: - date_created = datetime.datetime.fromtimestamp(stats.st_mtime).isoformat() + date_created = datetime.datetime.fromtimestamp( + stats.st_mtime + ).isoformat() else: - date_created = datetime.datetime.fromtimestamp(stats.st_mtime).isoformat() + date_created = datetime.datetime.fromtimestamp( + stats.st_mtime + ).isoformat() metadata_dict = { "name": os.path.basename(image_path), @@ -471,7 +509,9 @@ def image_util_extract_metadata(image_path: str) -> dict: logger.error(f"Pillow could not open image {image_path}: {e}") return { "name": os.path.basename(image_path), - "date_created": datetime.datetime.fromtimestamp(stats.st_mtime).isoformat(), + "date_created": datetime.datetime.fromtimestamp( + stats.st_mtime + ).isoformat(), "file_location": image_path, "file_size": stats.st_size, "width": 0, diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py index 973f4c2b3..79928968b 100644 --- a/backend/app/utils/memory_clustering.py +++ b/backend/app/utils/memory_clustering.py @@ -66,7 +66,9 @@ } -def find_nearest_city(latitude: float, longitude: float, max_distance_km: float = 50.0) -> Optional[str]: +def find_nearest_city( + latitude: float, longitude: float, max_distance_km: float = 50.0 +) -> Optional[str]: """ Find the nearest known city to given coordinates. 
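A self-contained sketch of the nearest-city lookup described above, assuming a plain dict of city name to (lat, lon) pairs; the module's actual city table, signature, and return details may differ.

import math

def haversine_km(lat1, lon1, lat2, lon2):
    # Great-circle distance between two (lat, lon) points in kilometres.
    r = 6371.0
    p1, p2 = math.radians(lat1), math.radians(lat2)
    dphi = math.radians(lat2 - lat1)
    dlmb = math.radians(lon2 - lon1)
    a = math.sin(dphi / 2) ** 2 + math.cos(p1) * math.cos(p2) * math.sin(dlmb / 2) ** 2
    return 2 * r * math.asin(min(1.0, math.sqrt(a)))

def nearest_city_sketch(lat, lon, cities, max_distance_km=50.0):
    # cities: {name: (lat, lon)}; returns the closest name within the cutoff, else None.
    best_name, best_km = None, max_distance_km
    for name, (clat, clon) in cities.items():
        d = haversine_km(lat, lon, clat, clon)
        if d <= best_km:
            best_name, best_km = name, d
    return best_name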
@@ -133,7 +135,9 @@ def __init__( EARTH_RADIUS_KM = 6371.0 self.location_eps_radians = location_radius_km / EARTH_RADIUS_KM - logger.info(f"MemoryClustering initialized: radius={location_radius_km}km, date_tolerance={date_tolerance_days}days, min_images={min_images_per_memory}") + logger.info( + f"MemoryClustering initialized: radius={location_radius_km}km, date_tolerance={date_tolerance_days}days, min_images={min_images_per_memory}" + ) def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ @@ -164,7 +168,9 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] skipped_count = 0 for img in images: - has_gps = img.get("latitude") is not None and img.get("longitude") is not None + has_gps = ( + img.get("latitude") is not None and img.get("longitude") is not None + ) has_date = img.get("captured_at") if has_gps: @@ -177,7 +183,9 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] # Has neither GPS nor date → skip skipped_count += 1 - logger.info(f"GPS-based: {len(gps_images)}, Date-only: {len(date_only_images)}, Skipped: {skipped_count}") + logger.info( + f"GPS-based: {len(gps_images)}, Date-only: {len(date_only_images)}, Skipped: {skipped_count}" + ) memories = [] @@ -201,13 +209,15 @@ def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]] logger.error(f"Clustering failed: {e}", exc_info=True) return [] - def cluster_by_location_only(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: + def cluster_by_location_only( + self, images: List[Dict[str, Any]] + ) -> List[List[Dict[str, Any]]]: """ Public API: Cluster images by location only, without temporal grouping. - + Args: images: List of image dictionaries with GPS coordinates - + Returns: List of location clusters (each cluster is a list of images) """ @@ -215,13 +225,15 @@ def cluster_by_location_only(self, images: List[Dict[str, Any]]) -> List[List[Di valid_images = self._filter_valid_images(images) if not valid_images: return [] - + return self._cluster_by_location(valid_images) except Exception as e: logger.error(f"Location-only clustering failed: {e}", exc_info=True) return [] - def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + def _cluster_location_images( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ SIMPLIFIED: Use existing DBSCAN clustering for GPS images. """ @@ -237,7 +249,9 @@ def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[st temporal_clusters = self._cluster_by_date(cluster) for temp_cluster in temporal_clusters: if len(temp_cluster) >= self.min_images_per_memory: - memory = self._create_simple_memory(temp_cluster, memory_type="location") + memory = self._create_simple_memory( + temp_cluster, memory_type="location" + ) if memory is not None: memories.append(memory) @@ -246,7 +260,9 @@ def _cluster_location_images(self, images: List[Dict[str, Any]]) -> List[Dict[st logger.error(f"Location clustering failed: {e}") return [] - def _cluster_date_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + def _cluster_date_images( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ FLEXIBLE: Group date-only images by year-month. Uses min_images_per_memory (default: 2) as threshold. 
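A compact sketch of the year-month bucketing this docstring describes, assuming captured_at is either a datetime or an ISO string; the grouping key and threshold mirror the hunks that follow, while the helper name is illustrative.

from collections import defaultdict
from datetime import datetime

def group_by_month(images, min_images_per_memory=2):
    # Bucket images by (year, month) of captured_at; keep only buckets large enough for a memory.
    buckets = defaultdict(list)
    for img in images:
        captured = img.get("captured_at")
        if isinstance(captured, str):
            try:
                captured = datetime.fromisoformat(captured.replace("Z", ""))
            except ValueError:
                continue
        if isinstance(captured, datetime):
            buckets[(captured.year, captured.month)].append(img)
    return [group for group in buckets.values() if len(group) >= min_images_per_memory]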
@@ -279,7 +295,9 @@ def _cluster_date_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, A memories = [] for month_key, month_images in monthly_groups.items(): if len(month_images) >= self.min_images_per_memory: - memory = self._create_simple_memory(month_images, memory_type="date") + memory = self._create_simple_memory( + month_images, memory_type="date" + ) if memory: memories.append(memory) @@ -288,7 +306,9 @@ def _cluster_date_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, A logger.error(f"Date clustering failed: {e}") return [] - def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = "location") -> Dict[str, Any]: + def _create_simple_memory( + self, images: List[Dict[str, Any]], memory_type: str = "location" + ) -> Dict[str, Any]: """ SIMPLIFIED: Create a memory object with minimal fields. Ensures all datetime objects are converted to ISO strings. @@ -298,15 +318,23 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = cleaned_images = [] for img in images: img_copy = img.copy() - if img_copy.get("captured_at") and isinstance(img_copy["captured_at"], datetime): + if img_copy.get("captured_at") and isinstance( + img_copy["captured_at"], datetime + ): img_copy["captured_at"] = img_copy["captured_at"].isoformat() cleaned_images.append(img_copy) # Sort by date - sorted_images = sorted(cleaned_images, key=lambda x: x.get("captured_at") or "") + sorted_images = sorted( + cleaned_images, key=lambda x: x.get("captured_at") or "" + ) # Get date range - dates = [img.get("captured_at") for img in sorted_images if img.get("captured_at")] + dates = [ + img.get("captured_at") + for img in sorted_images + if img.get("captured_at") + ] if dates: if isinstance(dates[0], str): dates = [datetime.fromisoformat(d.replace("Z", "")) for d in dates] @@ -315,13 +343,19 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = date_obj = min(dates) else: date_start = date_end = None - date_obj = None + date_obj = None # Simple titles if memory_type == "location": # Calculate center first - lats = [img["latitude"] for img in images if img.get("latitude") is not None] - lons = [img["longitude"] for img in images if img.get("longitude") is not None] + lats = [ + img["latitude"] for img in images if img.get("latitude") is not None + ] + lons = [ + img["longitude"] + for img in images + if img.get("longitude") is not None + ] center_lat = np.mean(lats) if lats else 0 center_lon = np.mean(lons) if lons else 0 @@ -350,7 +384,7 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = if date_obj: title = date_obj.strftime("%B %Y") else: - title = "Undated Photos" + title = "Undated Photos" location_name = "" center_lat = 0 center_lon = 0 @@ -376,7 +410,9 @@ def _create_simple_memory(self, images: List[Dict[str, Any]], memory_type: str = logger.error(f"Memory creation failed: {e}") return None - def _cluster_gps_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + def _cluster_gps_based_memories( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ Cluster images with GPS data into location-based memories. This is the original clustering logic. 
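For the GPS-based branch named in this docstring, a minimal sketch of how a kilometre radius becomes the angular eps used with scikit-learn's haversine DBSCAN; the earlier constructor hunk shows the same radius_km / EARTH_RADIUS_KM conversion, but the input format and defaults below are assumptions.

import numpy as np
from sklearn.cluster import DBSCAN

EARTH_RADIUS_KM = 6371.0

def cluster_gps_points(coords_deg, radius_km=1.0, min_samples=2):
    # The haversine metric works on [lat, lon] in radians, so eps must also be an angle.
    coords_rad = np.radians(np.asarray(coords_deg, dtype=float))
    eps_radians = radius_km / EARTH_RADIUS_KM
    model = DBSCAN(eps=eps_radians, min_samples=min_samples, metric="haversine")
    return model.fit_predict(coords_rad)   # label -1 marks noise points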
@@ -413,7 +449,9 @@ def _cluster_gps_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict return memories - def _cluster_date_based_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + def _cluster_date_based_memories( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ Cluster images WITHOUT GPS data into date-based memories. Groups photos by capture date/time only (screenshots, downloads, edits, etc.) @@ -435,7 +473,9 @@ def _cluster_date_based_memories(self, images: List[Dict[str, Any]]) -> List[Dic if captured_at: if isinstance(captured_at, str): try: - captured_at = datetime.fromisoformat(captured_at.replace("Z", "")) + captured_at = datetime.fromisoformat( + captured_at.replace("Z", "") + ) img_copy["captured_at"] = captured_at except Exception: # Try alternative formats @@ -451,7 +491,9 @@ def _cluster_date_based_memories(self, images: List[Dict[str, Any]]) -> List[Dic except Exception: continue else: - logger.debug(f"Could not parse date for image {img.get('id')}") + logger.debug( + f"Could not parse date for image {img.get('id')}" + ) continue elif isinstance(captured_at, datetime): img_copy["captured_at"] = captured_at @@ -527,7 +569,11 @@ def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, A elif days <= 31: title = date_start.strftime("%B %Y") else: - title = date_start.strftime("%B - %B %Y") if date_start.month != date_end.month else date_start.strftime("%B %Y") + title = ( + date_start.strftime("%B - %B %Y") + if date_start.month != date_end.month + else date_start.strftime("%B %Y") + ) else: title = "Memories Collection" @@ -551,7 +597,9 @@ def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, A serialized_images = [] for img in images: img_copy = img.copy() - if img_copy.get("captured_at") and isinstance(img_copy["captured_at"], datetime): + if img_copy.get("captured_at") and isinstance( + img_copy["captured_at"], datetime + ): img_copy["captured_at"] = img_copy["captured_at"].isoformat() serialized_images.append(img_copy) @@ -569,7 +617,9 @@ def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, A "center_lon": 0.0, # No GPS data } - def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + def _filter_valid_images( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: """ Filter images that have valid location and datetime data. 
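Both the date-based clustering and _filter_valid_images fall back to "alternative formats" when fromisoformat fails; a sketch of that tolerant parsing is below, where the specific fallback format strings are assumptions rather than the module's exact list.

from datetime import datetime
from typing import Optional

def parse_captured_at(value) -> Optional[datetime]:
    # Accept datetimes as-is; for strings, try ISO 8601 first, then a few common fallback formats.
    if isinstance(value, datetime):
        return value
    if not isinstance(value, str):
        return None
    try:
        return datetime.fromisoformat(value.replace("Z", ""))
    except ValueError:
        for fmt in ("%Y-%m-%d %H:%M:%S", "%Y:%m:%d %H:%M:%S", "%Y-%m-%d"):
            try:
                return datetime.strptime(value, fmt)
            except ValueError:
                continue
    return None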
@@ -595,7 +645,9 @@ def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, A if isinstance(captured_at, str): try: # SQLite returns ISO format: "YYYY-MM-DDTHH:MM:SS" - captured_at = datetime.fromisoformat(captured_at.replace("Z", "")) + captured_at = datetime.fromisoformat( + captured_at.replace("Z", "") + ) img_copy["captured_at"] = captured_at except Exception: # Try alternative formats @@ -612,7 +664,9 @@ def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, A continue else: # Could not parse date, but location is still valid - logger.debug(f"Could not parse date for image {img.get('id')}: {captured_at}") + logger.debug( + f"Could not parse date for image {img.get('id')}: {captured_at}" + ) # Clear the unparseable string to prevent downstream errors img_copy["captured_at"] = None elif isinstance(captured_at, datetime): @@ -626,7 +680,9 @@ def _filter_valid_images(self, images: List[Dict[str, Any]]) -> List[Dict[str, A return valid_images - def _cluster_by_location(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: + def _cluster_by_location( + self, images: List[Dict[str, Any]] + ) -> List[List[Dict[str, Any]]]: """ Cluster images by geographic location using DBSCAN. @@ -670,7 +726,9 @@ def _cluster_by_location(self, images: List[Dict[str, Any]]) -> List[List[Dict[s return list(clusters.values()) - def _cluster_by_date(self, images: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: + def _cluster_by_date( + self, images: List[Dict[str, Any]] + ) -> List[List[Dict[str, Any]]]: """ Cluster images by date within a location cluster. @@ -768,7 +826,9 @@ def _create_memory(self, images: List[Dict[str, Any]]) -> Dict[str, Any]: serialized_images = [] for img in images: img_copy = img.copy() - if img_copy.get("captured_at") and isinstance(img_copy["captured_at"], datetime): + if img_copy.get("captured_at") and isinstance( + img_copy["captured_at"], datetime + ): img_copy["captured_at"] = img_copy["captured_at"].isoformat() serialized_images.append(img_copy) @@ -803,13 +863,17 @@ def _reverse_geocode(self, latitude: float, longitude: float) -> str: city_name = find_nearest_city(latitude, longitude, max_distance_km=50.0) if city_name: - logger.debug(f"Mapped coordinates ({latitude:.4f}, {longitude:.4f}) to {city_name}") + logger.debug( + f"Mapped coordinates ({latitude:.4f}, {longitude:.4f}) to {city_name}" + ) return city_name # Fallback: Return formatted coordinates return f"{latitude:.4f}°, {longitude:.4f}°" - def _generate_title(self, location_name: str, date: Optional[datetime], image_count: int) -> str: + def _generate_title( + self, location_name: str, date: Optional[datetime], image_count: int + ) -> str: """ Generate a title for the memory. @@ -853,7 +917,9 @@ def _generate_description( else: return f"{image_count} photos" - def _generate_memory_id(self, latitude: float, longitude: float, date: Optional[datetime]) -> str: + def _generate_memory_id( + self, latitude: float, longitude: float, date: Optional[datetime] + ) -> str: """ Generate a unique ID for the memory. 
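The continuation of this hunk builds a deterministic key from coordinates rounded to two decimals plus a YYYYMMDD date string; a sketch of hashing such a key into a stable id follows, where the choice of MD5 and the 16-character truncation are assumptions, not the module's confirmed implementation.

import hashlib
from datetime import datetime
from typing import Optional

def memory_id_sketch(latitude: float, longitude: float, date: Optional[datetime] = None) -> str:
    # Two-decimal rounding groups points within roughly a kilometre of latitude, so photos taken
    # near the same place on the same day always hash to the same id.
    key = f"lat:{round(latitude, 2)}|lon:{round(longitude, 2)}"
    if date:
        key += f"|date:{date.strftime('%Y%m%d')}"
    return hashlib.md5(key.encode("utf-8")).hexdigest()[:16]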
@@ -868,7 +934,7 @@ def _generate_memory_id(self, latitude: float, longitude: float, date: Optional[ # Create deterministic hash from location and date lat_rounded = round(latitude, 2) lon_rounded = round(longitude, 2) - + if date: date_str = date.strftime("%Y%m%d") hash_input = f"lat:{lat_rounded}|lon:{lon_rounded}|date:{date_str}" diff --git a/backend/app/utils/memory_monitor.py b/backend/app/utils/memory_monitor.py index 00fa3389b..c60c4a159 100644 --- a/backend/app/utils/memory_monitor.py +++ b/backend/app/utils/memory_monitor.py @@ -34,7 +34,9 @@ def wrapper(*args, **kwargs): end_time = time.time() # Log memory usage - logger.info(f"Memory usage for {func.__name__}:\n Before: {mem_before:.2f}MB\n After: {mem_after:.2f}MB\n Difference: {mem_after - mem_before:.2f}MB\n Execution time: {(end_time - start_time) * 1000:.2f}ms") + logger.info( + f"Memory usage for {func.__name__}:\n Before: {mem_before:.2f}MB\n After: {mem_after:.2f}MB\n Difference: {mem_after - mem_before:.2f}MB\n Execution time: {(end_time - start_time) * 1000:.2f}ms" + ) return result diff --git a/backend/app/utils/verify_memories_setup.py b/backend/app/utils/verify_memories_setup.py index bb4bb48d8..c803bb600 100644 --- a/backend/app/utils/verify_memories_setup.py +++ b/backend/app/utils/verify_memories_setup.py @@ -69,7 +69,9 @@ def check_dependencies(): version = getattr(module, "__version__", "Unknown") if expected_version and version != expected_version: - print_warning(f"{package} installed (v{version}), expected v{expected_version}") + print_warning( + f"{package} installed (v{version}), expected v{expected_version}" + ) else: print_success(f"{package} v{version}") except ImportError: @@ -124,7 +126,9 @@ def check_database_schema(): cursor = conn.cursor() # Check if images table exists - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" + ) if not cursor.fetchone(): print_error("Table 'images' does not exist") conn.close() @@ -167,7 +171,9 @@ def check_database_schema(): if index_name in indexes: print_success(f"Index '{index_name}'") else: - print_warning(f"Index '{index_name}' not found (recommended for performance)") + print_warning( + f"Index '{index_name}' not found (recommended for performance)" + ) conn.close() return all_columns_exist @@ -244,18 +250,28 @@ def print_summary(results): for check_name, result in results.items(): status = "✓ PASS" if result else ("⚠ WARNING" if result is None else "✗ FAIL") - color = Colors.GREEN if result else (Colors.YELLOW if result is None else Colors.RED) + color = ( + Colors.GREEN + if result + else (Colors.YELLOW if result is None else Colors.RED) + ) print(f"{color}{status}{Colors.RESET} - {check_name}") print() if all_passed: - print(f"{Colors.BOLD}{Colors.GREEN}🎉 All checks passed! Memories feature is ready to use.{Colors.RESET}") + print( + f"{Colors.BOLD}{Colors.GREEN}🎉 All checks passed! Memories feature is ready to use.{Colors.RESET}" + ) print_info("Next steps:") print_info("1. Start the backend: cd backend && ./run.sh") - print_info("2. Run metadata extraction: python -m app.utils.extract_location_metadata") + print_info( + "2. Run metadata extraction: python -m app.utils.extract_location_metadata" + ) print_info("3. Test API endpoints: see MEMORIES_TESTING_GUIDE.md") else: - print(f"{Colors.BOLD}{Colors.RED}❌ Some checks failed. Please fix the issues above.{Colors.RESET}") + print( + f"{Colors.BOLD}{Colors.RED}❌ Some checks failed. 
Please fix the issues above.{Colors.RESET}" + ) print_info("See MEMORIES_README.md for setup instructions") print() diff --git a/backend/extract_metadata_simple.py b/backend/extract_metadata_simple.py index 6c60c478b..35b4b80b2 100644 --- a/backend/extract_metadata_simple.py +++ b/backend/extract_metadata_simple.py @@ -22,7 +22,9 @@ def extract_and_update(): cursor = conn.cursor() # Get all images with metadata - cursor.execute("SELECT id, metadata FROM images WHERE metadata IS NOT NULL AND metadata != ''") + cursor.execute( + "SELECT id, metadata FROM images WHERE metadata IS NOT NULL AND metadata != ''" + ) images = cursor.fetchall() print(f"\nFound {len(images)} images with metadata") @@ -48,13 +50,22 @@ def extract_and_update(): if has_location or has_datetime: # Update the database if has_location and has_datetime: - cursor.execute("UPDATE images SET latitude = ?, longitude = ?, captured_at = ? WHERE id = ?", (latitude, longitude, date_created, image_id)) + cursor.execute( + "UPDATE images SET latitude = ?, longitude = ?, captured_at = ? WHERE id = ?", + (latitude, longitude, date_created, image_id), + ) both_count += 1 elif has_location: - cursor.execute("UPDATE images SET latitude = ?, longitude = ? WHERE id = ?", (latitude, longitude, image_id)) + cursor.execute( + "UPDATE images SET latitude = ?, longitude = ? WHERE id = ?", + (latitude, longitude, image_id), + ) location_count += 1 elif has_datetime: - cursor.execute("UPDATE images SET captured_at = ? WHERE id = ?", (date_created, image_id)) + cursor.execute( + "UPDATE images SET captured_at = ? WHERE id = ?", + (date_created, image_id), + ) datetime_count += 1 updated_count += 1 @@ -77,7 +88,9 @@ def extract_and_update(): cursor.execute("SELECT COUNT(*) FROM images WHERE captured_at IS NOT NULL") total_with_datetime = cursor.fetchone()[0] - cursor.execute("SELECT COUNT(*) FROM images WHERE latitude IS NOT NULL AND captured_at IS NOT NULL") + cursor.execute( + "SELECT COUNT(*) FROM images WHERE latitude IS NOT NULL AND captured_at IS NOT NULL" + ) total_with_both = cursor.fetchone()[0] conn.close() @@ -88,9 +101,15 @@ def extract_and_update(): print("=" * 70) print(f"Total images processed: {len(images)}") print(f"Images updated: {updated_count}") - print(f"Images with location data: {total_with_location} ({100 * total_with_location / len(images):.1f}%)") - print(f"Images with datetime: {total_with_datetime} ({100 * total_with_datetime / len(images):.1f}%)") - print(f"Images with both: {total_with_both} ({100 * total_with_both / len(images):.1f}%)") + print( + f"Images with location data: {total_with_location} ({100 * total_with_location / len(images):.1f}%)" + ) + print( + f"Images with datetime: {total_with_datetime} ({100 * total_with_datetime / len(images):.1f}%)" + ) + print( + f"Images with both: {total_with_both} ({100 * total_with_both / len(images):.1f}%)" + ) print(f"Images skipped (no data): {len(images) - updated_count}") print("=" * 70) print("\n✅ Migration completed successfully!") diff --git a/backend/main.py b/backend/main.py index a97f32d0c..2e314c9bf 100644 --- a/backend/main.py +++ b/backend/main.py @@ -90,7 +90,9 @@ async def lifespan(app: FastAPI): "name": "PictoPy Postman Collection", "url": "https://www.postman.com/aossie-pictopy/pictopy/overview", }, - servers=[{"url": "http://localhost:52123", "description": "Local Development server"}], + servers=[ + {"url": "http://localhost:52123", "description": "Local Development server"} + ], ) @@ -111,7 +113,9 @@ def generate_openapi_json(): 
openapi_schema["info"]["contact"] = app.contact project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) - openapi_path = os.path.join(project_root, "docs", "backend", "backend_python", "openapi.json") + openapi_path = os.path.join( + project_root, "docs", "backend", "backend_python", "openapi.json" + ) os.makedirs(os.path.dirname(openapi_path), exist_ok=True) @@ -147,11 +151,12 @@ async def root(): app.include_router( user_preferences_router, prefix="/user-preferences", tags=["User Preferences"] ) -app.include_router(memories_router) # Memories router (prefix already defined in router) +app.include_router( + memories_router +) # Memories router (prefix already defined in router) app.include_router(shutdown_router, tags=["Shutdown"]) - # Entry point for running with: python3 main.py if __name__ == "__main__": multiprocessing.freeze_support() # Required for Windows diff --git a/backend/migrate_add_memories_columns.py b/backend/migrate_add_memories_columns.py index 60cecb97c..d36ca398c 100644 --- a/backend/migrate_add_memories_columns.py +++ b/backend/migrate_add_memories_columns.py @@ -67,7 +67,9 @@ def check_database_exists(): def check_images_table(cursor): """Check if images table exists""" - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='images'") + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" + ) if not cursor.fetchone(): print_error("Table 'images' does not exist") print_info("Run the app first to create the database schema.") @@ -126,10 +128,22 @@ def create_indexes(cursor): print_header("Creating Performance Indexes") indexes = [ - ("ix_images_latitude", "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)"), - ("ix_images_longitude", "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)"), - ("ix_images_captured_at", "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)"), - ("ix_images_favourite_captured_at", "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)"), + ( + "ix_images_latitude", + "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)", + ), + ( + "ix_images_longitude", + "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)", + ), + ( + "ix_images_captured_at", + "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)", + ), + ( + "ix_images_favourite_captured_at", + "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)", + ), ] for index_name, sql in indexes: @@ -149,7 +163,9 @@ def show_final_schema(cursor): primary = " PRIMARY KEY" if pk else "" print(f" {col_name:<20} {col_type:<15} {nullable:<10}{primary}") - cursor.execute("SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='images'") + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='images'" + ) indexes = cursor.fetchall() print(f"\n{Colors.BOLD}Indexes:{Colors.RESET}") for index in indexes: @@ -192,7 +208,9 @@ def migrate(): # Summary print_header("Migration Summary") if changes_made: - print(f"{Colors.BOLD}{Colors.GREEN}✅ Migration completed successfully!{Colors.RESET}\n") + print( + f"{Colors.BOLD}{Colors.GREEN}✅ Migration completed successfully!{Colors.RESET}\n" + ) print_info("New columns added to 'images' table:") print_info(" - latitude (REAL)") print_info(" - longitude (REAL)") @@ -200,11 +218,15 @@ def migrate(): print_info("") print_info("Performance indexes created for fast queries.") 
else: - print(f"{Colors.BOLD}{Colors.GREEN}✅ Database is already up to date!{Colors.RESET}\n") + print( + f"{Colors.BOLD}{Colors.GREEN}✅ Database is already up to date!{Colors.RESET}\n" + ) print_info("All required columns and indexes already exist.") print(f"\n{Colors.BOLD}Next Steps:{Colors.RESET}") - print_info("1. Run metadata extraction: python -m app.utils.extract_location_metadata") + print_info( + "1. Run metadata extraction: python -m app.utils.extract_location_metadata" + ) print_info("2. Verify setup: python -m app.utils.verify_memories_setup") print_info("3. Start the backend: ./run.sh") print() diff --git a/backend/test_auto_gps_extraction.py b/backend/test_auto_gps_extraction.py index ba63c364c..fec6bcba2 100644 --- a/backend/test_auto_gps_extraction.py +++ b/backend/test_auto_gps_extraction.py @@ -29,7 +29,11 @@ def test_gps_extraction(): extractor = MetadataExtractor() # Test case 1: Sample metadata with GPS - sample_metadata = {"latitude": 28.6139, "longitude": 77.2090, "CreateDate": "2024:11:15 14:30:00"} + sample_metadata = { + "latitude": 28.6139, + "longitude": 77.2090, + "CreateDate": "2024:11:15 14:30:00", + } metadata_json = json.dumps(sample_metadata) lat, lon, captured_at = extractor.extract_all(metadata_json) diff --git a/backend/test_memories_api.py b/backend/test_memories_api.py index 2cd6fcd13..c02845da6 100644 --- a/backend/test_memories_api.py +++ b/backend/test_memories_api.py @@ -104,8 +104,12 @@ def test_locations(): print("\nSummary:") print(f" - Location Count: {data.get('location_count', 0)}") if data.get("locations"): - print(f" - Top Location: {data['locations'][0].get('location_name', 'N/A')}") - print(f" - Photos at Top Location: {data['locations'][0].get('image_count', 0)}") + print( + f" - Top Location: {data['locations'][0].get('location_name', 'N/A')}" + ) + print( + f" - Photos at Top Location: {data['locations'][0].get('image_count', 0)}" + ) def check_server(): diff --git a/backend/tests/test_albums.py b/backend/tests/test_albums.py index 50997cdd1..cec9f670e 100644 --- a/backend/tests/test_albums.py +++ b/backend/tests/test_albums.py @@ -68,7 +68,9 @@ class TestAlbumRoutes: ], ) def test_create_album_variants(self, album_data): - with patch("app.routes.albums.db_get_album_by_name") as mock_get_by_name, patch("app.routes.albums.db_insert_album") as mock_insert: + with patch("app.routes.albums.db_get_album_by_name") as mock_get_by_name, patch( + "app.routes.albums.db_insert_album" + ) as mock_insert: mock_get_by_name.return_value = None # No existing album mock_insert.return_value = None @@ -131,8 +133,13 @@ def test_get_all_albums_public_only(self, mock_db_album): assert isinstance(json_response["albums"], list) assert len(json_response["albums"]) == 1 assert json_response["albums"][0]["album_id"] == mock_db_album["album_id"] - assert json_response["albums"][0]["album_name"] == mock_db_album["album_name"] - assert json_response["albums"][0]["description"] == mock_db_album["description"] + assert ( + json_response["albums"][0]["album_name"] == mock_db_album["album_name"] + ) + assert ( + json_response["albums"][0]["description"] + == mock_db_album["description"] + ) assert json_response["albums"][0]["is_hidden"] == mock_db_album["is_hidden"] mock_get_all.assert_called_once_with(False) @@ -284,8 +291,14 @@ def test_get_album_by_id_not_found(self): ), ], ) - def test_update_album(self, album_data, request_data, verify_password_return, expected_status): - with patch("app.routes.albums.db_get_album") as mock_get_album, 
patch("app.routes.albums.db_update_album") as mock_update_album, patch("app.routes.albums.verify_album_password") as mock_verify: + def test_update_album( + self, album_data, request_data, verify_password_return, expected_status + ): + with patch("app.routes.albums.db_get_album") as mock_get_album, patch( + "app.routes.albums.db_update_album" + ) as mock_update_album, patch( + "app.routes.albums.verify_album_password" + ) as mock_verify: mock_get_album.return_value = album_data mock_verify.return_value = verify_password_return @@ -312,7 +325,9 @@ def test_delete_album_success(self, mock_db_album): mock_db_album["password_hash"], ) - with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_delete_album") as mock_delete_album: + with patch("app.routes.albums.db_get_album") as mock_get_album, patch( + "app.routes.albums.db_delete_album" + ) as mock_delete_album: mock_get_album.return_value = album_tuple mock_delete_album.return_value = None @@ -351,7 +366,9 @@ def test_add_images_to_album_success(self, mock_db_album): mock_db_album["password_hash"], ) - with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_add_images_to_album") as mock_add_images: + with patch("app.routes.albums.db_get_album") as mock_get_album, patch( + "app.routes.albums.db_add_images_to_album" + ) as mock_add_images: mock_get_album.return_value = album_tuple mock_add_images.return_value = None @@ -384,7 +401,9 @@ def test_get_album_images_success(self, mock_db_album): mock_db_album["password_hash"], ) - with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_get_album_images") as mock_get_images: + with patch("app.routes.albums.db_get_album") as mock_get_album, patch( + "app.routes.albums.db_get_album_images" + ) as mock_get_images: mock_get_album.return_value = album_tuple mock_get_images.return_value = expected_image_ids @@ -414,7 +433,9 @@ def test_remove_image_from_album_success(self, mock_db_album): mock_db_album["password_hash"], ) - with patch("app.routes.albums.db_get_album") as mock_get_album, patch("app.routes.albums.db_remove_image_from_album") as mock_remove: + with patch("app.routes.albums.db_get_album") as mock_get_album, patch( + "app.routes.albums.db_remove_image_from_album" + ) as mock_remove: mock_get_album.return_value = album_tuple mock_remove.return_value = None @@ -436,12 +457,18 @@ def test_remove_multiple_images_from_album(self, mock_db_album): album_id = mock_db_album["album_id"] image_ids_to_remove = {"image_ids": [str(uuid.uuid4()), str(uuid.uuid4())]} - with patch("app.routes.albums.db_get_album") as mock_get, patch("app.routes.albums.db_remove_images_from_album") as mock_remove_bulk: + with patch("app.routes.albums.db_get_album") as mock_get, patch( + "app.routes.albums.db_remove_images_from_album" + ) as mock_remove_bulk: mock_get.return_value = tuple(mock_db_album.values()) - response = client.request("DELETE", f"/albums/{album_id}/images", json=image_ids_to_remove) + response = client.request( + "DELETE", f"/albums/{album_id}/images", json=image_ids_to_remove + ) assert response.status_code == 200 json_response = response.json() assert json_response["success"] is True assert str(len(image_ids_to_remove["image_ids"])) in json_response["msg"] mock_get.assert_called_once_with(album_id) - mock_remove_bulk.assert_called_once_with(album_id, image_ids_to_remove["image_ids"]) + mock_remove_bulk.assert_called_once_with( + album_id, image_ids_to_remove["image_ids"] + ) diff --git 
a/backend/tests/test_face_clusters.py b/backend/tests/test_face_clusters.py index e2bae970e..3ccd284d5 100644 --- a/backend/tests/test_face_clusters.py +++ b/backend/tests/test_face_clusters.py @@ -4,7 +4,6 @@ from fastapi.testclient import TestClient from app.routes.face_clusters import router as face_clusters_router - app = FastAPI() app.include_router(face_clusters_router, prefix="/face_clusters") client = TestClient(app) @@ -95,7 +94,9 @@ class TestFaceClustersAPI: @patch("app.routes.face_clusters.db_update_cluster") @patch("app.routes.face_clusters.db_get_cluster_by_id") - def test_rename_cluster_success(self, mock_get_cluster, mock_update_cluster, sample_rename_request): + def test_rename_cluster_success( + self, mock_get_cluster, mock_update_cluster, sample_rename_request + ): """Test successfully renaming a cluster.""" cluster_id = "cluster_123" mock_get_cluster.return_value = { @@ -104,7 +105,9 @@ def test_rename_cluster_success(self, mock_get_cluster, mock_update_cluster, sam } mock_update_cluster.return_value = True - response = client.put(f"/face_clusters/{cluster_id}", json=sample_rename_request) + response = client.put( + f"/face_clusters/{cluster_id}", json=sample_rename_request + ) assert response.status_code == 200 data = response.json() @@ -114,7 +117,9 @@ def test_rename_cluster_success(self, mock_get_cluster, mock_update_cluster, sam assert data["data"]["cluster_name"] == sample_rename_request["cluster_name"] mock_get_cluster.assert_called_once_with(cluster_id) - mock_update_cluster.assert_called_once_with(cluster_id=cluster_id, cluster_name=sample_rename_request["cluster_name"]) + mock_update_cluster.assert_called_once_with( + cluster_id=cluster_id, cluster_name=sample_rename_request["cluster_name"] + ) @patch("app.routes.face_clusters.db_get_cluster_by_id") def test_rename_cluster_not_found(self, mock_get_cluster): @@ -191,12 +196,16 @@ def test_rename_cluster_database_error(self, mock_get_cluster): @patch("app.routes.face_clusters.db_update_cluster") @patch("app.routes.face_clusters.db_get_cluster_by_id") - def test_rename_cluster_name_whitespace_trimming(self, mock_get_cluster, mock_update_cluster, sample_cluster_data): + def test_rename_cluster_name_whitespace_trimming( + self, mock_get_cluster, mock_update_cluster, sample_cluster_data + ): """Test that cluster names are properly trimmed of whitespace.""" mock_get_cluster.return_value = sample_cluster_data mock_update_cluster.return_value = True - response = client.put("/face_clusters/cluster_123", json={"cluster_name": " John Doe "}) + response = client.put( + "/face_clusters/cluster_123", json={"cluster_name": " John Doe "} + ) assert response.status_code == 200 response_data = response.json() @@ -212,7 +221,9 @@ def test_rename_cluster_name_whitespace_trimming(self, mock_get_cluster, mock_up # ============================================================================ @patch("app.routes.face_clusters.db_get_all_clusters_with_face_counts") - def test_get_all_clusters_success(self, mock_get_clusters, sample_clusters_with_counts): + def test_get_all_clusters_success( + self, mock_get_clusters, sample_clusters_with_counts + ): """Test successfully retrieving all clusters.""" mock_get_clusters.return_value = sample_clusters_with_counts @@ -260,7 +271,9 @@ def test_get_all_clusters_database_error(self, mock_get_clusters): def test_get_all_clusters_response_structure(self, sample_clusters_with_counts): """Test that get all clusters returns correct response structure.""" - with 
patch("app.routes.face_clusters.db_get_all_clusters_with_face_counts") as mock_get: + with patch( + "app.routes.face_clusters.db_get_all_clusters_with_face_counts" + ) as mock_get: mock_get.return_value = sample_clusters_with_counts response = client.get("/face_clusters/") @@ -286,7 +299,9 @@ def test_get_all_clusters_response_structure(self, sample_clusters_with_counts): @patch("app.routes.face_clusters.db_get_images_by_cluster_id") @patch("app.routes.face_clusters.db_get_cluster_by_id") - def test_get_cluster_images_success(self, mock_get_cluster, mock_get_images, sample_cluster_images): + def test_get_cluster_images_success( + self, mock_get_cluster, mock_get_images, sample_cluster_images + ): """Test successfully retrieving images for a cluster.""" cluster_id = "cluster_123" mock_get_cluster.return_value = { @@ -374,7 +389,9 @@ def test_rename_cluster_missing_request_body(self): def test_rename_cluster_invalid_json(self): """Test rename cluster with invalid JSON structure.""" - response = client.put("/face_clusters/cluster_123", json={"invalid_field": "value"}) + response = client.put( + "/face_clusters/cluster_123", json={"invalid_field": "value"} + ) assert response.status_code == 422 diff --git a/backend/tests/test_folders.py b/backend/tests/test_folders.py index 46f220be8..21c4525a8 100644 --- a/backend/tests/test_folders.py +++ b/backend/tests/test_folders.py @@ -10,7 +10,6 @@ from app.routes.folders import router as folders_router - # ############################## # Pytest Fixtures # ############################## @@ -163,7 +162,9 @@ def test_add_folder_success( mock_add_folder_tree.assert_called_once() @patch("app.routes.folders.db_folder_exists") - def test_add_folder_already_exists(self, mock_folder_exists, client, temp_folder_structure): + def test_add_folder_already_exists( + self, mock_folder_exists, client, temp_folder_structure + ): """Test adding folder that already exists in database.""" mock_folder_exists.return_value = True @@ -231,7 +232,9 @@ def test_add_folder_with_parent_id( """Test adding folder with specified parent_folder_id.""" mock_folder_exists.return_value = False - mock_find_parent.return_value = None # Should not be called when parent_id provided + mock_find_parent.return_value = ( + None # Should not be called when parent_id provided + ) mock_add_folder_tree.return_value = ("child-folder-id", {}) mock_update_parent_ids.return_value = None @@ -254,7 +257,9 @@ def test_add_folder_with_parent_id( @patch("app.routes.folders.folder_util_add_folder_tree") @patch("app.routes.folders.db_folder_exists") - def test_add_folder_database_error(self, mock_folder_exists, mock_add_folder_tree, client, temp_folder_structure): + def test_add_folder_database_error( + self, mock_folder_exists, mock_add_folder_tree, client, temp_folder_structure + ): """Test handling database errors during folder addition.""" mock_folder_exists.return_value = False mock_add_folder_tree.side_effect = Exception("Database connection failed") @@ -388,7 +393,9 @@ def test_enable_ai_tagging_database_error(self, mock_enable_batch, client): assert data["detail"]["error"] == "Internal server error" @patch("app.routes.folders.db_enable_ai_tagging_batch") - def test_enable_ai_tagging_background_processing_called(self, mock_enable_batch, client): + def test_enable_ai_tagging_background_processing_called( + self, mock_enable_batch, client + ): """Test that background processing is triggered after enabling AI tagging.""" mock_enable_batch.return_value = 2 @@ -411,7 +418,9 @@ def 
test_disable_ai_tagging_success(self, mock_disable_batch, client):
         """Test successfully disabling AI tagging for folders."""
         mock_disable_batch.return_value = 5  # 5 folders updated
 
-        request_data = {"folder_ids": ["folder-1", "folder-2", "folder-3", "folder-4", "folder-5"]}
+        request_data = {
+            "folder_ids": ["folder-1", "folder-2", "folder-3", "folder-4", "folder-5"]
+        }
 
         response = client.post("/folders/disable-ai-tagging", json=request_data)
 
@@ -428,7 +437,9 @@ def test_disable_ai_tagging_success(self, mock_disable_batch, client):
             "folder-5",
         ]
 
-        mock_disable_batch.assert_called_once_with(["folder-1", "folder-2", "folder-3", "folder-4", "folder-5"])
+        mock_disable_batch.assert_called_once_with(
+            ["folder-1", "folder-2", "folder-3", "folder-4", "folder-5"]
+        )
 
     @patch("app.routes.folders.db_disable_ai_tagging_batch")
     def test_disable_ai_tagging_single_folder(self, mock_disable_batch, client):
@@ -479,7 +490,9 @@ def test_disable_ai_tagging_database_error(self, mock_disable_batch, client):
         assert data["detail"]["error"] == "Internal server error"
 
     @patch("app.routes.folders.db_disable_ai_tagging_batch")
-    def test_disable_ai_tagging_no_background_processing(self, mock_disable_batch, client):
+    def test_disable_ai_tagging_no_background_processing(
+        self, mock_disable_batch, client
+    ):
         """Test that no background processing is triggered when disabling AI tagging."""
         mock_disable_batch.return_value = 2
 
@@ -572,7 +585,9 @@ def test_delete_folders_database_error(self, mock_delete_batch, client):
     # ============================================================================
 
     @patch("app.routes.folders.db_get_all_folder_details")
-    def test_get_all_folders_success(self, mock_get_all_folders, client, sample_folder_details):
+    def test_get_all_folders_success(
+        self, mock_get_all_folders, client, sample_folder_details
+    ):
         """Test successfully retrieving all folders."""
         mock_get_all_folders.return_value = sample_folder_details
 
@@ -660,7 +675,9 @@ def test_disable_ai_tagging_no_folders_updated(self, mock_disable_batch, client)
         """Test disabling AI tagging when no folders are actually updated."""
         mock_disable_batch.return_value = 0
 
-        request_data = {"folder_ids": ["non-existent-folder-1", "non-existent-folder-2"]}
+        request_data = {
+            "folder_ids": ["non-existent-folder-1", "non-existent-folder-2"]
+        }
 
         response = client.post("/folders/disable-ai-tagging", json=request_data)
 
@@ -719,7 +736,9 @@ def test_complete_folder_workflow(
 
     @patch("app.routes.folders.db_enable_ai_tagging_batch")
     @patch("app.routes.folders.db_disable_ai_tagging_batch")
-    def test_ai_tagging_toggle_workflow(self, mock_disable_batch, mock_enable_batch, client):
+    def test_ai_tagging_toggle_workflow(
+        self, mock_disable_batch, mock_enable_batch, client
+    ):
         """Test toggling AI tagging on and off for folders."""
         folder_ids = ["folder-1", "folder-2"]
 
@@ -732,7 +751,9 @@ def test_ai_tagging_toggle_workflow(self, mock_disable_batch, mock_enable_batch,
         assert enable_response.json()["data"]["updated_count"] == 2
 
         disable_request = {"folder_ids": folder_ids}
-        disable_response = client.post("/folders/disable-ai-tagging", json=disable_request)
+        disable_response = client.post(
+            "/folders/disable-ai-tagging", json=disable_request
+        )
         assert disable_response.status_code == 200
         assert disable_response.json()["data"]["updated_count"] == 2
 
@@ -786,7 +807,9 @@ def mock_find_parent_side_effect(folder_path):
 
     @patch("app.routes.folders.db_delete_folders_batch")
     @patch("app.routes.folders.db_enable_ai_tagging_batch")
-    def test_complete_folder_lifecycle(self, mock_enable_batch, mock_delete_batch, client):
+    def test_complete_folder_lifecycle(
+        self, mock_enable_batch, mock_delete_batch, client
+    ):
         """Test complete folder lifecycle: enable AI -> delete."""
         folder_ids = ["folder-1", "folder-2"]
 
diff --git a/backend/tests/test_user_preferences.py b/backend/tests/test_user_preferences.py
index edeb4803f..e77ec15f4 100644
--- a/backend/tests/test_user_preferences.py
+++ b/backend/tests/test_user_preferences.py
@@ -56,7 +56,9 @@ class TestUserPreferencesAPI:
     """Test class for User Preferences API endpoints."""
 
     @patch("app.routes.user_preferences.db_get_metadata")
-    def test_get_user_preferences_with_existing_data(self, mock_get_metadata, sample_metadata_with_preferences):
+    def test_get_user_preferences_with_existing_data(
+        self, mock_get_metadata, sample_metadata_with_preferences
+    ):
         """Test successful retrieval of user preferences when data exists."""
         mock_get_metadata.return_value = sample_metadata_with_preferences
 
@@ -76,7 +78,9 @@ def test_get_user_preferences_with_existing_data(self, mock_get_metadata, sample
         mock_get_metadata.assert_called_once()
 
     @patch("app.routes.user_preferences.db_get_metadata")
-    def test_get_user_preferences_with_defaults(self, mock_get_metadata, sample_metadata_without_preferences):
+    def test_get_user_preferences_with_defaults(
+        self, mock_get_metadata, sample_metadata_without_preferences
+    ):
         """Test retrieval of user preferences with default values when no preferences exist."""
         mock_get_metadata.return_value = sample_metadata_without_preferences
 
@@ -129,7 +133,9 @@ def test_get_user_preferences_null_metadata(self, mock_get_metadata):
     def test_get_user_preferences_partial_data(self, mock_get_metadata):
         """Test retrieval when only some preference fields exist."""
 
-        mock_get_metadata.return_value = {"user_preferences": {"YOLO_model_size": "medium"}}
+        mock_get_metadata.return_value = {
+            "user_preferences": {"YOLO_model_size": "medium"}
+        }
 
         response = client.get("/user_preferences/")
 
@@ -195,7 +201,9 @@ def test_update_user_preferences_full_update(
 
     @patch("app.routes.user_preferences.db_update_metadata")
     @patch("app.routes.user_preferences.db_get_metadata")
-    def test_update_user_preferences_partial_update(self, mock_get_metadata, mock_update_metadata, sample_metadata_with_preferences):
+    def test_update_user_preferences_partial_update(
+        self, mock_get_metadata, mock_update_metadata, sample_metadata_with_preferences
+    ):
         """Test successful partial update of user preferences."""
         mock_get_metadata.return_value = sample_metadata_with_preferences
 
@@ -214,7 +222,9 @@ def test_update_user_preferences_partial_update(self, mock_get_metadata, mock_up
 
     @patch("app.routes.user_preferences.db_update_metadata")
     @patch("app.routes.user_preferences.db_get_metadata")
-    def test_update_user_preferences_new_metadata(self, mock_get_metadata, mock_update_metadata):
+    def test_update_user_preferences_new_metadata(
+        self, mock_get_metadata, mock_update_metadata
+    ):
         """Test update when no existing metadata exists."""
         mock_get_metadata.return_value = None
 
@@ -243,7 +253,9 @@ def test_update_user_preferences_new_metadata(self, mock_get_metadata, mock_upda
     )
     @patch("app.routes.user_preferences.db_update_metadata")
     @patch("app.routes.user_preferences.db_get_metadata")
-    def test_update_user_preferences_various_combinations(self, mock_get_metadata, mock_update_metadata, yolo_size, gpu_accel):
+    def test_update_user_preferences_various_combinations(
+        self, mock_get_metadata, mock_update_metadata, yolo_size, gpu_accel
+    ):
         """Test update with various parameter combinations."""
         mock_get_metadata.return_value = {}
 
@@ -271,11 +283,17 @@ def test_update_user_preferences_no_fields_provided(self):
         if "detail" in response_data:
             assert response_data["detail"]["success"] is False
             assert response_data["detail"]["error"] == "Validation Error"
-            assert "At least one preference field must be provided" in response_data["detail"]["message"]
+            assert (
+                "At least one preference field must be provided"
+                in response_data["detail"]["message"]
+            )
         else:
             assert response_data["success"] is False
             assert response_data["error"] == "Validation Error"
-            assert "At least one preference field must be provided" in response_data["message"]
+            assert (
+                "At least one preference field must be provided"
+                in response_data["message"]
+            )
 
     def test_update_user_preferences_all_none_fields(self):
         """Test update with all fields explicitly set to None."""
@@ -290,15 +308,23 @@ def test_update_user_preferences_all_none_fields(self):
         if "detail" in response_data:
             assert response_data["detail"]["success"] is False
             assert response_data["detail"]["error"] == "Validation Error"
-            assert "At least one preference field must be provided" in response_data["detail"]["message"]
+            assert (
+                "At least one preference field must be provided"
+                in response_data["detail"]["message"]
+            )
         else:
             assert response_data["success"] is False
             assert response_data["error"] == "Validation Error"
-            assert "At least one preference field must be provided" in response_data["message"]
+            assert (
+                "At least one preference field must be provided"
+                in response_data["message"]
+            )
 
     @patch("app.routes.user_preferences.db_update_metadata")
     @patch("app.routes.user_preferences.db_get_metadata")
-    def test_update_user_preferences_database_update_failed(self, mock_get_metadata, mock_update_metadata):
+    def test_update_user_preferences_database_update_failed(
+        self, mock_get_metadata, mock_update_metadata
+    ):
         """Test update when database update fails."""
         mock_get_metadata.return_value = {}
 
@@ -312,7 +338,10 @@ def test_update_user_preferences_database_update_failed(self, mock_get_metadata,
         if "detail" in response_data:
             assert response_data["detail"]["success"] is False
             assert response_data["detail"]["error"] == "Update Failed"
-            assert "Failed to update user preferences" in response_data["detail"]["message"]
+            assert (
+                "Failed to update user preferences"
+                in response_data["detail"]["message"]
+            )
         else:
             assert response_data["success"] is False
             assert response_data["error"] == "Update Failed"
@@ -340,7 +369,9 @@ def test_update_user_preferences_database_get_exception(self, mock_get_metadata)
 
     @patch("app.routes.user_preferences.db_update_metadata")
     @patch("app.routes.user_preferences.db_get_metadata")
-    def test_update_user_preferences_database_update_exception(self, mock_get_metadata, mock_update_metadata):
+    def test_update_user_preferences_database_update_exception(
+        self, mock_get_metadata, mock_update_metadata
+    ):
         """Test update when database update raises an exception."""
         mock_get_metadata.return_value = {}
 
@@ -382,11 +413,15 @@ def test_get_user_preferences_response_structure(self):
 
     def test_update_user_preferences_response_structure(self):
         """Test that update user preferences returns correct response structure."""
-        with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch("app.routes.user_preferences.db_update_metadata") as mock_update:
+        with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch(
+            "app.routes.user_preferences.db_update_metadata"
+        ) as mock_update:
             mock_get.return_value = {}
             mock_update.return_value = True
 
-            response = client.put("/user_preferences/", json={"YOLO_model_size": "medium"})
+            response = client.put(
+                "/user_preferences/", json={"YOLO_model_size": "medium"}
+            )
 
             assert response.status_code == 200
             response_data = response.json()
@@ -402,7 +437,9 @@ def test_update_user_preferences_response_structure(self):
 
     def test_update_user_preferences_preserves_other_metadata(self):
         """Test that updating preferences preserves other metadata fields."""
-        with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch("app.routes.user_preferences.db_update_metadata") as mock_update:
+        with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch(
+            "app.routes.user_preferences.db_update_metadata"
+        ) as mock_update:
             existing_metadata = {
                 "user_preferences": {"YOLO_model_size": "small"},
                 "other_field": "should_be_preserved",
@@ -428,7 +465,9 @@ def test_update_user_preferences_missing_request_body(self):
 
     def test_update_user_preferences_invalid_yolo_size(self):
         """Test update with invalid YOLO model size."""
-        response = client.put("/user_preferences/", json={"YOLO_model_size": "invalid_size"})
+        response = client.put(
+            "/user_preferences/", json={"YOLO_model_size": "invalid_size"}
+        )
 
         assert response.status_code == 422
         response_data = response.json()
@@ -443,9 +482,15 @@ def test_update_user_preferences_invalid_json_structure(self):
 
         response_data = response.json()
         if "detail" in response_data:
-            assert "At least one preference field must be provided" in response_data["detail"]["message"]
+            assert (
+                "At least one preference field must be provided"
+                in response_data["detail"]["message"]
+            )
         else:
-            assert "At least one preference field must be provided" in response_data["message"]
+            assert (
+                "At least one preference field must be provided"
+                in response_data["message"]
+            )
 
     @pytest.mark.parametrize(
         "method,endpoint",

From d045895acd58e4b00f22797db069250da3bf8fbc Mon Sep 17 00:00:00 2001
From: harshit
Date: Tue, 3 Feb 2026 22:47:42 +0530
Subject: [PATCH 22/22] feat: add type field to Memory schema for location/date distinction

---
 backend/app/routes/memories.py           |  3 ++-
 docs/backend/backend_python/openapi.json | 11 ++++++++++-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py
index 65f3edbc6..a21ea9cf6 100644
--- a/backend/app/routes/memories.py
+++ b/backend/app/routes/memories.py
@@ -15,7 +15,7 @@
 """
 
 from datetime import datetime, timedelta
-from typing import List, Dict, Optional
+from typing import List, Dict, Optional, Literal
 
 from fastapi import APIRouter, HTTPException, Query
 from pydantic import BaseModel
@@ -62,6 +62,7 @@ class Memory(BaseModel):
     thumbnail_image_id: str
     center_lat: Optional[float] = None
     center_lon: Optional[float] = None
+    type: Literal["location", "date"]  # Memory clustering type
 
 
 class GenerateMemoriesResponse(BaseModel):
diff --git a/docs/backend/backend_python/openapi.json b/docs/backend/backend_python/openapi.json
index 23f774bb4..1389b792b 100644
--- a/docs/backend/backend_python/openapi.json
+++ b/docs/backend/backend_python/openapi.json
@@ -2702,6 +2702,14 @@
               }
             ],
             "title": "Center Lon"
+          },
+          "type": {
+            "type": "string",
+            "enum": [
+              "location",
+              "date"
+            ],
+            "title": "Type"
           }
         },
         "type": "object",
@@ -2714,7 +2722,8 @@
           "date_end",
           "image_count",
           "images",
-          "thumbnail_image_id"
+          "thumbnail_image_id",
+          "type"
         ],
         "title": "Memory",
         "description": "Memory object containing grouped images."