diff --git a/backend/app/database/albums.py b/backend/app/database/albums.py index b9e5b149a..261dfe045 100644 --- a/backend/app/database/albums.py +++ b/backend/app/database/albums.py @@ -9,8 +9,7 @@ def db_create_albums_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS albums ( album_id TEXT PRIMARY KEY, album_name TEXT UNIQUE, @@ -18,8 +17,7 @@ def db_create_albums_table() -> None: is_hidden BOOLEAN DEFAULT 0, password_hash TEXT ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -31,8 +29,7 @@ def db_create_album_images_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS album_images ( album_id TEXT, image_id TEXT, @@ -40,8 +37,7 @@ def db_create_album_images_table() -> None: FOREIGN KEY (album_id) REFERENCES albums(album_id) ON DELETE CASCADE, FOREIGN KEY (image_id) REFERENCES images(id) ON DELETE CASCADE ) - """ - ) + """) conn.commit() finally: if conn is not None: diff --git a/backend/app/database/face_clusters.py b/backend/app/database/face_clusters.py index ceac7f556..dd21804ae 100644 --- a/backend/app/database/face_clusters.py +++ b/backend/app/database/face_clusters.py @@ -24,15 +24,13 @@ def db_create_clusters_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS face_clusters ( cluster_id TEXT PRIMARY KEY, cluster_name TEXT, face_image_base64 TEXT ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -245,8 +243,7 @@ def db_get_all_clusters_with_face_counts() -> ( cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT fc.cluster_id, fc.cluster_name, @@ -256,8 +253,7 @@ def db_get_all_clusters_with_face_counts() -> ( LEFT JOIN faces f ON fc.cluster_id = f.cluster_id GROUP BY fc.cluster_id, fc.cluster_name, fc.face_image_base64 ORDER BY fc.cluster_id - """ - ) + """) rows = cursor.fetchall() diff --git a/backend/app/database/faces.py b/backend/app/database/faces.py index 0e43f7117..07144acfa 100644 --- a/backend/app/database/faces.py +++ b/backend/app/database/faces.py @@ -32,8 +32,7 @@ def db_create_faces_table() -> None: conn = sqlite3.connect(DATABASE_PATH) conn.execute("PRAGMA foreign_keys = ON") cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS faces ( face_id INTEGER PRIMARY KEY AUTOINCREMENT, image_id TEXT, @@ -44,8 +43,7 @@ def db_create_faces_table() -> None: FOREIGN KEY (image_id) REFERENCES images(id) ON DELETE CASCADE, FOREIGN KEY (cluster_id) REFERENCES face_clusters(cluster_id) ON DELETE SET NULL ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -146,8 +144,7 @@ def get_all_face_embeddings(): cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT f.embeddings, f.bbox, @@ -162,8 +159,7 @@ def get_all_face_embeddings(): JOIN images i ON f.image_id=i.id LEFT JOIN image_classes ic ON i.id = ic.image_id LEFT JOIN mappings m ON ic.class_id = m.class_id - """ - ) + """) results = cursor.fetchall() from app.utils.images import image_util_parse_metadata @@ -256,14 +252,12 @@ def db_get_all_faces_with_cluster_names() -> ( cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT f.face_id, f.embeddings, fc.cluster_name FROM faces f LEFT JOIN face_clusters fc ON f.cluster_id = fc.cluster_id ORDER BY f.face_id - """ 
- ) + """) rows = cursor.fetchall() @@ -353,14 +347,12 @@ def db_get_cluster_mean_embeddings() -> List[Dict[str, Union[str, FaceEmbedding] cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT f.cluster_id, f.embeddings FROM faces f WHERE f.cluster_id IS NOT NULL ORDER BY f.cluster_id - """ - ) + """) rows = cursor.fetchall() diff --git a/backend/app/database/folders.py b/backend/app/database/folders.py index 3a2ac976d..60bca782f 100644 --- a/backend/app/database/folders.py +++ b/backend/app/database/folders.py @@ -17,8 +17,7 @@ def db_create_folders_table() -> None: try: conn = sqlite3.connect(DATABASE_PATH) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS folders ( folder_id TEXT PRIMARY KEY, parent_folder_id TEXT, @@ -28,8 +27,7 @@ def db_create_folders_table() -> None: taggingCompleted BOOLEAN, FOREIGN KEY (parent_folder_id) REFERENCES folders(folder_id) ON DELETE CASCADE ) - """ - ) + """) conn.commit() finally: if conn is not None: @@ -406,13 +404,11 @@ def db_get_all_folder_details() -> ( cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT folder_id, folder_path, parent_folder_id, last_modified_time, AI_Tagging, taggingCompleted FROM folders ORDER BY folder_path - """ - ) + """) return cursor.fetchall() finally: conn.close() diff --git a/backend/app/database/images.py b/backend/app/database/images.py index ec9541a56..de9e33e47 100644 --- a/backend/app/database/images.py +++ b/backend/app/database/images.py @@ -1,6 +1,7 @@ # Standard library imports import sqlite3 -from typing import Any, List, Mapping, Tuple, TypedDict, Union +from typing import Any, List, Mapping, Tuple, TypedDict, Union, Optional +from datetime import datetime # App-specific imports from app.config.settings import ( @@ -18,7 +19,7 @@ ClassId = int -class ImageRecord(TypedDict): +class ImageRecord(TypedDict, total=False): """Represents the full images table structure""" id: ImageId @@ -27,6 +28,11 @@ class ImageRecord(TypedDict): thumbnailPath: str metadata: Union[Mapping[str, Any], str] isTagged: bool + isFavourite: bool + # New fields for Memories feature + latitude: Optional[float] + longitude: Optional[float] + captured_at: Optional[datetime] class UntaggedImageRecord(TypedDict): @@ -53,9 +59,8 @@ def db_create_images_table() -> None: conn = _connect() cursor = conn.cursor() - # Create new images table with merged fields - cursor.execute( - """ + # Create new images table with merged fields including Memories feature columns + cursor.execute(""" CREATE TABLE IF NOT EXISTS images ( id TEXT PRIMARY KEY, path VARCHAR UNIQUE, @@ -64,14 +69,27 @@ def db_create_images_table() -> None: metadata TEXT, isTagged BOOLEAN DEFAULT 0, isFavourite BOOLEAN DEFAULT 0, + latitude REAL, + longitude REAL, + captured_at DATETIME, FOREIGN KEY (folder_id) REFERENCES folders(folder_id) ON DELETE CASCADE ) - """ + """) + + # Create indexes for Memories feature queries + cursor.execute("CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)") + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)" ) # Create new image_classes junction table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS image_classes ( image_id TEXT, class_id INTEGER, @@ -79,13 +97,80 @@ 
def db_create_images_table() -> None: FOREIGN KEY (image_id) REFERENCES images(id) ON DELETE CASCADE, FOREIGN KEY (class_id) REFERENCES mappings(class_id) ON DELETE CASCADE ) - """ - ) + """) conn.commit() conn.close() +def db_migrate_add_memories_columns() -> None: + """ + Add Memories feature columns to existing images table if they don't exist. + This function handles backward compatibility for existing databases. + """ + conn = _connect() + cursor = conn.cursor() + + try: + # Check if images table exists + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" + ) + if not cursor.fetchone(): + logger.info( + "Images table does not exist yet, will be created by db_create_images_table()" + ) + conn.close() + return + + # Get existing columns + cursor.execute("PRAGMA table_info(images)") + columns = {row[1] for row in cursor.fetchall()} + + # Add missing columns + changes_made = False + + if "latitude" not in columns: + cursor.execute("ALTER TABLE images ADD COLUMN latitude REAL") + logger.info("Added column: latitude") + changes_made = True + + if "longitude" not in columns: + cursor.execute("ALTER TABLE images ADD COLUMN longitude REAL") + logger.info("Added column: longitude") + changes_made = True + + if "captured_at" not in columns: + cursor.execute("ALTER TABLE images ADD COLUMN captured_at DATETIME") + logger.info("Added column: captured_at") + changes_made = True + + # Create indexes + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_longitude ON images(longitude)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)" + ) + cursor.execute( + "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)" + ) + + if changes_made: + logger.info("Memories feature columns migration completed") + + conn.commit() + + except Exception as e: + logger.error(f"Error during Memories columns migration: {e}") + conn.rollback() + finally: + conn.close() + + def db_bulk_insert_images(image_records: List[ImageRecord]) -> bool: """Insert multiple image records in a single transaction.""" if not image_records: @@ -97,8 +182,8 @@ def db_bulk_insert_images(image_records: List[ImageRecord]) -> bool: try: cursor.executemany( """ - INSERT INTO images (id, path, folder_id, thumbnailPath, metadata, isTagged) - VALUES (:id, :path, :folder_id, :thumbnailPath, :metadata, :isTagged) + INSERT INTO images (id, path, folder_id, thumbnailPath, metadata, isTagged, latitude, longitude, captured_at) + VALUES (:id, :path, :folder_id, :thumbnailPath, :metadata, :isTagged, :latitude, :longitude, :captured_at) ON CONFLICT(path) DO UPDATE SET folder_id=excluded.folder_id, thumbnailPath=excluded.thumbnailPath, @@ -106,7 +191,10 @@ def db_bulk_insert_images(image_records: List[ImageRecord]) -> bool: isTagged=CASE WHEN excluded.isTagged THEN 1 ELSE images.isTagged - END + END, + latitude=COALESCE(excluded.latitude, images.latitude), + longitude=COALESCE(excluded.longitude, images.longitude), + captured_at=COALESCE(excluded.captured_at, images.captured_at) """, image_records, ) @@ -145,6 +233,9 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: i.metadata, i.isTagged, i.isFavourite, + i.latitude, + i.longitude, + i.captured_at, m.name as tag_name FROM images i LEFT JOIN image_classes ic ON i.id = ic.image_id @@ -172,6 +263,9 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: 
metadata, is_tagged, is_favourite, + latitude, + longitude, + captured_at, tag_name, ) in results: if image_id not in images_dict: @@ -188,6 +282,11 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: "metadata": metadata_dict, "isTagged": bool(is_tagged), "isFavourite": bool(is_favourite), + "latitude": latitude, + "longitude": longitude, + "captured_at": ( + captured_at if captured_at else None + ), # SQLite returns string "tags": [], } @@ -228,15 +327,13 @@ def db_get_untagged_images() -> List[UntaggedImageRecord]: cursor = conn.cursor() try: - cursor.execute( - """ + cursor.execute(""" SELECT i.id, i.path, i.folder_id, i.thumbnailPath, i.metadata FROM images i JOIN folders f ON i.folder_id = f.folder_id WHERE f.AI_Tagging = TRUE AND i.isTagged = FALSE - """ - ) + """) results = cursor.fetchall() @@ -419,3 +516,380 @@ def db_toggle_image_favourite_status(image_id: str) -> bool: return False finally: conn.close() + + +# ============================================================================ +# MEMORIES FEATURE - Location and Time-based Queries +# ============================================================================ + + +def db_get_images_by_date_range( + start_date: datetime, end_date: datetime, include_favorites_only: bool = False +) -> List[dict]: + """ + Get images captured within a date range for Memories timeline. + + Args: + start_date: Start of date range (inclusive) + end_date: End of date range (inclusive) + include_favorites_only: If True, only return favorite images + + Returns: + List of image dictionaries with location and time data + """ + conn = _connect() + cursor = conn.cursor() + + try: + query = """ + SELECT + i.id, + i.path, + i.folder_id, + i.thumbnailPath, + i.metadata, + i.isTagged, + i.isFavourite, + i.latitude, + i.longitude, + i.captured_at, + GROUP_CONCAT(m.name, ',') as tags + FROM images i + LEFT JOIN image_classes ic ON i.id = ic.image_id + LEFT JOIN mappings m ON ic.class_id = m.class_id + WHERE i.captured_at BETWEEN ? AND ? + """ + + params = [start_date, end_date] + + if include_favorites_only: + query += " AND i.isFavourite = 1" + + query += """ + GROUP BY i.id + ORDER BY i.captured_at DESC + """ + + cursor.execute(query, params) + results = cursor.fetchall() + + images = [] + for row in results: + from app.utils.images import image_util_parse_metadata + + images.append( + { + "id": row[0], + "path": row[1], + "folder_id": str(row[2]) if row[2] else None, + "thumbnailPath": row[3], + "metadata": image_util_parse_metadata(row[4]), + "isTagged": bool(row[5]), + "isFavourite": bool(row[6]), + "latitude": row[7], + "longitude": row[8], + "captured_at": row[9] if row[9] else None, + "tags": row[10].split(",") if row[10] else None, + } + ) + + return images + + except Exception as e: + logger.error(f"Error getting images by date range: {e}") + return [] + finally: + conn.close() + + +def db_get_images_near_location( + latitude: float, longitude: float, radius_km: float = 5.0 +) -> List[dict]: + """ + Get images near a location within radius_km using bounding box approximation. + + Args: + latitude: Center latitude (-90 to 90) + longitude: Center longitude (-180 to 180) + radius_km: Search radius in kilometers (default: 5km) + + Returns: + List of image dictionaries with location data + + Note: + Uses simple bounding box (not precise Haversine distance). 
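+
+        Worked example (illustrative): at latitude 45° with radius_km=5,
+        lat_offset = 5 / 111.0 ≈ 0.045° and
+        lon_offset = 5 / (111.0 * cos(45°)) ≈ 0.064°,
+        following the degree-to-km approximations below: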
+        1 degree latitude ≈ 111 km
+        1 degree longitude ≈ 111 km * cos(latitude)
+    """
+    conn = _connect()
+    cursor = conn.cursor()
+
+    try:
+        import math
+
+        # Calculate bounding box offsets
+        lat_offset = radius_km / 111.0
+        cos_lat = abs(math.cos(math.radians(latitude)))
+        # Clamp to avoid division by near-zero at poles
+        lon_offset = radius_km / (111.0 * max(cos_lat, 0.01))
+
+        cursor.execute(
+            """
+            SELECT
+                i.id,
+                i.path,
+                i.folder_id,
+                i.thumbnailPath,
+                i.metadata,
+                i.isTagged,
+                i.isFavourite,
+                i.latitude,
+                i.longitude,
+                i.captured_at,
+                GROUP_CONCAT(m.name, ',') as tags
+            FROM images i
+            LEFT JOIN image_classes ic ON i.id = ic.image_id
+            LEFT JOIN mappings m ON ic.class_id = m.class_id
+            WHERE i.latitude BETWEEN ? AND ?
+            AND i.longitude BETWEEN ? AND ?
+            AND i.latitude IS NOT NULL
+            AND i.longitude IS NOT NULL
+            GROUP BY i.id
+            ORDER BY i.captured_at DESC
+            """,
+            (
+                latitude - lat_offset,
+                latitude + lat_offset,
+                longitude - lon_offset,
+                longitude + lon_offset,
+            ),
+        )
+
+        results = cursor.fetchall()
+
+        images = []
+        for row in results:
+            from app.utils.images import image_util_parse_metadata
+
+            images.append(
+                {
+                    "id": row[0],
+                    "path": row[1],
+                    "folder_id": str(row[2]) if row[2] else None,
+                    "thumbnailPath": row[3],
+                    "metadata": image_util_parse_metadata(row[4]),
+                    "isTagged": bool(row[5]),
+                    "isFavourite": bool(row[6]),
+                    "latitude": row[7],
+                    "longitude": row[8],
+                    "captured_at": row[9] if row[9] else None,  # SQLite returns string
+                    "tags": row[10].split(",") if row[10] else None,
+                }
+            )
+
+        return images
+
+    except Exception as e:
+        logger.error(f"Error getting images near location: {e}")
+        return []
+    finally:
+        conn.close()
+
+
+def db_get_images_by_year_month(year: int, month: int) -> List[dict]:
+    """
+    Get all images captured in a specific year and month.
+
+    Args:
+        year: Year (e.g., 2024)
+        month: Month (1-12)
+
+    Returns:
+        List of image dictionaries captured in the specified month
+    """
+    conn = _connect()
+    cursor = conn.cursor()
+
+    try:
+        cursor.execute(
+            """
+            SELECT
+                i.id,
+                i.path,
+                i.folder_id,
+                i.thumbnailPath,
+                i.metadata,
+                i.isTagged,
+                i.isFavourite,
+                i.latitude,
+                i.longitude,
+                i.captured_at,
+                GROUP_CONCAT(m.name, ',') as tags
+            FROM images i
+            LEFT JOIN image_classes ic ON i.id = ic.image_id
+            LEFT JOIN mappings m ON ic.class_id = m.class_id
+            WHERE strftime('%Y', i.captured_at) = ?
+            AND strftime('%m', i.captured_at) = ?
+            GROUP BY i.id
+            ORDER BY i.captured_at DESC
+            """,
+            (str(year).zfill(4), str(month).zfill(2)),
+        )
+
+        results = cursor.fetchall()
+
+        images = []
+        for row in results:
+            from app.utils.images import image_util_parse_metadata
+
+            images.append(
+                {
+                    "id": row[0],
+                    "path": row[1],
+                    "folder_id": str(row[2]) if row[2] else None,
+                    "thumbnailPath": row[3],
+                    "metadata": image_util_parse_metadata(row[4]),
+                    "isTagged": bool(row[5]),
+                    "isFavourite": bool(row[6]),
+                    "latitude": row[7],
+                    "longitude": row[8],
+                    "captured_at": row[9] if row[9] else None,  # SQLite returns string
+                    "tags": row[10].split(",") if row[10] else None,
+                }
+            )
+
+        return images
+
+    except Exception as e:
+        logger.error(f"Error getting images by year/month: {e}")
+        return []
+    finally:
+        conn.close()
+
+
+def db_get_images_with_location() -> List[dict]:
+    """
+    Get all images that have valid GPS coordinates.
+    Useful for displaying all photos on a map.
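+
+    Example (illustrative values; assumes a populated database):
+        >>> pins = db_get_images_with_location()
+        >>> (pins[0]["latitude"], pins[0]["longitude"])
+        (26.9124, 75.7873)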
+
+    Returns:
+        List of image dictionaries that have latitude and longitude
+    """
+    conn = _connect()
+    cursor = conn.cursor()
+
+    try:
+        cursor.execute("""
+            SELECT
+                i.id,
+                i.path,
+                i.folder_id,
+                i.thumbnailPath,
+                i.metadata,
+                i.isTagged,
+                i.isFavourite,
+                i.latitude,
+                i.longitude,
+                i.captured_at,
+                GROUP_CONCAT(m.name, ',') as tags
+            FROM images i
+            LEFT JOIN image_classes ic ON i.id = ic.image_id
+            LEFT JOIN mappings m ON ic.class_id = m.class_id
+            WHERE i.latitude IS NOT NULL
+            AND i.longitude IS NOT NULL
+            GROUP BY i.id
+            ORDER BY i.captured_at DESC
+        """)
+
+        results = cursor.fetchall()
+
+        images = []
+        for row in results:
+            from app.utils.images import image_util_parse_metadata
+
+            images.append(
+                {
+                    "id": row[0],
+                    "path": row[1],
+                    "folder_id": str(row[2]) if row[2] else None,
+                    "thumbnailPath": row[3],
+                    "metadata": image_util_parse_metadata(row[4]),
+                    "isTagged": bool(row[5]),
+                    "isFavourite": bool(row[6]),
+                    "latitude": row[7],
+                    "longitude": row[8],
+                    "captured_at": row[9] if row[9] else None,  # SQLite returns string
+                    "tags": row[10].split(",") if row[10] else None,
+                }
+            )
+
+        return images
+
+    except Exception as e:
+        logger.error(f"Error fetching images with location: {e}")
+        return []
+    finally:
+        conn.close()
+
+
+def db_get_all_images_for_memories() -> List[dict]:
+    """
+    Get ALL images that can be used for memories (with OR without GPS).
+    Includes images with timestamps for date-based memories.
+
+    Returns:
+        List of all image dictionaries (both GPS and non-GPS images)
+    """
+    conn = _connect()
+    cursor = conn.cursor()
+
+    try:
+        cursor.execute("""
+            SELECT
+                i.id,
+                i.path,
+                i.folder_id,
+                i.thumbnailPath,
+                i.metadata,
+                i.isTagged,
+                i.isFavourite,
+                i.latitude,
+                i.longitude,
+                i.captured_at,
+                GROUP_CONCAT(m.name, ',') as tags
+            FROM images i
+            LEFT JOIN image_classes ic ON i.id = ic.image_id
+            LEFT JOIN mappings m ON ic.class_id = m.class_id
+            GROUP BY i.id
+            ORDER BY i.captured_at DESC
+        """)
+
+        results = cursor.fetchall()
+
+        images = []
+        for row in results:
+            from app.utils.images import image_util_parse_metadata
+
+            images.append(
+                {
+                    "id": row[0],
+                    "path": row[1],
+                    "folder_id": str(row[2]) if row[2] else None,
+                    "thumbnailPath": row[3],
+                    "metadata": image_util_parse_metadata(row[4]),
+                    "isTagged": bool(row[5]),
+                    "isFavourite": bool(row[6]),
+                    "latitude": row[7],  # None when the image has no GPS (0.0 is a valid coordinate)
+                    "longitude": row[8],  # None when the image has no GPS
+                    "captured_at": row[9] if row[9] else None,
+                    "tags": row[10].split(",") if row[10] else None,
+                }
+            )
+
+        return images
+
+    except Exception as e:
+        logger.error(f"Error getting images from database: {e}")
+        return []
+    finally:
+        conn.close()
diff --git a/backend/app/database/metadata.py b/backend/app/database/metadata.py
index d431f6e2b..a86b64cb2 100644
--- a/backend/app/database/metadata.py
+++ b/backend/app/database/metadata.py
@@ -11,13 +11,11 @@ def db_create_metadata_table() -> None:
     try:
         conn = sqlite3.connect(DATABASE_PATH)
         cursor = conn.cursor()
-        cursor.execute(
-            """
+        cursor.execute("""
            CREATE TABLE IF NOT EXISTS metadata (
                metadata TEXT
            )
-            """
-        )
+        """)
 
         # Insert initial row if table is empty
         cursor.execute("SELECT COUNT(*) FROM metadata")
diff --git a/backend/app/database/yolo_mapping.py b/backend/app/database/yolo_mapping.py
index af5c18927..fe8402dd2 100644
--- a/backend/app/database/yolo_mapping.py
+++ b/backend/app/database/yolo_mapping.py
@@ -12,14 +12,12 @@ def db_create_YOLO_classes_table():
     try:
         conn = sqlite3.connect(DATABASE_PATH)
         cursor = conn.cursor()
-
cursor.execute( - """ + cursor.execute(""" CREATE TABLE IF NOT EXISTS mappings ( class_id INTEGER PRIMARY KEY, name VARCHAR NOT NULL ) - """ - ) + """) for class_id, name in enumerate(class_names): cursor.execute( "INSERT OR REPLACE INTO mappings (class_id, name) VALUES (?, ?)", diff --git a/backend/app/routes/face_clusters.py b/backend/app/routes/face_clusters.py index 99974ac4a..78394df79 100644 --- a/backend/app/routes/face_clusters.py +++ b/backend/app/routes/face_clusters.py @@ -28,7 +28,6 @@ from app.schemas.images import FaceSearchRequest, InputType from app.utils.faceSearch import perform_face_search - logger = logging.getLogger(__name__) router = APIRouter() diff --git a/backend/app/routes/memories.py b/backend/app/routes/memories.py new file mode 100644 index 000000000..a21ea9cf6 --- /dev/null +++ b/backend/app/routes/memories.py @@ -0,0 +1,443 @@ +""" +Memories API Routes + +This module provides REST API endpoints for the Memories feature, which groups +photos by location and time into meaningful collections. + +Endpoints: +- POST /api/memories/generate - Generate memories from all images with location data +- GET /api/memories/timeline - Get memories from past N days +- GET /api/memories/on-this-day - Get photos from this date in previous years +- GET /api/memories/locations - Get all unique locations where photos were taken + +Author: PictoPy Team +Date: 2025-12-14 +""" + +from datetime import datetime, timedelta +from typing import List, Dict, Optional, Literal + +from fastapi import APIRouter, HTTPException, Query +from pydantic import BaseModel + +from app.database.images import ( + db_get_images_with_location, + db_get_images_by_date_range, + db_get_images_by_year_month, +) +from app.utils.memory_clustering import MemoryClustering +from app.logging.setup_logging import get_logger + +# Initialize router and logger +router = APIRouter(prefix="/api/memories", tags=["memories"]) +logger = get_logger(__name__) + + +# Response Models + + +class MemoryImage(BaseModel): + """Image within a memory.""" + + id: str + path: str + thumbnailPath: str + latitude: Optional[float] + longitude: Optional[float] + captured_at: Optional[str] + isFavourite: Optional[bool] = False # Add favorite status + + +class Memory(BaseModel): + """Memory object containing grouped images.""" + + memory_id: str + title: str + description: str + location_name: str + date_start: Optional[str] + date_end: Optional[str] + image_count: int + images: List[MemoryImage] + thumbnail_image_id: str + center_lat: Optional[float] = None + center_lon: Optional[float] = None + type: Literal["location", "date"] # Memory clustering type + + +class GenerateMemoriesResponse(BaseModel): + """Response for generate memories endpoint.""" + + success: bool + message: str + memory_count: int + image_count: int + memories: List[Memory] + + +class TimelineResponse(BaseModel): + """Response for timeline endpoint.""" + + success: bool + date_range: Dict[str, str] + memory_count: int + memories: List[Memory] + + +class OnThisDayResponse(BaseModel): + """Response for on-this-day endpoint.""" + + success: bool + today: str + years: List[int] + image_count: int + images: List[MemoryImage] + + +class LocationCluster(BaseModel): + """Location cluster with photo count.""" + + location_name: str + center_lat: float + center_lon: float + image_count: int + sample_images: List[MemoryImage] + + +class LocationsResponse(BaseModel): + """Response for locations endpoint.""" + + success: bool + location_count: int + locations: List[LocationCluster] + 
+
+# API Endpoints
+
+
+@router.post("/generate", response_model=GenerateMemoriesResponse)
+def generate_memories(
+    location_radius_km: float = Query(
+        5.0, ge=0.1, le=100, description="Location clustering radius in km"
+    ),
+    date_tolerance_days: int = Query(
+        3, ge=1, le=30, description="Date tolerance in days"
+    ),
+    min_images: int = Query(2, ge=1, le=10, description="Minimum images per memory"),
+):
+    """
+    Generate memories from ALL images:
+    - GPS images → location-based memories
+    - Non-GPS images with timestamps → monthly date-based memories
+
+    The response message reports the breakdown, e.g.
+    "12 memories (8 location, 4 date)".
+    """
+    try:
+        logger.info(
+            f"Generating memories: radius={location_radius_km}km, date_tolerance={date_tolerance_days}days, min_images={min_images}"
+        )
+
+        # Fetch ALL images
+        from app.database.images import db_get_all_images_for_memories
+
+        images = db_get_all_images_for_memories()
+
+        if not images:
+            return GenerateMemoriesResponse(
+                success=True,
+                message="No images found",
+                memory_count=0,
+                image_count=0,
+                memories=[],
+            )
+
+        logger.info(f"Processing {len(images)} images")
+
+        # Cluster into memories
+        clustering = MemoryClustering(
+            location_radius_km=location_radius_km,
+            date_tolerance_days=date_tolerance_days,
+            min_images_per_memory=min_images,
+        )
+
+        memories = clustering.cluster_memories(images)
+
+        # Calculate breakdown
+        location_count = sum(1 for m in memories if m.get("type") == "location")
+        date_count = sum(1 for m in memories if m.get("type") == "date")
+
+        logger.info(
+            f"Generated {len(memories)} memories (location: {location_count}, date: {date_count})"
+        )
+
+        return GenerateMemoriesResponse(
+            success=True,
+            message=f"{len(memories)} memories ({location_count} location, {date_count} date)",
+            memory_count=len(memories),
+            image_count=len(images),
+            memories=memories,
+        )
+
+    except Exception:
+        logger.error("Error generating memories", exc_info=True)
+        raise HTTPException(status_code=500, detail="Failed to generate memories")
+
+
+@router.get("/timeline", response_model=TimelineResponse)
+def get_timeline(
+    days: int = Query(365, ge=1, le=3650, description="Number of days to look back"),
+    location_radius_km: float = Query(
+        5.0, ge=0.1, le=100, description="Location clustering radius in km"
+    ),
+    date_tolerance_days: int = Query(
+        3, ge=1, le=30, description="Date tolerance in days"
+    ),
+):
+    """
+    Get memories from the past N days as a timeline.
+
+    This endpoint:
+    1. Calculates date range (today - N days to today)
+    2. Fetches images within that date range
+    3. Clusters them into memories
+    4.
Returns timeline of memories + + Args: + days: Number of days to look back (default: 365 = 1 year) + location_radius_km: Location clustering radius (default: 5km) + date_tolerance_days: Date tolerance for temporal clustering (default: 3) + + Returns: + TimelineResponse with memories ordered by date + + Raises: + HTTPException: If database query fails + """ + try: + # Calculate date range + end_date = datetime.now() + start_date = end_date - timedelta(days=days) + + logger.info(f"Getting timeline from {start_date.date()} to {end_date.date()}") + + # Fetch images within date range + images = db_get_images_by_date_range(start_date, end_date) + + if not images: + return TimelineResponse( + success=True, + date_range={ + "start": start_date.isoformat(), + "end": end_date.isoformat(), + }, + memory_count=0, + memories=[], + ) + + logger.info(f"Found {len(images)} images in date range") + + # Cluster into memories + clustering = MemoryClustering( + location_radius_km=location_radius_km, + date_tolerance_days=date_tolerance_days, + min_images_per_memory=1, # Allow single images in timeline + ) + + memories = clustering.cluster_memories(images) + + return TimelineResponse( + success=True, + date_range={"start": start_date.isoformat(), "end": end_date.isoformat()}, + memory_count=len(memories), + memories=memories, + ) + + except Exception: + logger.error("Error getting timeline", exc_info=True) + raise HTTPException(status_code=500, detail="Failed to get timeline") + + +@router.get("/on-this-day", response_model=OnThisDayResponse) +def get_on_this_day(): + """ + Get photos taken on this date in previous years. + + This endpoint: + 1. Gets current month and day + 2. Searches for images from this month-day in all previous years + 3. Groups by year + 4. Returns images sorted by year (most recent first) + + Returns: + OnThisDayResponse with images from this date in previous years + + Raises: + HTTPException: If database query fails + """ + try: + today = datetime.now() + current_month = today.month + current_day = today.day + + logger.info(f"Getting 'On This Day' for {today.strftime('%B %d')}") + + # Search for images from this month-day in past years + # Go back 10 years maximum + all_images = [] + years_found = [] + + for year_offset in range(1, 11): # 1-10 years ago + target_year = today.year - year_offset + + try: + images = db_get_images_by_year_month(target_year, current_month) + + # Filter to specific day - parse each image defensively + day_images = [] + for img in images: + captured_at_str = img.get("captured_at") + if not captured_at_str: + continue + + try: + # Strip trailing Z and parse ISO format + captured_at_str = captured_at_str.rstrip("Z") + captured_dt = datetime.fromisoformat(captured_at_str) + + # Only include if day matches + if captured_dt.day == current_day: + day_images.append(img) + except (ValueError, TypeError, AttributeError): + # Skip images with malformed dates + logger.debug( + f"Skipping image with invalid date: {captured_at_str}" + ) + continue + + if day_images: + all_images.extend(day_images) + years_found.append(target_year) + logger.info(f"Found {len(day_images)} images from {target_year}") + + except Exception as e: + logger.warning(f"Error querying year {target_year}: {e}") + continue + + # Sort by year (most recent first) + def parse_captured_at(img): + """Safely parse captured_at date, return datetime.min on failure.""" + captured_at = img.get("captured_at") + if not captured_at: + return datetime.min + try: + + if isinstance(captured_at, str): + captured_at = 
captured_at.rstrip("Z") + return datetime.fromisoformat(captured_at) + except (ValueError, TypeError, AttributeError): + return datetime.min + + all_images.sort(key=parse_captured_at, reverse=True) + + return OnThisDayResponse( + success=True, + today=today.strftime("%B %d"), + years=sorted(years_found, reverse=True), + image_count=len(all_images), + images=all_images, + ) + + except Exception: + logger.error("Error getting 'On This Day'", exc_info=True) + raise HTTPException(status_code=500, detail="Failed to get 'On This Day'") + + +@router.get("/locations", response_model=LocationsResponse) +def get_locations( + location_radius_km: float = Query( + 5.0, ge=0.1, le=100, description="Location clustering radius in km" + ), + max_sample_images: int = Query( + 5, ge=1, le=20, description="Max sample images per location" + ), +): + """ + Get all unique locations where photos were taken. + + This endpoint: + 1. Fetches all images with GPS coordinates + 2. Clusters them by location + 3. Returns location clusters with photo counts + 4. Includes sample images for each location + + Args: + location_radius_km: Location clustering radius (default: 5km) + max_sample_images: Maximum sample images per location (default: 5) + + Returns: + LocationsResponse with list of location clusters + + Raises: + HTTPException: If database query fails + """ + try: + logger.info(f"Getting locations with radius={location_radius_km}km") + + # Fetch all images with location data + images = db_get_images_with_location() + + if not images: + return LocationsResponse(success=True, location_count=0, locations=[]) + + logger.info(f"Found {len(images)} images with location data") + + # Cluster by location only (no date clustering) + clustering = MemoryClustering( + location_radius_km=location_radius_km, + date_tolerance_days=3, + min_images_per_memory=1, + ) + + # Get location clusters using public API + location_clusters = clustering.cluster_by_location_only(images) + + # Create location cluster objects + locations = [] + for cluster_images in location_clusters: + if not cluster_images: + continue + + # Calculate center + center_lat = sum(img["latitude"] for img in cluster_images) / len( + cluster_images + ) + center_lon = sum(img["longitude"] for img in cluster_images) / len( + cluster_images + ) + + # Get location name + location_name = clustering._reverse_geocode(center_lat, center_lon) + + # Get sample images (up to max_sample_images) + sample_images = cluster_images[:max_sample_images] + + locations.append( + LocationCluster( + location_name=location_name, + center_lat=center_lat, + center_lon=center_lon, + image_count=len(cluster_images), + sample_images=sample_images, + ) + ) + + # Sort by image count (most photos first) + locations.sort(key=lambda loc: loc.image_count, reverse=True) + + return LocationsResponse( + success=True, location_count=len(locations), locations=locations + ) + + except Exception: + logger.error("Error getting locations", exc_info=True) + raise HTTPException(status_code=500, detail="Failed to get locations") diff --git a/backend/app/routes/user_preferences.py b/backend/app/routes/user_preferences.py index 3a80d4464..678e8cfc0 100644 --- a/backend/app/routes/user_preferences.py +++ b/backend/app/routes/user_preferences.py @@ -8,7 +8,6 @@ ErrorResponse, ) - router = APIRouter() diff --git a/backend/app/utils/extract_location_metadata.py b/backend/app/utils/extract_location_metadata.py new file mode 100644 index 000000000..2eede996b --- /dev/null +++ 
b/backend/app/utils/extract_location_metadata.py @@ -0,0 +1,415 @@ +""" +Location and Datetime Metadata Extraction Utility + +This module extracts GPS coordinates and capture datetime from image metadata JSON +and populates the dedicated latitude, longitude, and captured_at columns in the database. + +Usage: + python -m app.utils.extract_location_metadata + +Author: PictoPy Team +Date: 2025-12-14 +""" + +import json +import sqlite3 +from datetime import datetime +from typing import Optional, Tuple, Dict, Any +from pathlib import Path + +from app.config.settings import DATABASE_PATH +from app.logging.setup_logging import get_logger + +# Initialize logger +logger = get_logger(__name__) + + +class MetadataExtractor: + """ + Extracts location and datetime information from image metadata JSON. + + This class provides utilities to safely parse metadata and extract: + - GPS coordinates (latitude, longitude) + - Capture datetime + """ + + def __init__(self): + """Initialize the metadata extractor.""" + self.stats = { + "total": 0, + "updated": 0, + "with_location": 0, + "with_datetime": 0, + "with_both": 0, + "skipped": 0, + "errors": 0, + } + + def extract_gps_coordinates( + self, metadata: Dict[str, Any] + ) -> Tuple[Optional[float], Optional[float]]: + """ + Extract GPS coordinates from metadata dictionary. + + Supports multiple metadata structures: + - Top-level: {"latitude": 28.6, "longitude": 77.2} + - Nested EXIF: {"exif": {"gps": {"latitude": 28.6, "longitude": 77.2}}} + - Alternative names: lat, lon, Latitude, Longitude + + Args: + metadata: Parsed metadata dictionary + + Returns: + Tuple of (latitude, longitude) or (None, None) if not found + + Validates: + - Latitude: -90 to 90 + - Longitude: -180 to 180 + """ + latitude = None + longitude = None + + try: + if not isinstance(metadata, dict): + return None, None + + # Method 1: Direct top-level fields + lat = metadata.get("latitude") + lon = metadata.get("longitude") + + # Method 2: Check nested 'exif' -> 'gps' structure + if not lat or not lon: + exif = metadata.get("exif", {}) + if isinstance(exif, dict): + gps = exif.get("gps", {}) + if isinstance(gps, dict): + lat = lat or gps.get("latitude") + lon = lon or gps.get("longitude") + + # Method 3: Check alternative field names + if not lat or not lon: + lat = lat or metadata.get("lat") or metadata.get("Latitude") + lon = lon or metadata.get("lon") or metadata.get("Longitude") + + # Validate and convert coordinates + if lat is not None and lon is not None: + try: + lat = float(lat) + lon = float(lon) + + # Sanity check: valid coordinate ranges + if -90 <= lat <= 90 and -180 <= lon <= 180: + latitude = lat + longitude = lon + else: + logger.warning( + f"Invalid coordinate range: lat={lat}, lon={lon}" + ) + except (ValueError, TypeError) as e: + logger.warning(f"Could not convert coordinates to float: {e}") + + except Exception as e: + logger.error(f"Unexpected error extracting GPS coordinates: {e}") + + return latitude, longitude + + def extract_datetime(self, metadata: Dict[str, Any]) -> Optional[datetime]: + """ + Extract capture datetime from metadata dictionary. + + Supports multiple datetime formats and field names: + - date_created, datetime, date_taken, timestamp, DateTime + - Nested: exif.datetime, exif.DateTimeOriginal + - Formats: ISO 8601, EXIF format (YYYY:MM:DD HH:MM:SS), etc. 
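+
+        Example (illustrative):
+            >>> MetadataExtractor().extract_datetime({"DateTime": "2024:01:15 14:30:45"})
+            datetime.datetime(2024, 1, 15, 14, 30, 45)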
+ + Args: + metadata: Parsed metadata dictionary + + Returns: + datetime object or None if not found/parseable + """ + captured_at = None + + try: + if not isinstance(metadata, dict): + return None + + # Method 1: Check common top-level field names + date_str = None + for field in [ + "date_created", + "datetime", + "date_taken", + "timestamp", + "DateTime", + ]: + if field in metadata: + date_str = metadata[field] + break + + # Method 2: Check nested 'exif' structure + if not date_str: + exif = metadata.get("exif", {}) + if isinstance(exif, dict): + date_str = ( + exif.get("datetime") + or exif.get("DateTime") + or exif.get("DateTimeOriginal") + or exif.get("DateTimeDigitized") + ) + + # Parse datetime string + if date_str: + date_str = str(date_str).strip() + + # Try multiple datetime formats + datetime_formats = [ + "%Y-%m-%d %H:%M:%S", # 2024-01-15 14:30:45 + "%Y:%m:%d %H:%M:%S", # 2024:01:15 14:30:45 (EXIF format) + "%Y-%m-%dT%H:%M:%S", # 2024-01-15T14:30:45 (ISO) + "%Y-%m-%dT%H:%M:%S.%f", # 2024-01-15T14:30:45.123456 + "%Y-%m-%d", # 2024-01-15 + "%d/%m/%Y %H:%M:%S", # 15/01/2024 14:30:45 + "%d/%m/%Y", # 15/01/2024 + "%m/%d/%Y %H:%M:%S", # 01/15/2024 14:30:45 + "%m/%d/%Y", # 01/15/2024 + ] + + # Try ISO format first (handles timezone) + if "T" in date_str: + try: + # Remove timezone suffix for simpler parsing + date_str_clean = ( + date_str.replace("Z", "").split("+")[0].split("-") + ) + # Rejoin only date-time parts (not timezone) + if len(date_str_clean) >= 3: + date_str_clean = "-".join(date_str_clean[:3]) + captured_at = datetime.fromisoformat(date_str_clean) + except Exception: + pass + + # Try other formats + if not captured_at: + for fmt in datetime_formats: + try: + captured_at = datetime.strptime(date_str, fmt) + break + except (ValueError, TypeError): + continue + + if not captured_at: + logger.warning(f"Could not parse datetime: {date_str}") + + except Exception as e: + logger.error(f"Unexpected error extracting datetime: {e}") + + return captured_at + + def extract_all( + self, metadata_json: str + ) -> Tuple[Optional[float], Optional[float], Optional[datetime]]: + """ + Extract GPS coordinates and datetime from metadata JSON string. + + Args: + metadata_json: JSON string from images.metadata column + + Returns: + Tuple of (latitude, longitude, captured_at) + """ + latitude = None + longitude = None + captured_at = None + + # Handle null/empty metadata + if not metadata_json or metadata_json == "null": + return None, None, None + + try: + # Parse JSON + if isinstance(metadata_json, bytes): + metadata_json = metadata_json.decode("utf-8") + + metadata = json.loads(metadata_json) + + # Extract GPS coordinates + latitude, longitude = self.extract_gps_coordinates(metadata) + + # Extract datetime + captured_at = self.extract_datetime(metadata) + + except json.JSONDecodeError as e: + logger.warning(f"Invalid JSON in metadata: {e}") + except Exception as e: + logger.error(f"Unexpected error parsing metadata: {e}") + + return latitude, longitude, captured_at + + def migrate_metadata(self) -> Dict[str, int]: + """ + Main migration function to populate latitude, longitude, and captured_at + columns for all images with metadata. + + This function: + 1. Connects to the database + 2. Retrieves all images with metadata + 3. Extracts GPS coordinates and datetime + 4. Updates the database with extracted values + 5. 
Reports statistics + + Returns: + Dictionary with migration statistics + """ + logger.info("=" * 70) + logger.info("Starting metadata extraction migration...") + logger.info("=" * 70) + + # Connect to database + conn = sqlite3.connect(DATABASE_PATH) + cursor = conn.cursor() + + try: + # Fetch all images with metadata + logger.info("Fetching images from database...") + cursor.execute("SELECT id, metadata FROM images WHERE metadata IS NOT NULL") + images = cursor.fetchall() + + self.stats["total"] = len(images) + logger.info(f"Found {self.stats['total']} images with metadata") + + if self.stats["total"] == 0: + logger.warning("No images found with metadata") + return self.stats + + # Process each image + updates = [] + for image_id, metadata_json in images: + try: + lat, lon, dt = self.extract_all(metadata_json) + + # Only update if we extracted something + if lat is not None or lon is not None or dt is not None: + updates.append( + { + "id": image_id, + "latitude": lat, + "longitude": lon, + "captured_at": dt, + } + ) + + # Track statistics + has_location = lat is not None and lon is not None + has_datetime = dt is not None + + if has_location: + self.stats["with_location"] += 1 + if has_datetime: + self.stats["with_datetime"] += 1 + if has_location and has_datetime: + self.stats["with_both"] += 1 + else: + self.stats["skipped"] += 1 + + except Exception as e: + self.stats["errors"] += 1 + logger.error(f"Error processing image {image_id}: {e}") + + # Batch update database + if updates: + logger.info(f"Updating {len(updates)} images...") + + for update_data in updates: + cursor.execute( + """ + UPDATE images + SET latitude = ?, + longitude = ?, + captured_at = ? + WHERE id = ? + """, + ( + update_data["latitude"], + update_data["longitude"], + update_data["captured_at"], + update_data["id"], + ), + ) + + conn.commit() + self.stats["updated"] = len(updates) + logger.info(f"Successfully updated {self.stats['updated']} images") + + # Print summary + self._print_summary() + + except Exception as e: + logger.error(f"Migration failed: {e}") + conn.rollback() + raise + + finally: + conn.close() + + return self.stats + + def _print_summary(self): + """Print migration summary statistics.""" + logger.info("\n" + "=" * 70) + logger.info("METADATA EXTRACTION SUMMARY") + logger.info("=" * 70) + logger.info(f"Total images processed: {self.stats['total']}") + logger.info(f"Images updated: {self.stats['updated']}") + logger.info( + f"Images with location data: {self.stats['with_location']} ({self._percentage('with_location')}%)" + ) + logger.info( + f"Images with datetime: {self.stats['with_datetime']} ({self._percentage('with_datetime')}%)" + ) + logger.info( + f"Images with both: {self.stats['with_both']} ({self._percentage('with_both')}%)" + ) + logger.info(f"Images skipped (no data): {self.stats['skipped']}") + logger.info(f"Errors encountered: {self.stats['errors']}") + logger.info("=" * 70) + + def _percentage(self, key: str) -> str: + """Calculate percentage for a statistic.""" + if self.stats["total"] == 0: + return "0.0" + return f"{(self.stats[key] / self.stats['total'] * 100):.1f}" + + +def main(): + """ + Main entry point for the metadata extraction script. 
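+
+    On success the script logs the summary produced by _print_summary() and
+    exits 0; if any per-image errors were counted it exits 1. Illustrative
+    excerpt (counts are examples, not real output):
+
+        METADATA EXTRACTION SUMMARY
+        Total images processed: 1200
+        Images updated: 950
+        Images with location data: 420 (35.0%)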
+ + Usage: + python -m app.utils.extract_location_metadata + """ + try: + # Check if database exists + if not Path(DATABASE_PATH).exists(): + logger.error(f"Database not found at: {DATABASE_PATH}") + return + + # Create extractor and run migration + extractor = MetadataExtractor() + stats = extractor.migrate_metadata() + + # Exit with appropriate code + if stats["errors"] > 0: + logger.warning("Migration completed with errors") + exit(1) + else: + logger.info("✅ Migration completed successfully!") + exit(0) + + except Exception as e: + logger.error(f"❌ Migration failed: {e}") + exit(1) + + +if __name__ == "__main__": + main() diff --git a/backend/app/utils/images.py b/backend/app/utils/images.py index c3b202205..ccf65cdf8 100644 --- a/backend/app/utils/images.py +++ b/backend/app/utils/images.py @@ -19,6 +19,7 @@ from app.models.FaceDetector import FaceDetector from app.models.ObjectClassifier import ObjectClassifier from app.logging.setup_logging import get_logger +from app.utils.extract_location_metadata import MetadataExtractor logger = get_logger(__name__) @@ -141,6 +142,7 @@ def image_util_prepare_image_records( ) -> List[Dict]: """ Prepare image records with thumbnails for database insertion. + Automatically extracts GPS coordinates and capture datetime from metadata. Args: image_files: List of image file paths @@ -150,6 +152,8 @@ def image_util_prepare_image_records( List of image record dictionaries ready for database insertion """ image_records = [] + extractor = MetadataExtractor() + for image_path in image_files: folder_id = image_util_find_folder_id_for_image(image_path, folder_path_to_id) @@ -166,16 +170,50 @@ def image_util_prepare_image_records( if image_util_generate_thumbnail(image_path, thumbnail_path): metadata = image_util_extract_metadata(image_path) logger.debug(f"Extracted metadata for {image_path}: {metadata}") - image_records.append( - { - "id": image_id, - "path": image_path, - "folder_id": folder_id, - "thumbnailPath": thumbnail_path, - "metadata": json.dumps(metadata), - "isTagged": False, - } - ) + + # Automatically extract GPS coordinates and datetime from metadata + # Don't fail upload if extraction fails + metadata_json = json.dumps(metadata) + latitude, longitude, captured_at = None, None, None + + try: + latitude, longitude, captured_at = extractor.extract_all(metadata_json) + + # Log GPS extraction results + if latitude and longitude: + logger.info( + f"GPS extracted for {os.path.basename(image_path)}: ({latitude}, {longitude})" + ) + if captured_at: + logger.debug( + f"Date extracted for {os.path.basename(image_path)}: {captured_at}" + ) + except Exception as e: + logger.warning( + f"GPS extraction failed for {os.path.basename(image_path)}: {e}" + ) + # Continue without GPS - don't fail the upload + + # Build image record with GPS data + # ALWAYS include latitude, longitude, captured_at (even if None) + # to satisfy SQL INSERT statement named parameters + image_record = { + "id": image_id, + "path": image_path, + "folder_id": folder_id, + "thumbnailPath": thumbnail_path, + "metadata": metadata_json, + "isTagged": False, + "latitude": latitude, # Can be None + "longitude": longitude, # Can be None + "captured_at": ( + captured_at.isoformat() + if isinstance(captured_at, datetime.datetime) and captured_at + else captured_at + ), # Can be None + } + + image_records.append(image_record) return image_records diff --git a/backend/app/utils/memory_clustering.py b/backend/app/utils/memory_clustering.py new file mode 100644 index 000000000..79928968b --- 
/dev/null +++ b/backend/app/utils/memory_clustering.py @@ -0,0 +1,946 @@ +""" +Memory Clustering Algorithm + +This module groups images into "memories" based on spatial proximity (location) +and temporal proximity (date/time). Uses DBSCAN for spatial clustering and +date-based grouping for temporal clustering. + +A "memory" is a collection of photos taken at the same place around the same time. + +Author: PictoPy Team +Date: 2025-12-14 +""" + +from datetime import datetime +from typing import List, Dict, Any, Optional +from collections import defaultdict +import hashlib + +import numpy as np +from sklearn.cluster import DBSCAN + +from app.logging.setup_logging import get_logger + +# Initialize logger +logger = get_logger(__name__) + + +# ============================================================================ +# City Coordinate Mapping for Reverse Geocoding +# ============================================================================ + +# Major city coordinates for approximate reverse geocoding +CITY_COORDINATES = { + # India - Major Cities + "Jaipur, Rajasthan": (26.9124, 75.7873), + "Delhi, India": (28.7041, 77.1025), + "Mumbai, Maharashtra": (19.0760, 72.8777), + "Bangalore, Karnataka": (12.9716, 77.5946), + "Kolkata, West Bengal": (22.5726, 88.3639), + "Chennai, Tamil Nadu": (13.0827, 80.2707), + "Hyderabad, Telangana": (17.3850, 78.4867), + "Pune, Maharashtra": (18.5204, 73.8567), + "Ahmedabad, Gujarat": (23.0225, 72.5714), + "Goa, India": (15.2993, 74.1240), + "Agra, Uttar Pradesh": (27.1767, 78.0081), + "Udaipur, Rajasthan": (24.5854, 73.7125), + "Jaisalmer, Rajasthan": (26.9157, 70.9083), + "Varanasi, Uttar Pradesh": (25.3176, 82.9739), + "Rishikesh, Uttarakhand": (30.0869, 78.2676), + "Shimla, Himachal Pradesh": (31.1048, 77.1734), + "Manali, Himachal Pradesh": (32.2432, 77.1892), + "Darjeeling, West Bengal": (27.0410, 88.2663), + "Ooty, Tamil Nadu": (11.4102, 76.6950), + "Coorg, Karnataka": (12.3375, 75.8069), + # International - Major Tourist Destinations + "Paris, France": (48.8566, 2.3522), + "London, UK": (51.5074, -0.1278), + "New York, USA": (40.7128, -74.0060), + "Tokyo, Japan": (35.6762, 139.6503), + "Dubai, UAE": (25.2048, 55.2708), + "Singapore": (1.3521, 103.8198), + "Bangkok, Thailand": (13.7563, 100.5018), + "Bali, Indonesia": (-8.4095, 115.1889), + "Sydney, Australia": (-33.8688, 151.2093), + "Rome, Italy": (41.9028, 12.4964), +} + + +def find_nearest_city( + latitude: float, longitude: float, max_distance_km: float = 50.0 +) -> Optional[str]: + """ + Find the nearest known city to given coordinates. 
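+
+    Example (illustrative, using the coordinate table above):
+        >>> find_nearest_city(26.91, 75.79)
+        'Jaipur, Rajasthan'
+        >>> find_nearest_city(0.0, 0.0) is None  # open ocean, nothing within 50 km
+        True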
+ + Args: + latitude: GPS latitude + longitude: GPS longitude + max_distance_km: Maximum distance to consider (default: 50km) + + Returns: + City name if within range, None otherwise + """ + from math import radians, cos, sin, asin, sqrt + + def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float: + """Calculate distance between two points in km using Haversine formula.""" + lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2]) + dlat = lat2 - lat1 + dlon = lon2 - lon1 + a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2 + c = 2 * asin(sqrt(a)) + km = 6371 * c # Radius of Earth in km + return km + + nearest_city = None + min_distance = float("inf") + + for city_name, (city_lat, city_lon) in CITY_COORDINATES.items(): + distance = haversine_distance(latitude, longitude, city_lat, city_lon) + if distance < min_distance and distance <= max_distance_km: + min_distance = distance + nearest_city = city_name + + return nearest_city + + +class MemoryClustering: + """ + Clusters images into memories based on location and time proximity. + + Algorithm: + 1. Spatial clustering: Group images by GPS coordinates using DBSCAN + 2. Temporal clustering: Within each location cluster, group by date + 3. Memory creation: Generate memory objects with metadata + + Parameters: + location_radius_km: Maximum distance between photos in the same location (default: 5km) + date_tolerance_days: Maximum days between photos in the same memory (default: 3) + min_images_per_memory: Minimum images required to form a memory (default: 2) + """ + + def __init__( + self, + location_radius_km: float = 5.0, + date_tolerance_days: int = 3, + min_images_per_memory: int = 2, + ): + """Initialize the memory clustering algorithm.""" + self.location_radius_km = location_radius_km + self.date_tolerance_days = date_tolerance_days + self.min_images_per_memory = min_images_per_memory + + # Convert km to radians for DBSCAN with haversine metric + # Earth radius in kilometers + EARTH_RADIUS_KM = 6371.0 + self.location_eps_radians = location_radius_km / EARTH_RADIUS_KM + + logger.info( + f"MemoryClustering initialized: radius={location_radius_km}km, date_tolerance={date_tolerance_days}days, min_images={min_images_per_memory}" + ) + + def cluster_memories(self, images: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + FLEXIBLE: Cluster ALL images into memories. + - Has GPS + Date: Cluster by location using DBSCAN, then by date within each location + - Has GPS only: Cluster by location using DBSCAN + - Has Date only: Group by month (if ≥ min_images_per_memory photos per month; default 2) + - Has neither: Skip (can't create meaningful memory) + + Images work with EITHER date OR location - not both required! 
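+
+        Example (illustrative; images as returned by db_get_all_images_for_memories):
+            >>> mc = MemoryClustering(location_radius_km=5.0, date_tolerance_days=3)
+            >>> memories = mc.cluster_memories(images)
+            >>> {m["type"] for m in memories} <= {"location", "date"}
+            True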
+ + Args: + images: List of image dicts with id, path, thumbnailPath, + latitude, longitude, captured_at + + Returns: + List of memories with type='location' or type='date' + """ + logger.info(f"Starting flexible clustering for {len(images)} images") + + if not images: + return [] + + try: + # Separate images by what data they have + gps_images = [] + date_only_images = [] + skipped_count = 0 + + for img in images: + has_gps = ( + img.get("latitude") is not None and img.get("longitude") is not None + ) + has_date = img.get("captured_at") + + if has_gps: + # Has GPS (with or without date) → location-based clustering + gps_images.append(img) + elif has_date: + # Has date but no GPS → date-based grouping + date_only_images.append(img) + else: + # Has neither GPS nor date → skip + skipped_count += 1 + + logger.info( + f"GPS-based: {len(gps_images)}, Date-only: {len(date_only_images)}, Skipped: {skipped_count}" + ) + + memories = [] + + # Process location-based memories (these may also have dates) + if gps_images: + location_memories = self._cluster_location_images(gps_images) + memories.extend(location_memories) + + # Process date-only memories (no GPS) + if date_only_images: + date_memories = self._cluster_date_images(date_only_images) + memories.extend(date_memories) + + # Sort by date descending + memories.sort(key=lambda m: m.get("date_start") or "", reverse=True) + + logger.info(f"Generated {len(memories)} total memories") + return memories + + except Exception as e: + logger.error(f"Clustering failed: {e}", exc_info=True) + return [] + + def cluster_by_location_only( + self, images: List[Dict[str, Any]] + ) -> List[List[Dict[str, Any]]]: + """ + Public API: Cluster images by location only, without temporal grouping. + + Args: + images: List of image dictionaries with GPS coordinates + + Returns: + List of location clusters (each cluster is a list of images) + """ + try: + valid_images = self._filter_valid_images(images) + if not valid_images: + return [] + + return self._cluster_by_location(valid_images) + except Exception as e: + logger.error(f"Location-only clustering failed: {e}", exc_info=True) + return [] + + def _cluster_location_images( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """ + SIMPLIFIED: Use existing DBSCAN clustering for GPS images. + """ + try: + valid_images = self._filter_valid_images(images) + if not valid_images: + return [] + + location_clusters = self._cluster_by_location(valid_images) + memories = [] + + for cluster in location_clusters: + temporal_clusters = self._cluster_by_date(cluster) + for temp_cluster in temporal_clusters: + if len(temp_cluster) >= self.min_images_per_memory: + memory = self._create_simple_memory( + temp_cluster, memory_type="location" + ) + if memory is not None: + memories.append(memory) + + return memories + except Exception as e: + logger.error(f"Location clustering failed: {e}") + return [] + + def _cluster_date_images( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """ + FLEXIBLE: Group date-only images by year-month. + Uses min_images_per_memory (default: 2) as threshold. 
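+
+        Example: two non-GPS photos dated 2025-11-03 and 2025-11-21 fall into
+        the same "2025-11" bucket, so with the default threshold they form one
+        memory titled "November 2025".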
+ """ + try: + # Group by year-month + monthly_groups = defaultdict(list) + + for img in images: + captured_at = img.get("captured_at") + if not captured_at: + continue + + # Parse date + if isinstance(captured_at, str): + try: + dt = datetime.fromisoformat(captured_at.replace("Z", "")) + except (ValueError, AttributeError): + continue + elif isinstance(captured_at, datetime): + dt = captured_at + else: + continue + + # Group by year-month + month_key = dt.strftime("%Y-%m") + monthly_groups[month_key].append(img) + + # Create memories for months with enough photos (uses min_images_per_memory) + memories = [] + for month_key, month_images in monthly_groups.items(): + if len(month_images) >= self.min_images_per_memory: + memory = self._create_simple_memory( + month_images, memory_type="date" + ) + if memory: + memories.append(memory) + + return memories + except Exception as e: + logger.error(f"Date clustering failed: {e}") + return [] + + def _create_simple_memory( + self, images: List[Dict[str, Any]], memory_type: str = "location" + ) -> Dict[str, Any]: + """ + SIMPLIFIED: Create a memory object with minimal fields. + Ensures all datetime objects are converted to ISO strings. + """ + try: + # Convert datetime objects to ISO strings in images + cleaned_images = [] + for img in images: + img_copy = img.copy() + if img_copy.get("captured_at") and isinstance( + img_copy["captured_at"], datetime + ): + img_copy["captured_at"] = img_copy["captured_at"].isoformat() + cleaned_images.append(img_copy) + + # Sort by date + sorted_images = sorted( + cleaned_images, key=lambda x: x.get("captured_at") or "" + ) + + # Get date range + dates = [ + img.get("captured_at") + for img in sorted_images + if img.get("captured_at") + ] + if dates: + if isinstance(dates[0], str): + dates = [datetime.fromisoformat(d.replace("Z", "")) for d in dates] + date_start = min(dates).isoformat() + date_end = max(dates).isoformat() + date_obj = min(dates) + else: + date_start = date_end = None + date_obj = None + + # Simple titles + if memory_type == "location": + # Calculate center first + lats = [ + img["latitude"] for img in images if img.get("latitude") is not None + ] + lons = [ + img["longitude"] + for img in images + if img.get("longitude") is not None + ] + center_lat = np.mean(lats) if lats else 0 + center_lon = np.mean(lons) if lons else 0 + + # Get actual location name using reverse geocoding + location_name = self._reverse_geocode(center_lat, center_lon) + + # Create title based on date range + if len(dates) > 1: + # Multiple dates: show date range + start_date = min(dates) + end_date = max(dates) + if start_date.strftime("%B %Y") == end_date.strftime("%B %Y"): + # Same month: "Jaipur in Nov 2025" + title = f"{location_name} in {start_date.strftime('%b %Y')}" + else: + # Different months: "Jaipur - Nov-Dec 2025" or "Jaipur - Nov 2025 to Jan 2026" + if start_date.year == end_date.year: + title = f"{location_name} - {start_date.strftime('%b')}-{end_date.strftime('%b %Y')}" + else: + title = f"{location_name} - {start_date.strftime('%b %Y')} to {end_date.strftime('%b %Y')}" + else: + # Single date or no dates: just the location name + title = location_name + else: + # Date-based: "Month Year" + if date_obj: + title = date_obj.strftime("%B %Y") + else: + title = "Undated Photos" + location_name = "" + center_lat = 0 + center_lon = 0 + + # Create memory - use _generate_memory_id for unique IDs + memory_id = self._generate_memory_id(center_lat, center_lon, date_obj) + + return { + "memory_id": memory_id, + "title": 
title, + "description": f"{len(images)} photos", + "location_name": location_name, + "date_start": date_start, + "date_end": date_end, + "image_count": len(images), + "images": sorted_images, + "thumbnail_image_id": sorted_images[0].get("id", ""), + "center_lat": center_lat, + "center_lon": center_lon, + "type": memory_type, # Add type field + } + except Exception as e: + logger.error(f"Memory creation failed: {e}") + return None + + def _cluster_gps_based_memories( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """ + Cluster images with GPS data into location-based memories. + This is the original clustering logic. + + Args: + images: List of images with GPS coordinates + + Returns: + List of location-based memories + """ + # Filter images with valid location data + valid_images = self._filter_valid_images(images) + + if not valid_images: + logger.warning("No images with valid location data") + return [] + + logger.info(f"Processing {len(valid_images)} GPS images") + + # Step 1: Cluster by location (spatial) + location_clusters = self._cluster_by_location(valid_images) + logger.info(f"Created {len(location_clusters)} location clusters") + + # Step 2: Within each location cluster, cluster by date (temporal) + memories = [] + for location_cluster in location_clusters: + temporal_clusters = self._cluster_by_date(location_cluster) + + # Step 3: Create memory objects + for temporal_cluster in temporal_clusters: + if len(temporal_cluster) >= self.min_images_per_memory: + memory = self._create_memory(temporal_cluster) + memories.append(memory) + + return memories + + def _cluster_date_based_memories( + self, images: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """ + Cluster images WITHOUT GPS data into date-based memories. + Groups photos by capture date/time only (screenshots, downloads, edits, etc.) 
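+        Example (hypothetical dates, date_tolerance_days=3, min size 2):
+        photos from Nov 3 and Nov 5 form one memory, while a lone Nov 20
+        photo is discarded for falling below the minimum cluster size.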
+
+        Args:
+            images: List of images without GPS coordinates
+
+        Returns:
+            List of date-based memories
+        """
+        logger.info(f"Clustering {len(images)} non-GPS images by date")
+
+        # Parse and filter images with valid dates
+        valid_images = []
+        for img in images:
+            img_copy = img.copy()
+            captured_at = img_copy.get("captured_at")
+
+            if captured_at:
+                if isinstance(captured_at, str):
+                    try:
+                        captured_at = datetime.fromisoformat(
+                            captured_at.replace("Z", "")
+                        )
+                        img_copy["captured_at"] = captured_at
+                    except Exception:
+                        # Try alternative formats
+                        for fmt in [
+                            "%Y-%m-%d %H:%M:%S",
+                            "%Y:%m:%d %H:%M:%S",
+                            "%Y-%m-%d",
+                        ]:
+                            try:
+                                captured_at = datetime.strptime(captured_at, fmt)
+                                img_copy["captured_at"] = captured_at
+                                break
+                            except Exception:
+                                continue
+                        else:
+                            logger.debug(
+                                f"Could not parse date for image {img.get('id')}"
+                            )
+                            continue
+                elif isinstance(captured_at, datetime):
+                    img_copy["captured_at"] = captured_at
+
+            # Keep only images whose date actually parsed to a datetime;
+            # appending unconditionally would break the sort below for
+            # images that have no usable date
+            if isinstance(img_copy.get("captured_at"), datetime):
+                valid_images.append(img_copy)
+
+        if not valid_images:
+            logger.warning("No non-GPS images with valid dates")
+            return []
+
+        logger.info(f"Found {len(valid_images)} non-GPS images with valid dates")
+
+        # Sort by date
+        valid_images.sort(key=lambda x: x["captured_at"])
+
+        # Group by date tolerance
+        clusters = []
+        current_cluster = [valid_images[0]]
+
+        for i in range(1, len(valid_images)):
+            prev_date = valid_images[i - 1]["captured_at"]
+            curr_date = valid_images[i]["captured_at"]
+
+            # Check if within tolerance
+            date_diff = abs((curr_date - prev_date).days)
+
+            if date_diff <= self.date_tolerance_days:
+                current_cluster.append(valid_images[i])
+            else:
+                # Create memory from current cluster if it meets min size
+                if len(current_cluster) >= self.min_images_per_memory:
+                    clusters.append(current_cluster)
+                # Start new cluster
+                current_cluster = [valid_images[i]]
+
+        # Add last cluster if it meets min size
+        if current_cluster and len(current_cluster) >= self.min_images_per_memory:
+            clusters.append(current_cluster)
+
+        logger.info(f"Created {len(clusters)} date-based clusters")
+
+        # Create memory objects
+        memories = []
+        for cluster in clusters:
+            memory = self._create_date_based_memory(cluster)
+            memories.append(memory)
+
+        return memories
+
+    def _create_date_based_memory(self, images: List[Dict[str, Any]]) -> Dict[str, Any]:
+        """
+        Create a date-based memory object for images without GPS.
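+        For a cluster spanning Nov 3-5, 2025 (hypothetical), the title becomes
+        "November 03, 2025" and the memory_id "mem_date_20251103".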
+
+        Args:
+            images: List of image dictionaries in the cluster (no GPS)
+
+        Returns:
+            Memory dictionary with metadata
+        """
+        # Get date range
+        dates = [img["captured_at"] for img in images if img.get("captured_at")]
+        date_start = min(dates) if dates else None
+        date_end = max(dates) if dates else None
+
+        # Generate title for date-based memory
+        if date_start:
+            if date_start.date() == date_end.date():
+                title = date_start.strftime("%B %d, %Y")
+            else:
+                days = (date_end - date_start).days + 1
+                if days <= 7:
+                    title = date_start.strftime("%B %d, %Y")
+                elif days <= 31:
+                    title = date_start.strftime("%B %Y")
+                else:
+                    # Span both month names, e.g. "November - December 2025"
+                    title = (
+                        f"{date_start.strftime('%B')} - {date_end.strftime('%B %Y')}"
+                        if date_start.month != date_end.month
+                        else date_start.strftime("%B %Y")
+                    )
+        else:
+            title = "Memories Collection"
+
+        # Generate description
+        description = self._generate_description(len(images), date_start, date_end)
+
+        # Select thumbnail (middle image)
+        thumbnail_idx = len(images) // 2
+        thumbnail_image_id = images[thumbnail_idx]["id"]
+
+        # Create memory ID (use timestamp only)
+        if date_start:
+            memory_id = f"mem_date_{date_start.strftime('%Y%m%d')}"
+        else:
+            # Deterministic hash of first 5 image IDs
+            image_ids = "|".join(img["id"] for img in images[:5])
+            hash_digest = hashlib.sha256(image_ids.encode()).hexdigest()[:8]
+            memory_id = f"mem_date_unknown_{hash_digest}"
+
+        # Convert captured_at datetime objects to ISO strings
+        serialized_images = []
+        for img in images:
+            img_copy = img.copy()
+            if img_copy.get("captured_at") and isinstance(
+                img_copy["captured_at"], datetime
+            ):
+                img_copy["captured_at"] = img_copy["captured_at"].isoformat()
+            serialized_images.append(img_copy)
+
+        return {
+            "memory_id": memory_id,
+            "title": title,
+            "description": description,
+            "location_name": "Date-Based Memory",  # Identifier for non-GPS memories
+            "date_start": date_start.isoformat() if date_start else None,
+            "date_end": date_end.isoformat() if date_end else None,
+            "image_count": len(images),
+            "images": serialized_images,
+            "thumbnail_image_id": thumbnail_image_id,
+            "center_lat": 0.0,  # No GPS data
+            "center_lon": 0.0,  # No GPS data
+        }
+
+    def _filter_valid_images(
+        self, images: List[Dict[str, Any]]
+    ) -> List[Dict[str, Any]]:
+        """
+        Filter images that have valid location and datetime data.
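+        Images missing latitude or longitude are dropped entirely; an image
+        whose captured_at string cannot be parsed is kept with captured_at set
+        to None, since its location data is still usable.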
+ + Args: + images: List of image dictionaries + + Returns: + List of valid images with parsed datetime objects + """ + valid_images = [] + + for img in images: + try: + # Check for required fields + if img.get("latitude") is None or img.get("longitude") is None: + continue + + # Parse captured_at if it's a string + captured_at = img.get("captured_at") + img_copy = img.copy() + + if captured_at: + if isinstance(captured_at, str): + try: + # SQLite returns ISO format: "YYYY-MM-DDTHH:MM:SS" + captured_at = datetime.fromisoformat( + captured_at.replace("Z", "") + ) + img_copy["captured_at"] = captured_at + except Exception: + # Try alternative formats + for fmt in [ + "%Y-%m-%d %H:%M:%S", + "%Y:%m:%d %H:%M:%S", + "%Y-%m-%d", + ]: + try: + captured_at = datetime.strptime(captured_at, fmt) + img_copy["captured_at"] = captured_at + break + except Exception: + continue + else: + # Could not parse date, but location is still valid + logger.debug( + f"Could not parse date for image {img.get('id')}: {captured_at}" + ) + # Clear the unparseable string to prevent downstream errors + img_copy["captured_at"] = None + elif isinstance(captured_at, datetime): + img_copy["captured_at"] = captured_at + + valid_images.append(img_copy) + + except Exception as e: + logger.warning(f"Error filtering image {img.get('id')}: {e}") + continue + + return valid_images + + def _cluster_by_location( + self, images: List[Dict[str, Any]] + ) -> List[List[Dict[str, Any]]]: + """ + Cluster images by geographic location using DBSCAN. + + Args: + images: List of image dictionaries with latitude/longitude + + Returns: + List of location clusters (each cluster is a list of images) + """ + if not images: + return [] + + # Extract coordinates + coordinates = np.array([[img["latitude"], img["longitude"]] for img in images]) + + # Convert to radians for haversine metric + coordinates_rad = np.radians(coordinates) + + # Apply DBSCAN clustering + # eps: maximum distance between two samples (in radians for haversine) + # min_samples: minimum number of samples to form a cluster + clustering = DBSCAN( + eps=self.location_eps_radians, + min_samples=1, # Even single photos can form a cluster + metric="haversine", # Use haversine distance for lat/lon + algorithm="ball_tree", + ) + + labels = clustering.fit_predict(coordinates_rad) + + # Group images by cluster label + clusters = defaultdict(list) + for idx, label in enumerate(labels): + if label != -1: # -1 is noise in DBSCAN + clusters[label].append(images[idx]) + + # Noise points (label -1) each become their own cluster + for idx, label in enumerate(labels): + if label == -1: + clusters[f"noise_{idx}"].append(images[idx]) + + return list(clusters.values()) + + def _cluster_by_date( + self, images: List[Dict[str, Any]] + ) -> List[List[Dict[str, Any]]]: + """ + Cluster images by date within a location cluster. + + Groups images that were taken within date_tolerance_days of each other. 
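+        Example (hypothetical dates, date_tolerance_days=3): photos taken on
+        Jan 1, Jan 3 and Jan 10 split into [[Jan 1, Jan 3], [Jan 10]].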
+ + Args: + images: List of image dictionaries with captured_at datetime + + Returns: + List of temporal clusters (each cluster is a list of images) + """ + if not images: + return [] + + # Sort by date + sorted_images = sorted( + [img for img in images if img.get("captured_at")], + key=lambda x: x["captured_at"], + ) + + # Images without dates go into a separate cluster + no_date_images = [img for img in images if not img.get("captured_at")] + + if not sorted_images: + return [no_date_images] if no_date_images else [] + + # Group by date tolerance + clusters = [] + current_cluster = [sorted_images[0]] + + for i in range(1, len(sorted_images)): + prev_date = sorted_images[i - 1]["captured_at"] + curr_date = sorted_images[i]["captured_at"] + + # Check if within tolerance + date_diff = abs((curr_date - prev_date).days) + + if date_diff <= self.date_tolerance_days: + current_cluster.append(sorted_images[i]) + else: + # Start new cluster + clusters.append(current_cluster) + current_cluster = [sorted_images[i]] + + # Add last cluster + if current_cluster: + clusters.append(current_cluster) + + # Add no-date images as separate cluster if exists + if no_date_images: + clusters.append(no_date_images) + + return clusters + + def _create_memory(self, images: List[Dict[str, Any]]) -> Dict[str, Any]: + """ + Create a memory object from a cluster of images. + + Args: + images: List of image dictionaries in the cluster + + Returns: + Memory dictionary with metadata + """ + # Calculate center coordinates + center_lat = np.mean([img["latitude"] for img in images]) + center_lon = np.mean([img["longitude"] for img in images]) + + # Get date range + dates = [img["captured_at"] for img in images if img.get("captured_at")] + if dates: + date_start = min(dates) + date_end = max(dates) + else: + date_start = None + date_end = None + + # Get location name + location_name = self._reverse_geocode(center_lat, center_lon) + + # Generate title + title = self._generate_title(location_name, date_start, len(images)) + + # Generate description + description = self._generate_description(len(images), date_start, date_end) + + # Select thumbnail (first image or middle image) + thumbnail_idx = len(images) // 2 + thumbnail_image_id = images[thumbnail_idx]["id"] + + # Create memory ID (use timestamp + location hash) + memory_id = self._generate_memory_id(center_lat, center_lon, date_start) + + # Convert captured_at datetime objects to ISO strings for all images + serialized_images = [] + for img in images: + img_copy = img.copy() + if img_copy.get("captured_at") and isinstance( + img_copy["captured_at"], datetime + ): + img_copy["captured_at"] = img_copy["captured_at"].isoformat() + serialized_images.append(img_copy) + + return { + "memory_id": memory_id, + "title": title, + "description": description, + "location_name": location_name, + "date_start": date_start.isoformat() if date_start else None, + "date_end": date_end.isoformat() if date_end else None, + "image_count": len(images), + "images": serialized_images, + "thumbnail_image_id": thumbnail_image_id, + "center_lat": float(center_lat), + "center_lon": float(center_lon), + } + + def _reverse_geocode(self, latitude: float, longitude: float) -> str: + """ + Convert GPS coordinates to a human-readable location name. + + Uses city coordinate mapping for major cities, falls back to coordinates. 
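+        E.g. coordinates within 50 km of a known city resolve to its name
+        ("Jaipur, Rajasthan"); anything else falls back to a formatted string
+        such as "26.9124°, 75.7873°" (illustrative values).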
+ + Args: + latitude: GPS latitude + longitude: GPS longitude + + Returns: + Location string (e.g., "Jaipur, Rajasthan" or formatted coordinates) + """ + # Try to find nearest known city + city_name = find_nearest_city(latitude, longitude, max_distance_km=50.0) + + if city_name: + logger.debug( + f"Mapped coordinates ({latitude:.4f}, {longitude:.4f}) to {city_name}" + ) + return city_name + + # Fallback: Return formatted coordinates + return f"{latitude:.4f}°, {longitude:.4f}°" + + def _generate_title( + self, location_name: str, date: Optional[datetime], image_count: int + ) -> str: + """ + Generate a title for the memory. + + Args: + location_name: Human-readable location + date: Date of the memory + image_count: Number of images + + Returns: + Title string + """ + if date: + month_year = date.strftime("%B %Y") + return f"{location_name} - {month_year}" + else: + return f"{location_name} - {image_count} photos" + + def _generate_description( + self, + image_count: int, + date_start: Optional[datetime], + date_end: Optional[datetime], + ) -> str: + """ + Generate a description for the memory. + + Args: + image_count: Number of images + date_start: Start date + date_end: End date + + Returns: + Description string + """ + if date_start and date_end: + if date_start.date() == date_end.date(): + return f"{image_count} photos from {date_start.strftime('%B %d, %Y')}" + else: + days = (date_end - date_start).days + 1 + return f"{image_count} photos over {days} days ({date_start.strftime('%b %d')} - {date_end.strftime('%b %d, %Y')})" + else: + return f"{image_count} photos" + + def _generate_memory_id( + self, latitude: float, longitude: float, date: Optional[datetime] + ) -> str: + """ + Generate a unique ID for the memory. + + Args: + latitude: Center latitude + longitude: Center longitude + date: Date of memory + + Returns: + Unique memory ID + """ + # Create deterministic hash from location and date + lat_rounded = round(latitude, 2) + lon_rounded = round(longitude, 2) + + if date: + date_str = date.strftime("%Y%m%d") + hash_input = f"lat:{lat_rounded}|lon:{lon_rounded}|date:{date_str}" + hash_digest = hashlib.sha256(hash_input.encode()).hexdigest()[:8] + return f"mem_{date_str}_{hash_digest}" + else: + hash_input = f"lat:{lat_rounded}|lon:{lon_rounded}" + hash_digest = hashlib.sha256(hash_input.encode()).hexdigest()[:8] + return f"mem_nodate_{hash_digest}" diff --git a/backend/app/utils/memory_monitor.py b/backend/app/utils/memory_monitor.py index 8078f4b41..c60c4a159 100644 --- a/backend/app/utils/memory_monitor.py +++ b/backend/app/utils/memory_monitor.py @@ -35,11 +35,7 @@ def wrapper(*args, **kwargs): # Log memory usage logger.info( - f"Memory usage for {func.__name__}:\n" - f" Before: {mem_before:.2f}MB\n" - f" After: {mem_after:.2f}MB\n" - f" Difference: {mem_after - mem_before:.2f}MB\n" - f" Execution time: {(end_time - start_time)*1000:.2f}ms" + f"Memory usage for {func.__name__}:\n Before: {mem_before:.2f}MB\n After: {mem_after:.2f}MB\n Difference: {mem_after - mem_before:.2f}MB\n Execution time: {(end_time - start_time) * 1000:.2f}ms" ) return result diff --git a/backend/app/utils/verify_memories_setup.py b/backend/app/utils/verify_memories_setup.py new file mode 100644 index 000000000..c803bb600 --- /dev/null +++ b/backend/app/utils/verify_memories_setup.py @@ -0,0 +1,297 @@ +""" +Verification script for Memories feature setup. +Checks all dependencies, database schema, file structure, and API routes. 
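+Each check prints a ✓/✗/⚠ line per item, and the run ends with a
+PASS/WARNING/FAIL summary.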
+ +Usage: + python -m app.utils.verify_memories_setup +""" + +import sys +import sqlite3 +import importlib +from pathlib import Path + + +# ANSI color codes for terminal output +class Colors: + GREEN = "\033[92m" + RED = "\033[91m" + YELLOW = "\033[93m" + BLUE = "\033[94m" + BOLD = "\033[1m" + RESET = "\033[0m" + + +def print_header(text): + """Print section header""" + print(f"\n{Colors.BOLD}{Colors.BLUE}{'=' * 60}{Colors.RESET}") + print(f"{Colors.BOLD}{Colors.BLUE}{text}{Colors.RESET}") + print(f"{Colors.BOLD}{Colors.BLUE}{'=' * 60}{Colors.RESET}\n") + + +def print_success(text): + """Print success message""" + print(f"{Colors.GREEN}✓ {text}{Colors.RESET}") + + +def print_error(text): + """Print error message""" + print(f"{Colors.RED}✗ {text}{Colors.RESET}") + + +def print_warning(text): + """Print warning message""" + print(f"{Colors.YELLOW}⚠ {text}{Colors.RESET}") + + +def print_info(text): + """Print info message""" + print(f" {text}") + + +def check_dependencies(): + """Check if all required packages are installed""" + print_header("1. Checking Python Dependencies") + + required_packages = { + "numpy": "1.26.4", + "sklearn": "1.5.1", # scikit-learn imports as sklearn + "fastapi": "0.111.0", + "sqlalchemy": None, + "pydantic": None, + } + + all_installed = True + + for package, expected_version in required_packages.items(): + try: + module = importlib.import_module(package) + version = getattr(module, "__version__", "Unknown") + + if expected_version and version != expected_version: + print_warning( + f"{package} installed (v{version}), expected v{expected_version}" + ) + else: + print_success(f"{package} v{version}") + except ImportError: + print_error(f"{package} is NOT installed") + all_installed = False + + return all_installed + + +def check_file_structure(): + """Check if all required files exist""" + print_header("2. Checking File Structure") + + backend_path = Path(__file__).parent.parent.parent + + required_files = [ + "app/utils/extract_location_metadata.py", + "app/utils/memory_clustering.py", + "app/routes/memories.py", + "app/database/images.py", + "main.py", + ] + + all_exist = True + + for file_path in required_files: + full_path = backend_path / file_path + if full_path.exists(): + print_success(f"{file_path}") + print_info(f" → {full_path}") + else: + print_error(f"{file_path} NOT FOUND") + all_exist = False + + return all_exist + + +def check_database_schema(): + """Check if database has required columns and indexes""" + print_header("3. 
Checking Database Schema")

+    backend_path = Path(__file__).parent.parent.parent
+    db_path = backend_path / "app" / "database" / "PictoPy.db"
+
+    if not db_path.exists():
+        print_warning("Database file 'PictoPy.db' not found")
+        print_info("  → Database will be created on first run")
+        return None  # Not an error, just not initialized yet
+
+    try:
+        conn = sqlite3.connect(str(db_path))
+        cursor = conn.cursor()
+
+        # Check if images table exists
+        cursor.execute(
+            "SELECT name FROM sqlite_master WHERE type='table' AND name='images'"
+        )
+        if not cursor.fetchone():
+            print_error("Table 'images' does not exist")
+            conn.close()
+            return False
+
+        print_success("Table 'images' exists")
+
+        # Check for required columns
+        cursor.execute("PRAGMA table_info(images)")
+        columns = {row[1]: row[2] for row in cursor.fetchall()}
+
+        required_columns = {
+            "latitude": "FLOAT",
+            "longitude": "FLOAT",
+            "captured_at": "DATETIME",
+        }
+
+        all_columns_exist = True
+        for col_name, col_type in required_columns.items():
+            if col_name in columns:
+                print_success(f"Column '{col_name}' ({columns[col_name]})")
+            else:
+                print_error(f"Column '{col_name}' NOT FOUND")
+                print_info("  → Run migration: python migrate_add_memories_columns.py")
+                print_info("  → Or restart the app (auto-migration enabled)")
+                all_columns_exist = False
+
+        # Check for indexes
+        cursor.execute("SELECT name FROM sqlite_master WHERE type='index'")
+        indexes = [row[0] for row in cursor.fetchall()]
+
+        required_indexes = [
+            "ix_images_latitude",
+            "ix_images_longitude",
+            "ix_images_captured_at",
+        ]
+
+        print()
+        for index_name in required_indexes:
+            if index_name in indexes:
+                print_success(f"Index '{index_name}'")
+            else:
+                print_warning(
+                    f"Index '{index_name}' not found (recommended for performance)"
+                )
+
+        conn.close()
+        return all_columns_exist
+
+    except Exception as e:
+        print_error(f"Database check failed: {e}")
+        return False
+
+
+def check_imports():
+    """Check if all modules can be imported"""
+    print_header("4. Checking Module Imports")
+
+    modules_to_check = [
+        "app.utils.extract_location_metadata",
+        "app.utils.memory_clustering",
+        "app.routes.memories",
+        "app.database.images",
+    ]
+
+    all_imported = True
+
+    for module_name in modules_to_check:
+        try:
+            importlib.import_module(module_name)
+            print_success(f"{module_name}")
+        except Exception as e:
+            print_error(f"{module_name} - {str(e)}")
+            all_imported = False
+
+    return all_imported
+
+
+def check_api_routes():
+    """Check if Memories API routes are registered"""
+    print_header("5. 
Checking API Routes") + + try: + # Import main app + sys.path.insert(0, str(Path(__file__).parent.parent.parent)) + from main import app + + # Get all routes + routes = [route.path for route in app.routes] + + required_routes = [ + "/api/memories/generate", + "/api/memories/timeline", + "/api/memories/on-this-day", + "/api/memories/locations", + ] + + all_routes_exist = True + for route_path in required_routes: + if route_path in routes: + print_success(f"{route_path}") + else: + print_error(f"{route_path} NOT FOUND") + print_info(" → Check if memories router is included in main.py") + all_routes_exist = False + + return all_routes_exist + + except Exception as e: + print_error(f"Failed to check routes: {e}") + return False + + +def print_summary(results): + """Print final summary""" + print_header("Verification Summary") + + all_passed = all(result is not False for result in results.values()) + + for check_name, result in results.items(): + status = "✓ PASS" if result else ("⚠ WARNING" if result is None else "✗ FAIL") + color = ( + Colors.GREEN + if result + else (Colors.YELLOW if result is None else Colors.RED) + ) + print(f"{color}{status}{Colors.RESET} - {check_name}") + + print() + if all_passed: + print( + f"{Colors.BOLD}{Colors.GREEN}🎉 All checks passed! Memories feature is ready to use.{Colors.RESET}" + ) + print_info("Next steps:") + print_info("1. Start the backend: cd backend && ./run.sh") + print_info( + "2. Run metadata extraction: python -m app.utils.extract_location_metadata" + ) + print_info("3. Test API endpoints: see MEMORIES_TESTING_GUIDE.md") + else: + print( + f"{Colors.BOLD}{Colors.RED}❌ Some checks failed. Please fix the issues above.{Colors.RESET}" + ) + print_info("See MEMORIES_README.md for setup instructions") + + print() + + +def main(): + """Run all verification checks""" + print(f"\n{Colors.BOLD}PictoPy Memories Feature Verification{Colors.RESET}") + print(f"{Colors.BOLD}====================================={Colors.RESET}") + + results = { + "Dependencies": check_dependencies(), + "File Structure": check_file_structure(), + "Database Schema": check_database_schema(), + "Module Imports": check_imports(), + "API Routes": check_api_routes(), + } + + print_summary(results) + + +if __name__ == "__main__": + main() diff --git a/backend/extract_metadata_simple.py b/backend/extract_metadata_simple.py new file mode 100644 index 000000000..35b4b80b2 --- /dev/null +++ b/backend/extract_metadata_simple.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python3 +""" +Standalone script to extract location data from metadata and update the database. 
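+Reads each row's metadata JSON, copies latitude/longitude/date_created into
+the dedicated columns, and prints a summary of how many rows were updated.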
+
+"""
+
+import json
+import sqlite3
+from pathlib import Path
+
+# Database path
+DB_PATH = Path(__file__).parent / "app" / "database" / "PictoPy.db"
+
+
+def extract_and_update():
+    """Extract location and datetime from metadata JSON and update database columns."""
+
+    print("=" * 70)
+    print("Starting metadata extraction...")
+    print("=" * 70)
+
+    conn = sqlite3.connect(DB_PATH)
+    cursor = conn.cursor()
+
+    # Get all images with metadata
+    cursor.execute(
+        "SELECT id, metadata FROM images WHERE metadata IS NOT NULL AND metadata != ''"
+    )
+    images = cursor.fetchall()
+
+    print(f"\nFound {len(images)} images with metadata")
+
+    if not images:
+        # Bail out early; also avoids division by zero in the summary below
+        conn.close()
+        print("\nNo images with metadata found; nothing to update.")
+        return
+
+    updated_count = 0
+    location_count = 0
+    datetime_count = 0
+    both_count = 0
+
+    for image_id, metadata_str in images:
+        try:
+            # Parse JSON metadata
+            metadata = json.loads(metadata_str)
+
+            # Extract values
+            latitude = metadata.get("latitude")
+            longitude = metadata.get("longitude")
+            date_created = metadata.get("date_created")
+
+            has_location = latitude is not None and longitude is not None
+            has_datetime = date_created is not None
+
+            if has_location or has_datetime:
+                # Update the database
+                if has_location and has_datetime:
+                    cursor.execute(
+                        "UPDATE images SET latitude = ?, longitude = ?, captured_at = ? WHERE id = ?",
+                        (latitude, longitude, date_created, image_id),
+                    )
+                    both_count += 1
+                elif has_location:
+                    cursor.execute(
+                        "UPDATE images SET latitude = ?, longitude = ? WHERE id = ?",
+                        (latitude, longitude, image_id),
+                    )
+                    location_count += 1
+                elif has_datetime:
+                    cursor.execute(
+                        "UPDATE images SET captured_at = ? WHERE id = ?",
+                        (date_created, image_id),
+                    )
+                    datetime_count += 1
+
+                updated_count += 1
+
+                # Show progress every 50 images
+                if updated_count % 50 == 0:
+                    print(f"  Processed {updated_count} images...")
+
+        except Exception as e:
+            print(f"  Error processing image {image_id}: {e}")
+            continue
+
+    # Commit changes
+    conn.commit()
+
+    # Get final statistics
+    cursor.execute("SELECT COUNT(*) FROM images WHERE latitude IS NOT NULL")
+    total_with_location = cursor.fetchone()[0]
+
+    cursor.execute("SELECT COUNT(*) FROM images WHERE captured_at IS NOT NULL")
+    total_with_datetime = cursor.fetchone()[0]
+
+    cursor.execute(
+        "SELECT COUNT(*) FROM images WHERE latitude IS NOT NULL AND captured_at IS NOT NULL"
+    )
+    total_with_both = cursor.fetchone()[0]
+
+    conn.close()
+
+    # Print summary
+    print("\n" + "=" * 70)
+    print("METADATA EXTRACTION SUMMARY")
+    print("=" * 70)
+    print(f"Total images processed: {len(images)}")
+    print(f"Images updated: {updated_count}")
+    print(
+        f"Images with location data: {total_with_location} ({100 * total_with_location / len(images):.1f}%)"
+    )
+    print(
+        f"Images with datetime: {total_with_datetime} ({100 * total_with_datetime / len(images):.1f}%)"
+    )
+    print(
+        f"Images with both: {total_with_both} ({100 * total_with_both / len(images):.1f}%)"
+    )
+    print(f"Images skipped (no data): {len(images) - updated_count}")
+    print("=" * 70)
+    print("\n✅ Extraction completed successfully!")
+    print("\nNext steps:")
+    print(" 1. Start the backend: .env/bin/python3.12 main.py")
+    print(" 2. 
Test API: curl -X POST 'http://localhost:8000/api/memories/generate'")
+    print()
+
+
+if __name__ == "__main__":
+    extract_and_update()
diff --git a/backend/main.py b/backend/main.py
index f9006727b..2e314c9bf 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -13,7 +13,7 @@
 from contextlib import asynccontextmanager
 from concurrent.futures import ProcessPoolExecutor
 from app.database.faces import db_create_faces_table
-from app.database.images import db_create_images_table
+from app.database.images import db_create_images_table, db_migrate_add_memories_columns
 from app.database.face_clusters import db_create_clusters_table
 from app.database.yolo_mapping import db_create_YOLO_classes_table
 from app.database.albums import db_create_albums_table
@@ -26,6 +26,7 @@
 from app.routes.images import router as images_router
 from app.routes.face_clusters import router as face_clusters_router
 from app.routes.user_preferences import router as user_preferences_router
+from app.routes.memories import router as memories_router
 from app.routes.shutdown import router as shutdown_router
 from fastapi.openapi.utils import get_openapi
 from app.logging.setup_logging import (
@@ -52,6 +53,19 @@ async def lifespan(app: FastAPI):
     generate_openapi_json()
     db_create_folders_table()
     db_create_images_table()
+
+    # Only run migrations in the primary process or when explicitly enabled
+    should_run_migrations = os.getenv("RUN_MIGRATIONS", "true").lower() == "true"
+    if should_run_migrations:
+        try:
+            db_migrate_add_memories_columns()
+            logger.info("Database migrations completed successfully")
+        except Exception as e:
+            logger.error(f"Failed to run database migrations: {e}", exc_info=True)
+
+    else:
+        logger.info("Skipping migrations (RUN_MIGRATIONS=false)")
+
     db_create_YOLO_classes_table()
     db_create_clusters_table()  # Create clusters table first since faces references it
     db_create_faces_table()
@@ -137,6 +151,9 @@ async def root():
 app.include_router(
     user_preferences_router, prefix="/user-preferences", tags=["User Preferences"]
 )
+app.include_router(
+    memories_router
+)  # Memories router (prefix already defined in router)
 app.include_router(shutdown_router, tags=["Shutdown"])
diff --git a/backend/migrate_add_memories_columns.py b/backend/migrate_add_memories_columns.py
new file mode 100644
index 000000000..d36ca398c
--- /dev/null
+++ b/backend/migrate_add_memories_columns.py
@@ -0,0 +1,253 @@
+"""
+One-time migration script to add Memories feature columns.
+Run this ONCE after pulling the new code.
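+(Safe to re-run: existing columns and indexes are detected and left unchanged.)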
+ +This script adds: +- latitude (REAL) column +- longitude (REAL) column +- captured_at (DATETIME) column +- Performance indexes for these columns + +Usage: + cd backend + python migrate_add_memories_columns.py +""" + +import sqlite3 +from pathlib import Path +import sys + + +# ANSI color codes for terminal output +class Colors: + GREEN = "\033[92m" + RED = "\033[91m" + YELLOW = "\033[93m" + BLUE = "\033[94m" + BOLD = "\033[1m" + RESET = "\033[0m" + + +DATABASE_PATH = Path(__file__).parent / "app" / "database" / "PictoPy.db" + + +def print_header(text): + """Print section header""" + print(f"\n{Colors.BOLD}{Colors.BLUE}{'=' * 70}{Colors.RESET}") + print(f"{Colors.BOLD}{Colors.BLUE}{text}{Colors.RESET}") + print(f"{Colors.BOLD}{Colors.BLUE}{'=' * 70}{Colors.RESET}\n") + + +def print_success(text): + """Print success message""" + print(f"{Colors.GREEN}✓ {text}{Colors.RESET}") + + +def print_error(text): + """Print error message""" + print(f"{Colors.RED}✗ {text}{Colors.RESET}") + + +def print_info(text): + """Print info message""" + print(f" {text}") + + +def check_database_exists(): + """Check if database file exists""" + if not DATABASE_PATH.exists(): + print_error(f"Database not found at: {DATABASE_PATH}") + print_info("The database will be created when you first run the app.") + print_info("Run this migration script AFTER the database is created.") + return False + + print_success(f"Database found at: {DATABASE_PATH}") + return True + + +def check_images_table(cursor): + """Check if images table exists""" + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name='images'" + ) + if not cursor.fetchone(): + print_error("Table 'images' does not exist") + print_info("Run the app first to create the database schema.") + return False + + print_success("Table 'images' exists") + return True + + +def get_existing_columns(cursor): + """Get list of existing columns in images table""" + cursor.execute("PRAGMA table_info(images)") + columns = {row[1]: row[2] for row in cursor.fetchall()} + return columns + + +def add_columns(cursor): + """Add new columns if they don't exist""" + print_header("Adding Memories Feature Columns") + + columns = get_existing_columns(cursor) + changes_made = False + + # Add latitude column + if "latitude" not in columns: + print_info("Adding column: latitude (REAL)") + cursor.execute("ALTER TABLE images ADD COLUMN latitude REAL") + print_success("Column 'latitude' added") + changes_made = True + else: + print_success(f"Column 'latitude' already exists ({columns['latitude']})") + + # Add longitude column + if "longitude" not in columns: + print_info("Adding column: longitude (REAL)") + cursor.execute("ALTER TABLE images ADD COLUMN longitude REAL") + print_success("Column 'longitude' added") + changes_made = True + else: + print_success(f"Column 'longitude' already exists ({columns['longitude']})") + + # Add captured_at column + if "captured_at" not in columns: + print_info("Adding column: captured_at (DATETIME)") + cursor.execute("ALTER TABLE images ADD COLUMN captured_at DATETIME") + print_success("Column 'captured_at' added") + changes_made = True + else: + print_success(f"Column 'captured_at' already exists ({columns['captured_at']})") + + return changes_made + + +def create_indexes(cursor): + """Create indexes for performance""" + print_header("Creating Performance Indexes") + + indexes = [ + ( + "ix_images_latitude", + "CREATE INDEX IF NOT EXISTS ix_images_latitude ON images(latitude)", + ), + ( + "ix_images_longitude", + "CREATE INDEX IF NOT 
EXISTS ix_images_longitude ON images(longitude)", + ), + ( + "ix_images_captured_at", + "CREATE INDEX IF NOT EXISTS ix_images_captured_at ON images(captured_at)", + ), + ( + "ix_images_favourite_captured_at", + "CREATE INDEX IF NOT EXISTS ix_images_favourite_captured_at ON images(isFavourite, captured_at)", + ), + ] + + for index_name, sql in indexes: + cursor.execute(sql) + print_success(f"Index '{index_name}' created") + + +def show_final_schema(cursor): + """Display final table schema""" + print_header("Final 'images' Table Schema") + + cursor.execute("PRAGMA table_info(images)") + print(f"\n{Colors.BOLD}Columns:{Colors.RESET}") + for row in cursor.fetchall(): + col_id, col_name, col_type, not_null, default, pk = row + nullable = "NOT NULL" if not_null else "NULL" + primary = " PRIMARY KEY" if pk else "" + print(f" {col_name:<20} {col_type:<15} {nullable:<10}{primary}") + + cursor.execute( + "SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='images'" + ) + indexes = cursor.fetchall() + print(f"\n{Colors.BOLD}Indexes:{Colors.RESET}") + for index in indexes: + print(f" - {index[0]}") + print() + + +def migrate(): + """Run the migration""" + print_header("PictoPy Memories Feature - Database Migration") + + # Check database exists + if not check_database_exists(): + sys.exit(1) + + conn = None + try: + # Connect to database + print_info("Connecting to database...") + conn = sqlite3.connect(DATABASE_PATH) + cursor = conn.cursor() + print_success("Connected successfully") + + # Check images table exists + if not check_images_table(cursor): + sys.exit(1) + + # Add columns + changes_made = add_columns(cursor) + + # Create indexes + create_indexes(cursor) + + # Commit changes + conn.commit() + + # Show final schema + show_final_schema(cursor) + + # Summary + print_header("Migration Summary") + if changes_made: + print( + f"{Colors.BOLD}{Colors.GREEN}✅ Migration completed successfully!{Colors.RESET}\n" + ) + print_info("New columns added to 'images' table:") + print_info(" - latitude (REAL)") + print_info(" - longitude (REAL)") + print_info(" - captured_at (DATETIME)") + print_info("") + print_info("Performance indexes created for fast queries.") + else: + print( + f"{Colors.BOLD}{Colors.GREEN}✅ Database is already up to date!{Colors.RESET}\n" + ) + print_info("All required columns and indexes already exist.") + + print(f"\n{Colors.BOLD}Next Steps:{Colors.RESET}") + print_info( + "1. Run metadata extraction: python -m app.utils.extract_location_metadata" + ) + print_info("2. Verify setup: python -m app.utils.verify_memories_setup") + print_info("3. 
Start the backend: ./run.sh") + print() + + except sqlite3.Error as e: + print_error(f"SQLite error: {e}") + if conn: + conn.rollback() + sys.exit(1) + + except Exception as e: + print_error(f"Unexpected error: {e}") + if conn: + conn.rollback() + sys.exit(1) + + finally: + if conn: + conn.close() + print_info("Database connection closed") + + +if __name__ == "__main__": + migrate() diff --git a/backend/requirements.txt b/backend/requirements.txt index 743538462..0d042edaf 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -31,7 +31,7 @@ mkdocs-material==9.6.16 mkdocs-material-extensions==1.3.1 mkdocs-swagger-ui-tag==0.7.1 mpmath==1.3.0 -numpy==1.26.4 +numpy==1.26.4 # Required for Memories feature: GPS calculations and array operations onnxruntime==1.17.1 opencv-python==4.9.0.80 orjson==3.10.3 @@ -46,7 +46,7 @@ python-dotenv==1.0.1 python-multipart==0.0.9 PyYAML==6.0.1 rich==13.7.1 -scikit-learn==1.5.1 +scikit-learn==1.5.1 # Required for Memories feature: DBSCAN spatial clustering algorithm scipy==1.14.0 shellingham==1.5.4 sniffio==1.3.1 diff --git a/backend/test_auto_gps_extraction.py b/backend/test_auto_gps_extraction.py new file mode 100644 index 000000000..fec6bcba2 --- /dev/null +++ b/backend/test_auto_gps_extraction.py @@ -0,0 +1,85 @@ +""" +Test script to verify automatic GPS extraction on image import. + +This script simulates adding a new image and verifies that: +1. GPS coordinates are automatically extracted +2. Capture datetime is automatically extracted +3. Data is properly saved to the database + +Usage: + python test_auto_gps_extraction.py +""" + +import sys +import os +import json + +# Add backend to path +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +from app.utils.extract_location_metadata import MetadataExtractor + + +def test_gps_extraction(): + """Test the GPS extraction functionality.""" + print("=" * 70) + print("Testing Automatic GPS Extraction") + print("=" * 70) + + extractor = MetadataExtractor() + + # Test case 1: Sample metadata with GPS + sample_metadata = { + "latitude": 28.6139, + "longitude": 77.2090, + "CreateDate": "2024:11:15 14:30:00", + } + + metadata_json = json.dumps(sample_metadata) + lat, lon, captured_at = extractor.extract_all(metadata_json) + + print("\nTest Case 1: Metadata with GPS") + print(f"Input: {sample_metadata}") + print("Extracted:") + print(f" - Latitude: {lat}") + print(f" - Longitude: {lon}") + print(f" - Captured At: {captured_at}") + + if lat and lon: + print("✅ GPS extraction working!") + else: + print("❌ GPS extraction failed") + + # Test case 2: Metadata without GPS + sample_metadata_no_gps = {"CreateDate": "2024:11:15 14:30:00"} + + metadata_json_no_gps = json.dumps(sample_metadata_no_gps) + lat2, lon2, captured_at2 = extractor.extract_all(metadata_json_no_gps) + + print("\nTest Case 2: Metadata without GPS") + print(f"Input: {sample_metadata_no_gps}") + print("Extracted:") + print(f" - Latitude: {lat2}") + print(f" - Longitude: {lon2}") + print(f" - Captured At: {captured_at2}") + + if lat2 is None and lon2 is None and captured_at2: + print("✅ Correctly handles images without GPS") + else: + print("❌ Unexpected behavior for images without GPS") + + print("\n" + "=" * 70) + print("INTEGRATION STATUS:") + print("=" * 70) + print("✅ MetadataExtractor imported successfully") + print("✅ extract_all() function working") + print("✅ Ready for automatic extraction on image import") + print("\nNEXT STEPS:") + print("1. Add a new folder with images that have GPS data") + print("2. 
Check the database to verify GPS fields are populated") + print("3. View Memories page to see the new images appear") + print("=" * 70) + + +if __name__ == "__main__": + test_gps_extraction() diff --git a/backend/test_memories_api.py b/backend/test_memories_api.py new file mode 100644 index 000000000..c02845da6 --- /dev/null +++ b/backend/test_memories_api.py @@ -0,0 +1,164 @@ +""" +Test script for Memories API endpoints + +This script tests all the Memories feature endpoints to verify they're working correctly. + +Usage: + python test_memories_api.py +""" + +import requests +import json + +BASE_URL = "http://localhost:52123/api/memories" +REQUEST_TIMEOUT = 10 # seconds + + +def print_response(endpoint: str, response: requests.Response): + """Pretty print API response.""" + print("\n" + "=" * 70) + print(f"Testing: {endpoint}") + print("=" * 70) + print(f"Status Code: {response.status_code}") + + if response.status_code == 200: + print("SUCCESS") + data = response.json() + print("\nResponse Preview:") + print(json.dumps(data, indent=2)[:500] + "...") + else: + print("FAILED") + print(f"Error: {response.text}") + print("=" * 70) + + +def test_generate_memories(): + """Test POST /api/memories/generate""" + print("\nTesting: Generate Memories") + + response = requests.post( + f"{BASE_URL}/generate", + params={"location_radius_km": 5.0, "date_tolerance_days": 3, "min_images": 2}, + timeout=REQUEST_TIMEOUT, + ) + + print_response("POST /api/memories/generate", response) + + if response.status_code == 200: + data = response.json() + print("\nSummary:") + print(f" - Memory Count: {data.get('memory_count', 0)}") + print(f" - Image Count: {data.get('image_count', 0)}") + print(f" - Message: {data.get('message', 'N/A')}") + + +def test_timeline(): + """Test GET /api/memories/timeline""" + print("\nTesting: Timeline") + + response = requests.get( + f"{BASE_URL}/timeline", + params={"days": 30, "location_radius_km": 5.0, "date_tolerance_days": 3}, + timeout=REQUEST_TIMEOUT, + ) + + print_response("GET /api/memories/timeline", response) + + if response.status_code == 200: + data = response.json() + print("\nSummary:") + print(f" - Memory Count: {data.get('memory_count', 0)}") + print(f" - Date Range: {data.get('date_range', {})}") + + +def test_on_this_day(): + """Test GET /api/memories/on-this-day""" + print("\nTesting: On This Day") + + response = requests.get(f"{BASE_URL}/on-this-day", timeout=REQUEST_TIMEOUT) + + print_response("GET /api/memories/on-this-day", response) + + if response.status_code == 200: + data = response.json() + print("\nSummary:") + print(f" - Today: {data.get('today', 'N/A')}") + print(f" - Years Found: {data.get('years', [])}") + print(f" - Image Count: {data.get('image_count', 0)}") + + +def test_locations(): + """Test GET /api/memories/locations""" + print("\nTesting: Locations") + + response = requests.get( + f"{BASE_URL}/locations", + params={"location_radius_km": 5.0, "max_sample_images": 3}, + timeout=REQUEST_TIMEOUT, + ) + + print_response("GET /api/memories/locations", response) + + if response.status_code == 200: + data = response.json() + print("\nSummary:") + print(f" - Location Count: {data.get('location_count', 0)}") + if data.get("locations"): + print( + f" - Top Location: {data['locations'][0].get('location_name', 'N/A')}" + ) + print( + f" - Photos at Top Location: {data['locations'][0].get('image_count', 0)}" + ) + + +def check_server(): + """Check if the server is running.""" + try: + response = requests.get("http://localhost:52123/health", timeout=2) + if 
response.status_code == 200: + print("Server is running") + return True + else: + print("Server responded but with unexpected status") + return False + except requests.exceptions.ConnectionError: + print("Server is not running") + print("\nStart the server with:") + print(" cd backend") + print(" python main.py") + return False + + +def main(): + """Run all tests.""" + print("\n" + "=" * 70) + print("MEMORIES API TEST SUITE") + print("=" * 70 + "\n") + + # Check if server is running + if not check_server(): + return + + print("\nRunning all tests...\n") + + try: + # Run all tests + test_generate_memories() + test_timeline() + test_on_this_day() + test_locations() + + print("\n" + "=" * 70) + print("ALL TESTS COMPLETED") + print("=" * 70 + "\n") + + except Exception as e: + print(f"\nTest failed with error: {e}") + import traceback + + traceback.print_exc() + + +if __name__ == "__main__": + main() diff --git a/backend/tests/test_face_clusters.py b/backend/tests/test_face_clusters.py index 1e6f7c398..3ccd284d5 100644 --- a/backend/tests/test_face_clusters.py +++ b/backend/tests/test_face_clusters.py @@ -4,7 +4,6 @@ from fastapi.testclient import TestClient from app.routes.face_clusters import router as face_clusters_router - app = FastAPI() app.include_router(face_clusters_router, prefix="/face_clusters") client = TestClient(app) diff --git a/backend/tests/test_folders.py b/backend/tests/test_folders.py index a0d26f0e5..21c4525a8 100644 --- a/backend/tests/test_folders.py +++ b/backend/tests/test_folders.py @@ -10,7 +10,6 @@ from app.routes.folders import router as folders_router - # ############################## # Pytest Fixtures # ############################## diff --git a/backend/tests/test_user_preferences.py b/backend/tests/test_user_preferences.py index 8a70c4b34..e77ec15f4 100644 --- a/backend/tests/test_user_preferences.py +++ b/backend/tests/test_user_preferences.py @@ -416,7 +416,6 @@ def test_update_user_preferences_response_structure(self): with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch( "app.routes.user_preferences.db_update_metadata" ) as mock_update: - mock_get.return_value = {} mock_update.return_value = True @@ -441,7 +440,6 @@ def test_update_user_preferences_preserves_other_metadata(self): with patch("app.routes.user_preferences.db_get_metadata") as mock_get, patch( "app.routes.user_preferences.db_update_metadata" ) as mock_update: - existing_metadata = { "user_preferences": {"YOLO_model_size": "small"}, "other_field": "should_be_preserved", diff --git a/docs/backend/backend_python/openapi.json b/docs/backend/backend_python/openapi.json index 39ce577cc..1389b792b 100644 --- a/docs/backend/backend_python/openapi.json +++ b/docs/backend/backend_python/openapi.json @@ -1117,9 +1117,14 @@ "in": "query", "required": false, "schema": { - "$ref": "#/components/schemas/InputType", + "allOf": [ + { + "$ref": "#/components/schemas/InputType" + } + ], "description": "Choose input type: 'path' or 'base64'", - "default": "path" + "default": "path", + "title": "Input Type" }, "description": "Choose input type: 'path' or 'base64'" } @@ -1232,7 +1237,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ErrorResponse" + "$ref": "#/components/schemas/app__schemas__user_preferences__ErrorResponse" } } } @@ -1272,7 +1277,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ErrorResponse" + "$ref": "#/components/schemas/app__schemas__user_preferences__ErrorResponse" } } } @@ -1282,7 +1287,243 @@ 
"content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ErrorResponse" + "$ref": "#/components/schemas/app__schemas__user_preferences__ErrorResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/memories/generate": { + "post": { + "tags": [ + "memories" + ], + "summary": "Generate Memories", + "description": "SIMPLIFIED: Generate memories from ALL images.\n- GPS images \u2192 location-based memories\n- Non-GPS images \u2192 monthly date-based memories\n\nReturns simple breakdown: {location_count, date_count, total}", + "operationId": "generate_memories_api_memories_generate_post", + "parameters": [ + { + "name": "location_radius_km", + "in": "query", + "required": false, + "schema": { + "type": "number", + "maximum": 100.0, + "minimum": 0.1, + "description": "Location clustering radius in km", + "default": 5.0, + "title": "Location Radius Km" + }, + "description": "Location clustering radius in km" + }, + { + "name": "date_tolerance_days", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 30, + "minimum": 1, + "description": "Date tolerance in days", + "default": 3, + "title": "Date Tolerance Days" + }, + "description": "Date tolerance in days" + }, + { + "name": "min_images", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 10, + "minimum": 1, + "description": "Minimum images per memory", + "default": 2, + "title": "Min Images" + }, + "description": "Minimum images per memory" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GenerateMemoriesResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/memories/timeline": { + "get": { + "tags": [ + "memories" + ], + "summary": "Get Timeline", + "description": "Get memories from the past N days as a timeline.\n\nThis endpoint:\n1. Calculates date range (today - N days to today)\n2. Fetches images within that date range\n3. Clusters them into memories\n4. 
Returns timeline of memories\n\nArgs:\n days: Number of days to look back (default: 365 = 1 year)\n location_radius_km: Location clustering radius (default: 5km)\n date_tolerance_days: Date tolerance for temporal clustering (default: 3)\n\nReturns:\n TimelineResponse with memories ordered by date\n\nRaises:\n HTTPException: If database query fails", + "operationId": "get_timeline_api_memories_timeline_get", + "parameters": [ + { + "name": "days", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 3650, + "minimum": 1, + "description": "Number of days to look back", + "default": 365, + "title": "Days" + }, + "description": "Number of days to look back" + }, + { + "name": "location_radius_km", + "in": "query", + "required": false, + "schema": { + "type": "number", + "maximum": 100.0, + "minimum": 0.1, + "description": "Location clustering radius in km", + "default": 5.0, + "title": "Location Radius Km" + }, + "description": "Location clustering radius in km" + }, + { + "name": "date_tolerance_days", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 30, + "minimum": 1, + "description": "Date tolerance in days", + "default": 3, + "title": "Date Tolerance Days" + }, + "description": "Date tolerance in days" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TimelineResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/memories/on-this-day": { + "get": { + "tags": [ + "memories" + ], + "summary": "Get On This Day", + "description": "Get photos taken on this date in previous years.\n\nThis endpoint:\n1. Gets current month and day\n2. Searches for images from this month-day in all previous years\n3. Groups by year\n4. Returns images sorted by year (most recent first)\n\nReturns:\n OnThisDayResponse with images from this date in previous years\n\nRaises:\n HTTPException: If database query fails", + "operationId": "get_on_this_day_api_memories_on_this_day_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OnThisDayResponse" + } + } + } + } + } + } + }, + "/api/memories/locations": { + "get": { + "tags": [ + "memories" + ], + "summary": "Get Locations", + "description": "Get all unique locations where photos were taken.\n\nThis endpoint:\n1. Fetches all images with GPS coordinates\n2. Clusters them by location\n3. Returns location clusters with photo counts\n4. 
Includes sample images for each location\n\nArgs:\n location_radius_km: Location clustering radius (default: 5km)\n max_sample_images: Maximum sample images per location (default: 5)\n\nReturns:\n LocationsResponse with list of location clusters\n\nRaises:\n HTTPException: If database query fails", + "operationId": "get_locations_api_memories_locations_get", + "parameters": [ + { + "name": "location_radius_km", + "in": "query", + "required": false, + "schema": { + "type": "number", + "maximum": 100.0, + "minimum": 0.1, + "description": "Location clustering radius in km", + "default": 5.0, + "title": "Location Radius Km" + }, + "description": "Location clustering radius in km" + }, + { + "name": "max_sample_images", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 20, + "minimum": 1, + "description": "Max sample images per location", + "default": 5, + "title": "Max Sample Images" + }, + "description": "Max sample images per location" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LocationsResponse" } } } @@ -1637,30 +1878,6 @@ ], "title": "DeleteFoldersResponse" }, - "ErrorResponse": { - "properties": { - "success": { - "type": "boolean", - "title": "Success" - }, - "error": { - "type": "string", - "title": "Error" - }, - "message": { - "type": "string", - "title": "Message" - } - }, - "type": "object", - "required": [ - "success", - "error", - "message" - ], - "title": "ErrorResponse", - "description": "Error response model" - }, "FaceSearchRequest": { "properties": { "path": { @@ -1739,6 +1956,43 @@ ], "title": "FolderDetails" }, + "GenerateMemoriesResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "message": { + "type": "string", + "title": "Message" + }, + "memory_count": { + "type": "integer", + "title": "Memory Count" + }, + "image_count": { + "type": "integer", + "title": "Image Count" + }, + "memories": { + "items": { + "$ref": "#/components/schemas/Memory" + }, + "type": "array", + "title": "Memories" + } + }, + "type": "object", + "required": [ + "success", + "message", + "memory_count", + "image_count", + "memories" + ], + "title": "GenerateMemoriesResponse", + "description": "Response for generate memories endpoint." + }, "GetAlbumImagesRequest": { "properties": { "password": { @@ -2246,7 +2500,6 @@ "metadata": { "anyOf": [ { - "additionalProperties": true, "type": "object" }, { @@ -2309,6 +2562,244 @@ ], "title": "InputType" }, + "LocationCluster": { + "properties": { + "location_name": { + "type": "string", + "title": "Location Name" + }, + "center_lat": { + "type": "number", + "title": "Center Lat" + }, + "center_lon": { + "type": "number", + "title": "Center Lon" + }, + "image_count": { + "type": "integer", + "title": "Image Count" + }, + "sample_images": { + "items": { + "$ref": "#/components/schemas/MemoryImage" + }, + "type": "array", + "title": "Sample Images" + } + }, + "type": "object", + "required": [ + "location_name", + "center_lat", + "center_lon", + "image_count", + "sample_images" + ], + "title": "LocationCluster", + "description": "Location cluster with photo count." 
+ }, + "LocationsResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "location_count": { + "type": "integer", + "title": "Location Count" + }, + "locations": { + "items": { + "$ref": "#/components/schemas/LocationCluster" + }, + "type": "array", + "title": "Locations" + } + }, + "type": "object", + "required": [ + "success", + "location_count", + "locations" + ], + "title": "LocationsResponse", + "description": "Response for locations endpoint." + }, + "Memory": { + "properties": { + "memory_id": { + "type": "string", + "title": "Memory Id" + }, + "title": { + "type": "string", + "title": "Title" + }, + "description": { + "type": "string", + "title": "Description" + }, + "location_name": { + "type": "string", + "title": "Location Name" + }, + "date_start": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Date Start" + }, + "date_end": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Date End" + }, + "image_count": { + "type": "integer", + "title": "Image Count" + }, + "images": { + "items": { + "$ref": "#/components/schemas/MemoryImage" + }, + "type": "array", + "title": "Images" + }, + "thumbnail_image_id": { + "type": "string", + "title": "Thumbnail Image Id" + }, + "center_lat": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "title": "Center Lat" + }, + "center_lon": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "title": "Center Lon" + }, + "type": { + "type": "string", + "enum": [ + "location", + "date" + ], + "title": "Type" + } + }, + "type": "object", + "required": [ + "memory_id", + "title", + "description", + "location_name", + "date_start", + "date_end", + "image_count", + "images", + "thumbnail_image_id", + "type" + ], + "title": "Memory", + "description": "Memory object containing grouped images." + }, + "MemoryImage": { + "properties": { + "id": { + "type": "string", + "title": "Id" + }, + "path": { + "type": "string", + "title": "Path" + }, + "thumbnailPath": { + "type": "string", + "title": "Thumbnailpath" + }, + "latitude": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "title": "Latitude" + }, + "longitude": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "title": "Longitude" + }, + "captured_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Captured At" + }, + "isFavourite": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "title": "Isfavourite", + "default": false + } + }, + "type": "object", + "required": [ + "id", + "path", + "thumbnailPath", + "latitude", + "longitude", + "captured_at" + ], + "title": "MemoryImage", + "description": "Image within a memory." 
+ }, "MetadataModel": { "properties": { "name": { @@ -2392,6 +2883,46 @@ ], "title": "MetadataModel" }, + "OnThisDayResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "today": { + "type": "string", + "title": "Today" + }, + "years": { + "items": { + "type": "integer" + }, + "type": "array", + "title": "Years" + }, + "image_count": { + "type": "integer", + "title": "Image Count" + }, + "images": { + "items": { + "$ref": "#/components/schemas/MemoryImage" + }, + "type": "array", + "title": "Images" + } + }, + "type": "object", + "required": [ + "success", + "today", + "years", + "image_count", + "images" + ], + "title": "OnThisDayResponse", + "description": "Response for on-this-day endpoint." + }, "RenameClusterData": { "properties": { "cluster_id": { @@ -2612,6 +3143,41 @@ ], "title": "SyncFolderResponse" }, + "TimelineResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "date_range": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Date Range" + }, + "memory_count": { + "type": "integer", + "title": "Memory Count" + }, + "memories": { + "items": { + "$ref": "#/components/schemas/Memory" + }, + "type": "array", + "title": "Memories" + } + }, + "type": "object", + "required": [ + "success", + "date_range", + "memory_count", + "memories" + ], + "title": "TimelineResponse", + "description": "Response for timeline endpoint." + }, "ToggleFavouriteRequest": { "properties": { "image_id": { @@ -2959,6 +3525,30 @@ "error" ], "title": "ErrorResponse" + }, + "app__schemas__user_preferences__ErrorResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "error": { + "type": "string", + "title": "Error" + }, + "message": { + "type": "string", + "title": "Message" + } + }, + "type": "object", + "required": [ + "success", + "error", + "message" + ], + "title": "ErrorResponse", + "description": "Error response model" } } } diff --git a/docs/frontend/memories.md b/docs/frontend/memories.md new file mode 100644 index 000000000..1a3203f4c --- /dev/null +++ b/docs/frontend/memories.md @@ -0,0 +1,309 @@ +# Memories Feature Documentation + +## Overview + +The Memories feature automatically organizes photos into meaningful collections based on location and date, providing a Google Photos-style experience for reliving past moments. + +## Features + +### 1. On This Day +Shows photos from the same date in previous years with a prominent featured card. + +**Display:** +- "On this day last year" for photos from exactly 1 year ago +- "[X] years ago" for photos from multiple years ago +- Featured hero image with gradient overlay +- Photo count and year badges + +### 2. Memory Types + +#### Location-Based Memories +Photos grouped by GPS coordinates using DBSCAN clustering: +- **Radius**: 5km (configurable) +- **Title Format**: "Trip to [City Name], [Year]" +- **Example**: "Trip to Jaipur, 2025" +- **Reverse Geocoding**: Maps coordinates to actual city names +- **Supported Cities**: 30+ major cities worldwide (Indian, European, American, Asian, etc.) + +#### Date-Based Memories +Photos grouped by month for images without GPS: +- **Grouping**: Monthly clusters +- **Title Format**: "[Month] [Year]" +- **Flexibility**: Works even without location data + +### 3. 
Memory Sections
+
+#### Recent Memories
+- **Timeframe**: Last 30 days
+- **Use Case**: Recent trips and events
+- **API**: `GET /api/memories/timeline?days=30`
+
+#### This Year
+- **Timeframe**: Last 365 days (current year)
+- **Use Case**: Year-in-review
+- **API**: `GET /api/memories/timeline?days=365`
+
+#### All Memories
+- **Timeframe**: All time
+- **Use Case**: Complete memory collection
+- **API**: `POST /api/memories/generate`
+
+### 4. Filtering
+
+**Filter Options:**
+- **All**: Shows all memories (default)
+- **Location**: Only memories with GPS coordinates
+- **Date**: Only memories without GPS (date-based)
+
+**Implementation:** (missing coordinates are `null`, matching `MemoriesPage.tsx`)
+```typescript
+const applyFilter = (memories: Memory[]) => {
+  if (filter === 'location') {
+    return memories.filter((m) => m.center_lat != null && m.center_lon != null);
+  }
+  if (filter === 'date') {
+    return memories.filter((m) => m.center_lat == null || m.center_lon == null);
+  }
+  return memories; // 'all'
+};
+```
+
+### 5. Memory Viewer
+
+Full-screen modal for viewing memory photos:
+
+**Features:**
+- Image grid with hover effects
+- Click to open MediaView
+- Zoom and pan support
+- Slideshow mode
+- Keyboard navigation
+- Info panel with metadata
+- Thumbnail strip
+
+**Controls:**
+- **Zoom**: Mouse wheel or +/- keys
+- **Navigation**: Arrow keys or buttons
+- **Slideshow**: Play/Pause button or Space key
+- **Info Panel**: Toggle with 'I' key
+- **Close**: ESC key or X button
+
+## Components
+
+### MemoriesPage
+Main page component with sections:
+- Header with refresh button
+- Filter buttons
+- On This Day section
+- Recent Memories grid
+- This Year grid
+- All Memories grid
+
+### MemoryCard
+Individual memory card display:
+- Thumbnail image
+- Memory title (formatted based on type)
+- Date range (relative format)
+- Location (if available)
+- Photo count badge
+- Type badge (Location/Date)
+
+### FeaturedMemoryCard
+Large featured card for "On This Day":
+- Hero image with gradient overlay
+- "On this day last year" text
+- Photo count and year info
+- Additional image previews
+
+### MemoryViewer
+Modal for viewing a memory's album:
+- Conditionally rendered to prevent event bubbling
+- Grid layout of all photos
+- MediaView integration for full-screen viewing
+- Proper z-index layering
+
+## State Management
+
+Using Redux Toolkit with slices:
+
+```typescript
+// Store shape (see memoriesSlice.ts)
+{
+  memories: {
+    allMemories: Memory[],
+    recentMemories: Memory[],
+    yearMemories: Memory[],
+    onThisDayImages: MemoryImage[],
+    onThisDayMeta: { today: string, years: number[] } | null,
+    selectedMemory: Memory | null,
+    loading: { all, recent, year, onThisDay },
+    error: { all, recent, year, onThisDay },
+    lastFetched: number | null
+  }
+}
+```
+
+**Key Actions:**
+- `fetchOnThisDay()` - Get photos from same date
+- `fetchRecentMemories(days)` - Get timeline memories
+- `fetchYearMemories(days)` - Get year memories
+- `fetchAllMemories()` - Generate all memories
+- `setSelectedMemory(memory)` - Open memory viewer
+
+## API Endpoints
+
+### GET `/api/memories/on-this-day`
+Returns photos from the same date in previous years.
+
+**Response:**
+```json
+{
+  "success": true,
+  "today": "December 14",
+  "years": [2024, 2023, 2022],
+  "image_count": 12,
+  "images": [...]
+}
+```
+
+### GET `/api/memories/timeline?days=30`
+Returns timeline-based memories for the specified number of days.
+
+**Parameters:**
+- `days` (query): Number of days to look back
+
+**Response:**
+```json
+{
+  "success": true,
+  "date_range": { "start": "...", "end": "..." },
+  "memory_count": 3,
+  "memories": [...]
+}
+```
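+
+From the frontend, this endpoint is wrapped by `getTimeline` in `src/services/memoriesApi.ts`. A minimal usage sketch (assuming it runs inside an `async` function; error handling elided):
+
+```typescript
+import { getTimeline } from '@/services/memoriesApi';
+
+// Fetch memories from the last 30 days and log a short summary.
+const timeline = await getTimeline(30);
+if (timeline.success) {
+  console.log(`${timeline.memory_count} memories`);
+  timeline.memories.forEach((m) => console.log(m.title, m.image_count));
+}
+```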
+
+### POST `/api/memories/generate`
+Generates all memories with clustering.
+
+**Parameters (query):**
+- `location_radius_km` (default: 5.0)
+- `date_tolerance_days` (default: 3)
+- `min_images` (default: 2)
+
+**Response:**
+```json
+{
+  "success": true,
+  "message": "Generated 10 memories",
+  "memory_count": 10,
+  "image_count": 120,
+  "memories": [...]
+}
+```
+
+## Backend Implementation
+
+### Memory Clustering Algorithm
+
+**Location-based (DBSCAN):**
+1. Extract GPS coordinates from images
+2. Convert to radians for haversine distance
+3. Apply DBSCAN clustering (5km radius)
+4. Group images by cluster
+5. Reverse geocode center coordinates
+6. Generate title with city name and year
+
+**Date-based (Monthly grouping):**
+1. Filter images without GPS
+2. Group by year-month
+3. Create monthly memories
+4. Use date as title
+
+### Reverse Geocoding
+
+Maps GPS coordinates to city names using a pre-defined database (a distance sketch follows the city list):
+
+```python
+def _reverse_geocode(self, lat: float, lon: float) -> str:
+    """Return the first known city within 50 km, else the raw coordinates."""
+    for city_name, (city_lat, city_lon) in self.CITY_COORDINATES.items():
+        distance = haversine_distance(lat, lon, city_lat, city_lon)
+        if distance < 50:
+            return city_name
+    return f"{lat:.4f}°, {lon:.4f}°"
+```
+
+**Supported Cities:**
+- India: Mumbai, Delhi, Bangalore, Hyderabad, Chennai, Kolkata, Pune, Ahmedabad, Jaipur, Lucknow, Kanpur, Nagpur, Visakhapatnam, Bhopal, Patna, Vadodara
+- Europe: London, Paris, Berlin, Madrid, Rome, Amsterdam, Prague, Vienna, Barcelona, Budapest, Lisbon
+- Americas: New York, Los Angeles, Toronto, San Francisco, Chicago, Vancouver
+- Asia-Pacific: Tokyo, Seoul, Singapore, Hong Kong, Sydney, Melbourne
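+
+The `haversine_distance` helper referenced above computes the great-circle distance between two coordinates. For reference, the same computation in TypeScript (a sketch; the backend implements this in Python, and the function name here is hypothetical):
+
+```typescript
+const EARTH_RADIUS_KM = 6371; // mean Earth radius, in kilometres
+
+// Great-circle (haversine) distance between two lat/lon points, in km.
+function haversineKm(lat1: number, lon1: number, lat2: number, lon2: number): number {
+  const rad = (deg: number): number => (deg * Math.PI) / 180;
+  const a =
+    Math.sin(rad(lat2 - lat1) / 2) ** 2 +
+    Math.cos(rad(lat1)) * Math.cos(rad(lat2)) * Math.sin(rad(lon2 - lon1) / 2) ** 2;
+  return 2 * EARTH_RADIUS_KM * Math.asin(Math.sqrt(a));
+}
+```
+
+Under this metric, DBSCAN with `eps` set to 5 km produces the location clusters described above, and `_reverse_geocode` uses the same distance to pick a nearby city.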
+
+## Bug Fixes & Improvements
+
+### Event Bubbling Fix
+**Problem:** Clicking MediaView controls (slideshow, info) closed the entire viewer.
+
+**Solution:** Conditional rendering of the MemoryViewer backdrop:
+```tsx
+{!showMediaView && (
+  <div onClick={handleCloseViewer}>
+    {/* Grid content */}
+  </div>
+)}
+```
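+
+`MemoryViewer.tsx` pairs this with `stopPropagation` on the inner modal container, so clicks inside the grid never bubble up to the backdrop's close handler. The combined pattern, as a sketch:
+
+```tsx
+// Backdrop click closes the viewer; the inner container swallows its own
+// clicks so only genuine outside-clicks dismiss the modal.
+<div onClick={handleCloseViewer}>
+  <div onClick={(e) => e.stopPropagation()}>
+    {/* grid content */}
+  </div>
+</div>
+```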
+
+### Image Upload Fix
+**Problem:** Images without GPS data could not be inserted into the database.
+
+**Solution:** Always include latitude/longitude fields (set to `None` if not available):
+```python
+image_record = {
+    "latitude": latitude,  # Can be None
+    "longitude": longitude,  # Can be None
+    "captured_at": captured_at
+}
+```
+
+### Title Display Enhancement
+**Problem:** Generic titles such as "Location - Nov 2025".
+
+**Solution:** Format as "Trip to [City], [Year]" using reverse geocoding:
+```typescript
+const year = memory.date_start ? new Date(memory.date_start).getFullYear() : '';
+displayTitle = `Trip to ${displayLocation}${year ? `, ${year}` : ''}`;
+```
+
+## Testing
+
+### Backend Tests
+Located in `backend/tests/`:
+- 100 unit tests covering all routes
+- Run with: `pytest tests/`
+
+### Frontend Tests
+Located in `frontend/src/pages/__tests__/`:
+- Page rendering tests
+- Run with: `npm test`
+
+### Manual Testing
+Use `backend/test_memories_api.py` for API endpoint testing:
+```bash
+python test_memories_api.py
+```
+
+## Performance Considerations
+
+1. **Lazy Loading**: Images load on demand with `loading="lazy"`
+2. **Thumbnail Optimization**: Uses Tauri's `convertFileSrc()` for efficient file access
+3. **Redux Memoization**: Uses `React.memo()` for card components
+4. **Efficient Queries**: SQLite indexes on `latitude`, `longitude`, `captured_at`
+5. **Background Processing**: Memory generation runs asynchronously
+
+## Future Enhancements
+
+- [ ] Custom memory creation
+- [ ] Memory sharing and export
+- [ ] Advanced filtering (by location, date range, etc.)
+- [ ] Memory annotations and descriptions
+- [ ] Map view for location-based memories
+- [ ] AI-generated memory titles
+- [ ] Multi-photo featured cards
+- [ ] Memory notifications and reminders
diff --git a/docs/overview/features.md b/docs/overview/features.md
index 801b9b086..4643de9e1 100644
--- a/docs/overview/features.md
+++ b/docs/overview/features.md
@@ -4,11 +4,15 @@
 
 - **Intelligent Photo Tagging**: Automatically tags photos based on detected objects, faces, and facial recognition.
 - **Traditional Gallery Management**: Complete album organization and management tools.
+- **Memories Feature**: Automatically organizes photos into meaningful collections based on location and date, with Google Photos-style presentation.
 
 ### Advanced Image Analysis
 
 - Object detection using **YOLOv11** for identifying various items in images
 - Face detection and clustering powered by **FaceNet**.
+- **Spatial Clustering**: Groups photos by location using DBSCAN algorithm (5km radius) +- **Temporal Grouping**: Organizes photos by date with monthly grouping +- **Reverse Geocoding**: Identifies actual city names from GPS coordinates ### Privacy-Focused Design @@ -29,6 +33,49 @@ - Embedded metadata - Find visually or semantically similar images +### Memories Feature + +Automatically creates meaningful photo collections inspired by Google Photos: + +#### **On This Day** +- Shows photos from the same date in previous years +- Featured card display with "On this day last year" messaging +- Nostalgic look back at past moments + +#### **Smart Grouping** +- **Location-based Memories**: Groups photos taken at the same location (5km radius using DBSCAN clustering) + - Displays as "Trip to [City Name], [Year]" (e.g., "Trip to Jaipur, 2025") + - Uses reverse geocoding to show actual city names + - Supports 30+ major cities worldwide +- **Date-based Memories**: Groups photos by month for images without GPS data + - Perfect for photos without location metadata + - Organized chronologically + +#### **Intelligent Filtering** +- Filter by All, Location, or Date memories +- View counts for each category +- Seamless navigation between memory types + +#### **Memory Sections** +- **Recent Memories**: Last 30 days of captured moments +- **This Year**: All memories from the current year +- **All Memories**: Complete collection organized by recency + +#### **Rich Viewing Experience** +- Full-screen image viewer with zoom support +- Slideshow mode for automatic playback +- Image metadata panel with EXIF data +- Keyboard shortcuts (Space, arrows, +/-, R, ESC) +- Thumbnail navigation strip +- Favorite marking and folder opening + +#### **Technical Implementation** +- Backend: Python with DBSCAN clustering algorithm +- Frontend: React + Redux Toolkit for state management +- Real-time memory generation with configurable parameters +- Flexible clustering: works with date OR location (not both required) +- Efficient SQLite queries for fast retrieval + ### Cross-Platform Compatibility - Available on major operating systems (Windows, macOS, Linux) diff --git a/frontend/src/app/store.ts b/frontend/src/app/store.ts index 7252274a6..7fb99d6a3 100644 --- a/frontend/src/app/store.ts +++ b/frontend/src/app/store.ts @@ -6,6 +6,7 @@ import imageReducer from '@/features/imageSlice'; import faceClustersReducer from '@/features/faceClustersSlice'; import infoDialogReducer from '@/features/infoDialogSlice'; import folderReducer from '@/features/folderSlice'; +import memoriesReducer from '@/store/slices/memoriesSlice'; export const store = configureStore({ reducer: { @@ -16,6 +17,7 @@ export const store = configureStore({ infoDialog: infoDialogReducer, folders: folderReducer, search: searchReducer, + memories: memoriesReducer, }, }); // Infer the `RootState` and `AppDispatch` types from the store itself diff --git a/frontend/src/components/Media/MediaView.tsx b/frontend/src/components/Media/MediaView.tsx index afc111d3e..7530c8bf4 100644 --- a/frontend/src/components/Media/MediaView.tsx +++ b/frontend/src/components/Media/MediaView.tsx @@ -26,6 +26,7 @@ export function MediaView({ onClose, type = 'image', images = [], + onToggleFavorite, }: MediaViewProps) { const dispatch = useDispatch(); @@ -114,11 +115,22 @@ export function MediaView({ const handleToggleFavourite = useCallback(() => { if (currentImage) { if (currentImage?.id) { - toggleFavourite(currentImage.id); + // Use custom handler if provided, otherwise use default + if 
(onToggleFavorite) { + onToggleFavorite(currentImage.id); + } else { + toggleFavourite(currentImage.id); + } } if (location.pathname === ROUTES.FAVOURITES) handleClose(); } - }, [currentImage, toggleFavourite]); + }, [ + currentImage, + toggleFavourite, + onToggleFavorite, + location.pathname, + handleClose, + ]); const handleZoomIn = useCallback(() => { imageViewerRef.current?.zoomIn(); diff --git a/frontend/src/components/Memories/FeaturedMemoryCard.tsx b/frontend/src/components/Memories/FeaturedMemoryCard.tsx new file mode 100644 index 000000000..b7ce17c76 --- /dev/null +++ b/frontend/src/components/Memories/FeaturedMemoryCard.tsx @@ -0,0 +1,167 @@ +/** + * FeaturedMemoryCard Component + * + * Large, prominent card for "On This Day" section. + * Shows hero image with "X years ago today" text overlay. + */ + +import React from 'react'; +import { MemoryImage } from '@/services/memoriesApi'; +import { + calculateYearsAgo, + formatPhotoCount, + getThumbnailUrl, +} from '@/services/memoriesApi'; + +interface FeaturedMemoryCardProps { + images: MemoryImage[]; + today: string; + years: number[]; + onClick: () => void; +} + +/** + * Featured memory card for "On This Day" section + * Shows larger hero image with prominent styling + */ +export const FeaturedMemoryCard = React.memo( + ({ images, years, onClick }) => { + // Get the first image as hero + const heroImage = images[0]; + + if (!heroImage) return null; + + const thumbnailUrl = getThumbnailUrl(heroImage); + + // Calculate years ago from the captured date + const yearsAgo = heroImage.captured_at + ? calculateYearsAgo(heroImage.captured_at) + : 0; + + // Handle image load error + const handleImageError = (e: React.SyntheticEvent) => { + e.currentTarget.src = '/photo.png'; + }; + + return ( +
{ + if (e.key === 'Enter' || e.key === ' ') { + e.preventDefault(); + onClick(); + } + }} + aria-label={`View On This Day memory from ${yearsAgo} years ago`} + > +
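+      {/* The whole card is interactive: Enter and Space trigger onClick via the keydown handler above */}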
+ {/* Hero Image */} +
+ On This Day + + {/* Gradient Overlay */} +
+ + {/* Content Overlay */} +
+ {/* "On This Day" Badge */} +
+ + + + On This Day +
+ + {/* Years Ago Text */} +

+ {yearsAgo === 1 + ? 'On this day last year' + : yearsAgo > 0 + ? `${yearsAgo} years ago` + : 'Today'} +

+ + {/* Photo Count */} +
+ + + + {formatPhotoCount(images.length)} + {years.length > 1 && + ` from ${years.length} ${years.length === 1 ? 'year' : 'years'}`} +
+
+
+ + {/* Additional Images Preview (if more than 1) */} + {images.length > 1 && ( +
+ {images.slice(1, 4).map((img, idx) => ( +
+ +
+ ))} + {images.length > 4 && ( +
+ +{images.length - 4} +
+ )} +
+ )} +
+ + {/* CTA Text */} +
+

+ Click to relive these memories → +

+
+
+ ); + }, +); + +FeaturedMemoryCard.displayName = 'FeaturedMemoryCard'; + +export default FeaturedMemoryCard; diff --git a/frontend/src/components/Memories/MemoriesPage.tsx b/frontend/src/components/Memories/MemoriesPage.tsx new file mode 100644 index 000000000..e9dec3429 --- /dev/null +++ b/frontend/src/components/Memories/MemoriesPage.tsx @@ -0,0 +1,469 @@ +/** + * MemoriesPage Component + * + * Main page for the Memories feature. + * Displays memories in sections: On This Day, Recent, This Year, All Memories. + * Includes filter tabs for All/Location/Date memories. + * + * Layout mimics Google Photos Memories with smart feed organization. + */ + +import React, { useEffect, useState } from 'react'; +import { useAppDispatch, useAppSelector } from '@/store/hooks'; +import { + fetchAllMemoriesData, + fetchAllMemories, + fetchRecentMemories, + fetchYearMemories, + fetchOnThisDay, + setSelectedMemory, + selectOnThisDayImages, + selectOnThisDayMeta, + selectRecentMemories, + selectYearMemories, + selectAllMemories, + selectMemoriesLoading, + selectMemoriesError, + selectTotalMemoryCount, +} from '@/store/slices/memoriesSlice'; +import { MemoryCard } from './MemoryCard'; +import { FeaturedMemoryCard } from './FeaturedMemoryCard'; +import { MemoryViewer } from './MemoryViewer.tsx'; +import type { Memory } from '@/services/memoriesApi'; + +/** + * Loading skeleton for memory cards + */ +const MemoryCardSkeleton: React.FC = () => ( +
+
+
+
+
+
+
+
+); + +/** + * Featured card skeleton for On This Day + */ +const FeaturedSkeleton: React.FC = () => ( +
+
+
+
+
+
+); + +/** + * Section header component + */ +const SectionHeader: React.FC<{ title: string; count?: number }> = ({ + title, + count, +}) => ( +

+ {title} + {count !== undefined && count > 0 && ( + + ({count}) + + )} +

+); + +/** + * Error message component with retry button + */ +const ErrorMessage: React.FC<{ message: string; onRetry: () => void }> = ({ + message, + onRetry, +}) => ( +
+
+ + + +

{message}

+ +
+
+); + +/** + * Empty state component + */ +const EmptyState: React.FC<{ message: string }> = ({ message }) => ( +
+ + + +

{message}

+
+); + +/** + * Main Memories Page Component + * SIMPLIFIED: Basic All/Location/Date filter buttons + */ +export const MemoriesPage: React.FC = () => { + const dispatch = useAppDispatch(); + + // Selectors + const onThisDayImages = useAppSelector(selectOnThisDayImages); + const onThisDayMeta = useAppSelector(selectOnThisDayMeta); + const recentMemories = useAppSelector(selectRecentMemories); + const yearMemories = useAppSelector(selectYearMemories); + const allMemories = useAppSelector(selectAllMemories); + const loading = useAppSelector(selectMemoriesLoading); + const error = useAppSelector(selectMemoriesError); + const totalCount = useAppSelector(selectTotalMemoryCount); + + // Simple filter state: 'all' | 'location' | 'date' + const [filter, setFilter] = useState<'all' | 'location' | 'date'>('all'); + + // Calculate counts + const locationCount = allMemories.filter( + (m) => m.center_lat != null && m.center_lon != null, + ).length; + const dateCount = allMemories.filter( + (m) => m.center_lat == null || m.center_lon == null, + ).length; + + // Simple filter function + const applyFilter = (memories: Memory[]) => { + if (filter === 'location') { + return memories.filter( + (m) => m.center_lat != null && m.center_lon != null, + ); + } + if (filter === 'date') { + return memories.filter( + (m) => m.center_lat == null || m.center_lon == null, + ); + } + return memories; // 'all' + }; + + // Apply filter + const filteredRecentMemories = applyFilter(recentMemories); + const filteredYearMemories = applyFilter(yearMemories); + const filteredAllMemories = applyFilter(allMemories); + + // Fetch all data on mount + useEffect(() => { + dispatch(fetchAllMemoriesData()); + }, [dispatch]); + + // Handle memory card click + const handleMemoryClick = (memory: Memory) => { + dispatch(setSelectedMemory(memory)); + }; + + // Handle On This Day click - create a temporary memory from images + const handleOnThisDayClick = () => { + if (onThisDayImages.length > 0 && onThisDayMeta) { + const tempMemory: Memory = { + memory_id: 'on-this-day', + title: `On This Day - ${onThisDayMeta.today}`, + description: `Photos from ${onThisDayMeta.years.join(', ')}`, + location_name: 'Various locations', + date_start: onThisDayImages[0]?.captured_at || null, + date_end: + onThisDayImages[onThisDayImages.length - 1]?.captured_at || null, + image_count: onThisDayImages.length, + images: onThisDayImages, + thumbnail_image_id: onThisDayImages[0]?.id || '', + center_lat: onThisDayImages[0]?.latitude ?? null, + center_lon: onThisDayImages[0]?.longitude ?? null, + }; + dispatch(setSelectedMemory(tempMemory)); + } + }; + + // Retry handlers + const handleRetryAll = () => dispatch(fetchAllMemories()); + const handleRetryRecent = () => dispatch(fetchRecentMemories(30)); + const handleRetryYear = () => dispatch(fetchYearMemories(365)); + const handleRetryOnThisDay = () => dispatch(fetchOnThisDay()); + + // Check if any data exists + const hasAnyData = + onThisDayImages.length > 0 || + recentMemories.length > 0 || + yearMemories.length > 0 || + allMemories.length > 0; + + return ( +
+ {/* Header */} +
+
+
+
+ + + +

+ Memories + {totalCount > 0 && ( + + ({totalCount}) + + )} +

+
+ + {/* Refresh button */} + +
+
+
+ + {/* Main Content */} +
+ {/* Simple Filter Buttons */} + {hasAnyData && ( +
+ + + +
+ )} + + {/* Global Loading State */} + {!hasAnyData && loading.all && ( +
+ +
+ {[...Array(8)].map((_, i) => ( + + ))} +
+
+ )} + + {/* Global Error State */} + {!hasAnyData && error.all && ( + + )} + + {/* Global Empty State */} + {!hasAnyData && !loading.all && !error.all && ( + + )} + + {/* ==================================================================== + SECTION 1: On This Day + ==================================================================== */} + {onThisDayImages.length > 0 && onThisDayMeta && ( +
+ + {loading.onThisDay ? ( + + ) : error.onThisDay ? ( + + ) : ( + + )} +
+ )} + + {/* ==================================================================== + SECTION 2: Recent Memories (Last 30 days) + ==================================================================== */} + {filteredRecentMemories.length > 0 && ( +
+ + {loading.recent ? ( +
+ {[...Array(4)].map((_, i) => ( + + ))} +
+ ) : error.recent ? ( + + ) : ( +
+ {filteredRecentMemories.map((memory: Memory) => ( + + ))} +
+ )} +
+ )} + + {/* ==================================================================== + SECTION 3: This Year + ==================================================================== */} + {filteredYearMemories.length > 0 && ( +
+ + {loading.year ? ( +
+ {[...Array(4)].map((_, i) => ( + + ))} +
+ ) : error.year ? ( + + ) : ( +
+ {filteredYearMemories.map((memory: Memory) => ( + + ))} +
+ )} +
+ )} + + {/* ==================================================================== + SECTION 4: All Memories + ==================================================================== */} + {filteredAllMemories.length > 0 && ( +
+ + {loading.all ? ( +
+ {[...Array(8)].map((_, i) => ( + + ))} +
+ ) : error.all ? ( + + ) : ( +
+ {filteredAllMemories.map((memory: Memory) => ( + + ))} +
+ )} +
+ )} +
+ + {/* Memory Viewer Modal */} + +
+ ); +}; + +export default MemoriesPage; diff --git a/frontend/src/components/Memories/MemoryCard.tsx b/frontend/src/components/Memories/MemoryCard.tsx new file mode 100644 index 000000000..21e06a8d2 --- /dev/null +++ b/frontend/src/components/Memories/MemoryCard.tsx @@ -0,0 +1,187 @@ +/** + * MemoryCard Component + * + * Displays a memory card with thumbnail, title, date, location, and photo count. + * Used in grid layouts for Recent Memories, This Year, and All Memories sections. + */ + +import React from 'react'; +import { Memory } from '@/services/memoriesApi'; +import { + formatDateRangeRelative, + formatPhotoCount, + getThumbnailUrl, +} from '@/services/memoriesApi'; + +interface MemoryCardProps { + memory: Memory; + onClick: (memory: Memory) => void; +} + +/** + * Memory card component with hover effects and responsive design + * SIMPLIFIED: Just show type badge, handle missing thumbnails, use convertFileSrc + */ +export const MemoryCard = React.memo(({ memory, onClick }) => { + // Get thumbnail image (first image or find by thumbnail_image_id) + const thumbnailImage = + memory.images.find((img) => img.id === memory.thumbnail_image_id) || + memory.images[0]; + + // Handle missing thumbnail gracefully - use path as fallback + const thumbnailUrl = thumbnailImage + ? getThumbnailUrl(thumbnailImage) + : memory.images[0]?.path + ? getThumbnailUrl(memory.images[0]) + : '/photo.png'; // Default placeholder + + // Determine memory type + // Backend uses 0,0 as sentinel for date-based memories (no GPS data) + const isDateBased = memory.center_lat == null || memory.center_lon == null; + + // Format title based on memory type + let displayTitle = memory.title || 'Untitled Memory'; + const displayLocation = memory.location_name || ''; + + // For location-based memories, format as "Trip to [Location], [Year]" + if (!isDateBased && displayLocation) { + // Extract year from date_start + const year = memory.date_start + ? new Date(memory.date_start).getFullYear() + : ''; + displayTitle = `Trip to ${displayLocation}${year ? `, ${year}` : ''}`; + } + + // Handle image load error + const handleImageError = (e: React.SyntheticEvent) => { + e.currentTarget.src = '/photo.png'; // Fallback to default + }; + + return ( +
onClick(memory)} + className="group transform cursor-pointer overflow-hidden rounded-lg bg-white shadow-md transition-all duration-200 hover:scale-[1.02] hover:shadow-xl dark:bg-gray-800" + role="button" + tabIndex={0} + onKeyDown={(e) => { + if (e.key === 'Enter' || e.key === ' ') { + e.preventDefault(); + onClick(memory); + } + }} + aria-label={`View memory: ${displayTitle}`} + > + {/* Thumbnail Image */} +
+ {displayTitle} + + {/* Type Badge - Location or Date */} +
+ {isDateBased ? ( + <> + + + + Date + + ) : ( + <> + + + + + Location + + )} +
+ + {/* Photo Count Badge */} +
+ {formatPhotoCount(memory.image_count)} +
+
+ + {/* Card Content */} +
+ {/* Title */} +

+ {displayTitle} +

+ + {/* Date Range - Relative Format */} +

+ {formatDateRangeRelative(memory.date_start, memory.date_end)} +

+ + {/* Location - Only show if not coordinates */} + {displayLocation && ( +
+ + + + + {displayLocation} +
+ )} + + {/* Description (optional, hidden on small screens) */} + {memory.description && ( +

+ {memory.description} +

+ )} +
+
+ ); +}); + +MemoryCard.displayName = 'MemoryCard'; + +export default MemoryCard; diff --git a/frontend/src/components/Memories/MemoryViewer.tsx b/frontend/src/components/Memories/MemoryViewer.tsx new file mode 100644 index 000000000..12fb41da7 --- /dev/null +++ b/frontend/src/components/Memories/MemoryViewer.tsx @@ -0,0 +1,366 @@ +/** + * MemoryViewer Component + * + * Full-screen modal for viewing a memory's details and all photos. + * Shows title, description, date, location, and a grid of all images. + * When an image is clicked, opens MediaView for full slideshow/zoom experience. + */ + +import React, { useEffect, useCallback, useState } from 'react'; +import { useAppDispatch, useAppSelector } from '@/store/hooks'; +import { + setSelectedMemory, + selectSelectedMemory, + toggleImageFavorite, +} from '@/store/slices/memoriesSlice'; +import { setCurrentViewIndex, setImages } from '@/features/imageSlice'; +import { showInfoDialog } from '@/features/infoDialogSlice'; +import { MediaView } from '@/components/Media/MediaView'; +import { + formatDateRangeRelative, + formatPhotoCount, + getThumbnailUrl, + generateMemoryTitle, + formatLocationName, +} from '@/services/memoriesApi'; +import { togglefav } from '@/api/api-functions/togglefav'; +import { getErrorMessage } from '@/lib/utils'; + +/** + * Memory Viewer Modal Component + */ +export const MemoryViewer: React.FC = () => { + const dispatch = useAppDispatch(); + const memory = useAppSelector(selectSelectedMemory); + const [showMediaView, setShowMediaView] = useState(false); + + // Handle close memory viewer + const handleCloseViewer = useCallback(() => { + dispatch(setSelectedMemory(null)); + }, [dispatch]); + + // Handle favorite toggle - update both API and Redux state + const handleToggleFavorite = useCallback( + async (imageId: string) => { + // Optimistic update - toggle UI immediately + dispatch(toggleImageFavorite(imageId)); + + try { + // Call API to toggle favorite in database + await togglefav(imageId); + } catch (error) { + // Revert the optimistic change on failure + dispatch(toggleImageFavorite(imageId)); + + // Show error dialog to user + dispatch( + showInfoDialog({ + title: 'Failed to Update Favorite', + message: getErrorMessage(error), + variant: 'error', + }), + ); + console.error('Failed to toggle favorite:', error); + } + }, + [dispatch], + ); + + // Handle image click - open MediaView + const handleImageClick = useCallback( + (index: number) => { + if (!memory) return; + + // Convert memory images to Image[] format for Redux state + const formattedImages = memory.images.map((img) => ({ + id: img.id, + path: img.path, + thumbnailPath: img.thumbnailPath, + folder_id: '', + isTagged: false, + isFavourite: img.isFavourite || false, + tags: [], + metadata: { + name: img.path.split('/').pop() || '', + date_created: img.captured_at, + width: 0, + height: 0, + file_location: img.path, + file_size: 0, + item_type: 'image' as const, + latitude: img.latitude || undefined, + longitude: img.longitude || undefined, + }, + })); + + // Set images in Redux state first + dispatch(setImages(formattedImages)); + // Then set the current index + dispatch(setCurrentViewIndex(index)); + setShowMediaView(true); + }, + [memory, dispatch], + ); + + // Handle MediaView close - go back to memory grid + const handleMediaViewClose = useCallback(() => { + setShowMediaView(false); + dispatch(setCurrentViewIndex(-1)); // Reset view index + }, [dispatch]); + + // Handle ESC key press + useEffect(() => { + const handleEsc = (e: KeyboardEvent) => { + if 
(e.key === 'Escape') { + handleCloseViewer(); + } + }; + + if (memory) { + document.addEventListener('keydown', handleEsc); + // Prevent body scroll when modal is open + document.body.style.overflow = 'hidden'; + } + + return () => { + document.removeEventListener('keydown', handleEsc); + document.body.style.overflow = 'unset'; + }; + }, [memory, handleCloseViewer]); + + // Don't render if no memory selected + if (!memory) return null; + + // Generate better title and format location + const displayTitle = generateMemoryTitle(memory); + const displayLocation = formatLocationName(memory.location_name); + + // Handle image load error + const handleImageError = (e: React.SyntheticEvent) => { + e.currentTarget.src = '/photo.png'; + }; + + return ( + <> + {/* Memory Grid Modal - hide when MediaView is open */} + {!showMediaView && ( +
+ {/* Modal Container */} +
+
e.stopPropagation()} + > + {/* Header */} +
+
+
+ {/* Title */} +

+ {displayTitle} +

+ + {/* Metadata */} +
+ {/* Date Range - Relative */} +
+ + + + + {formatDateRangeRelative( + memory.date_start, + memory.date_end, + )} + +
+ + {/* Location - Only show if not coordinates */} + {displayLocation && ( +
+ + + + + {displayLocation} +
+ )} + + {/* Photo Count */} +
+ + + + {formatPhotoCount(memory.image_count)} +
+
+ + {/* Description */} + {memory.description && ( +

+ {memory.description} +

+ )} +
+ + {/* Close Button */} + +
+
+ + {/* Images Grid */} +
+
+ {memory.images.map((image, index) => ( +
handleImageClick(index)} + > + {`Photo + + {/* Hover Overlay */} +
+ + + +
+
+ ))} +
+
+ + {/* Footer (optional - for future features like share, download, etc.) */} +
+
+

+ Click any photo to view with zoom and slideshow +

+ + {/* Future: Add share, download buttons here */} +
+ {/* Placeholder for future actions */} +
+
+
+
+
+
+ )} + + {/* MediaView for full-screen image viewing with zoom/slideshow */} + {showMediaView && memory && ( + ({ + id: img.id, + path: img.path, + thumbnailPath: img.thumbnailPath, + folder_id: '', // Memory images don't have folder_id + isTagged: false, // Memory images don't track tagging + isFavourite: img.isFavourite || false, // Use actual favorite status from backend + tags: [], // Can be added later if needed + metadata: { + name: img.path.split('/').pop() || '', + date_created: img.captured_at, + width: 0, + height: 0, + file_location: img.path, + file_size: 0, + item_type: 'image', + latitude: img.latitude || undefined, + longitude: img.longitude || undefined, + }, + }))} + /> + )} + + ); +}; + +export default MemoryViewer; diff --git a/frontend/src/components/Memories/index.ts b/frontend/src/components/Memories/index.ts new file mode 100644 index 000000000..9e65554e2 --- /dev/null +++ b/frontend/src/components/Memories/index.ts @@ -0,0 +1,14 @@ +/** + * Memories Component Exports + * + * Barrel file for clean imports across the application. + * Import components like: import { MemoriesPage, MemoryCard } from '@/components/Memories' + */ + +export { default as MemoriesPage } from './MemoriesPage'; +export { default as MemoryCard } from './MemoryCard'; +export { default as FeaturedMemoryCard } from './FeaturedMemoryCard'; +export { default as MemoryViewer } from './MemoryViewer'; + +// Export types if needed +export type { Memory, MemoryImage } from '@/services/memoriesApi'; diff --git a/frontend/src/routes/AppRoutes.tsx b/frontend/src/routes/AppRoutes.tsx index 22153edbb..1dfb8177a 100644 --- a/frontend/src/routes/AppRoutes.tsx +++ b/frontend/src/routes/AppRoutes.tsx @@ -9,6 +9,7 @@ import { MyFav } from '@/pages/Home/MyFav'; import { AITagging } from '@/pages/AITagging/AITagging'; import { PersonImages } from '@/pages/PersonImages/PersonImages'; import { ComingSoon } from '@/pages/ComingSoon/ComingSoon'; +import { MemoriesPage } from '@/components/Memories'; export const AppRoutes: React.FC = () => { return ( @@ -21,7 +22,7 @@ export const AppRoutes: React.FC = () => { } /> } /> } /> - } /> + } /> } /> diff --git a/frontend/src/services/memoriesApi.ts b/frontend/src/services/memoriesApi.ts new file mode 100644 index 000000000..2d18de9f6 --- /dev/null +++ b/frontend/src/services/memoriesApi.ts @@ -0,0 +1,528 @@ +/** + * Memories API Service + * + * Handles all HTTP requests to the memories backend endpoints. + * Provides type-safe interfaces and error handling. 
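+ * All requests go through axios against `${BACKEND_URL}/api/memories`.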
+ */ + +import axios, { AxiosError } from 'axios'; +import { convertFileSrc } from '@tauri-apps/api/core'; +import { BACKEND_URL } from '@/config/Backend'; + +const API_BASE_URL = `${BACKEND_URL}/api/memories`; + +// ============================================================================ +// TypeScript Interfaces +// ============================================================================ + +/** + * Individual image within a memory + */ +export interface MemoryImage { + id: string; + path: string; + thumbnailPath: string; + latitude: number | null; + longitude: number | null; + captured_at: string | null; // ISO 8601 format + isFavourite?: boolean; // Favorite status +} + +/** + * Memory object representing a collection of photos + */ +export interface Memory { + memory_id: string; + title: string; + description: string; + location_name: string; + date_start: string | null; // ISO 8601 format + date_end: string | null; // ISO 8601 format + image_count: number; + images: MemoryImage[]; + thumbnail_image_id: string; + center_lat: number | null; + center_lon: number | null; +} + +/** + * Response from POST /api/memories/generate + */ +export interface GenerateMemoriesResponse { + success: boolean; + message: string; + memory_count: number; + image_count: number; + memories: Memory[]; +} + +/** + * Response from GET /api/memories/timeline + */ +export interface TimelineResponse { + success: boolean; + date_range: { + start: string; + end: string; + }; + memory_count: number; + memories: Memory[]; +} + +/** + * Response from GET /api/memories/on-this-day + */ +export interface OnThisDayResponse { + success: boolean; + today: string; // e.g., "December 14" + years: number[]; // [2024, 2023, 2022] + image_count: number; + images: MemoryImage[]; +} + +/** + * Location cluster with sample images + */ +export interface LocationCluster { + location_name: string; + center_lat: number; + center_lon: number; + image_count: number; + sample_images: MemoryImage[]; +} + +/** + * Response from GET /api/memories/locations + */ +export interface LocationsResponse { + success: boolean; + location_count: number; + locations: LocationCluster[]; +} + +/** + * API Error structure + */ +export interface ApiError { + message: string; + status?: number; + details?: string; +} + +// ============================================================================ +// API Functions +// ============================================================================ + +/** + * Generate all memories from images with location data + * + * @param options - Clustering parameters + * @returns Generated memories + */ +export const generateMemories = async (options?: { + location_radius_km?: number; + date_tolerance_days?: number; + min_images?: number; +}): Promise => { + try { + const params = new URLSearchParams(); + if (options?.location_radius_km) + params.append( + 'location_radius_km', + options.location_radius_km.toString(), + ); + if (options?.date_tolerance_days) + params.append( + 'date_tolerance_days', + options.date_tolerance_days.toString(), + ); + if (options?.min_images) + params.append('min_images', options.min_images.toString()); + + const response = await axios.post( + `${API_BASE_URL}/generate${params.toString() ? '?' 
+ params.toString() : ''}`, + ); + + return response.data; + } catch (error) { + throw handleApiError(error); + } +}; + +/** + * Get memories from the past N days as a timeline + * + * @param days - Number of days to look back (default: 365) + * @param options - Clustering parameters + * @returns Timeline memories + */ +export const getTimeline = async ( + days: number = 365, + options?: { + location_radius_km?: number; + date_tolerance_days?: number; + }, +): Promise => { + try { + const params = new URLSearchParams(); + params.append('days', days.toString()); + if (options?.location_radius_km) + params.append( + 'location_radius_km', + options.location_radius_km.toString(), + ); + if (options?.date_tolerance_days) + params.append( + 'date_tolerance_days', + options.date_tolerance_days.toString(), + ); + + const response = await axios.get( + `${API_BASE_URL}/timeline?${params.toString()}`, + ); + + return response.data; + } catch (error) { + throw handleApiError(error); + } +}; + +/** + * Get photos taken on this date in previous years + * + * @returns On This Day images + */ +export const getOnThisDay = async (): Promise => { + try { + const response = await axios.get( + `${API_BASE_URL}/on-this-day`, + ); + + return response.data; + } catch (error) { + throw handleApiError(error); + } +}; + +/** + * Get all unique locations where photos were taken + * + * @param options - Clustering and sampling parameters + * @returns Location clusters + */ +export const getLocations = async (options?: { + location_radius_km?: number; + max_sample_images?: number; +}): Promise => { + try { + const params = new URLSearchParams(); + if (options?.location_radius_km) + params.append( + 'location_radius_km', + options.location_radius_km.toString(), + ); + if (options?.max_sample_images) + params.append('max_sample_images', options.max_sample_images.toString()); + + const response = await axios.get( + `${API_BASE_URL}/locations${params.toString() ? '?' 
+ params.toString() : ''}`, + ); + + return response.data; + } catch (error) { + throw handleApiError(error); + } +}; + +// ============================================================================ +// Error Handling +// ============================================================================ + +/** + * Convert Axios errors to our ApiError format + */ +const handleApiError = (error: unknown): ApiError => { + if (axios.isAxiosError(error)) { + const axiosError = error as AxiosError<{ + detail?: string; + message?: string; + }>; + + return { + message: + axiosError.response?.data?.message || + axiosError.response?.data?.detail || + axiosError.message || + 'An unknown error occurred', + status: axiosError.response?.status, + details: axiosError.response?.statusText, + }; + } + + if (error instanceof Error) { + return { + message: error.message, + }; + } + + return { + message: 'An unexpected error occurred', + }; +}; + +// ============================================================================ +// Utility Functions +// ============================================================================ + +/** + * Format a date string to human-readable format + * + * @param isoDate - ISO 8601 date string + * @returns Formatted date (e.g., "November 25, 2025") + */ +export const formatMemoryDate = (isoDate: string | null): string => { + if (!isoDate) return 'Unknown date'; + + try { + const date = new Date(isoDate); + return date.toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric', + }); + } catch { + return 'Invalid date'; + } +}; + +/** + * Format date range for memory display + * + * @param startDate - Start date ISO string + * @param endDate - End date ISO string + * @returns Formatted range (e.g., "Nov 25 - Nov 27, 2025") + */ +export const formatDateRange = ( + startDate: string | null, + endDate: string | null, +): string => { + if (!startDate || !endDate) return 'Unknown date'; + + try { + const start = new Date(startDate); + const end = new Date(endDate); + + // Same day + if (start.toDateString() === end.toDateString()) { + return start.toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric', + }); + } + + // Same month and year + if ( + start.getMonth() === end.getMonth() && + start.getFullYear() === end.getFullYear() + ) { + const monthYear = start.toLocaleDateString('en-US', { + month: 'long', + year: 'numeric', + }); + return `${start.getDate()} - ${end.getDate()}, ${monthYear}`; + } + + // Different months or years + const startFormatted = start.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + }); + const endFormatted = end.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric', + }); + return `${startFormatted} - ${endFormatted}`; + } catch { + return 'Invalid date range'; + } +}; + +/** + * Calculate years ago from a date + * + * @param isoDate - ISO date string + * @returns Number of years ago + */ +export const calculateYearsAgo = (isoDate: string): number => { + try { + const date = new Date(isoDate); + const now = new Date(); + return now.getFullYear() - date.getFullYear(); + } catch { + return 0; + } +}; + +/** + * Format photo count + * + * @param count - Number of photos + * @returns Formatted string (e.g., "1 photo" or "5 photos") + */ +export const formatPhotoCount = (count: number): string => { + return count === 1 ? 
'1 photo' : `${count} photos`; +}; + +/** + * Format date range with relative time for recent dates + * + * @param startDate - Start date ISO string + * @param endDate - End date ISO string + * @returns Formatted range with relative dates like "Yesterday", "Last week", "2 months ago" + */ +export const formatDateRangeRelative = ( + startDate: string | null, + endDate: string | null, +): string => { + if (!startDate || !endDate) return 'Unknown date'; + + try { + const start = new Date(startDate); + const end = new Date(endDate); + const now = new Date(); + + // Calculate days difference from end date + const daysDiff = Math.floor( + (now.getTime() - end.getTime()) / (1000 * 60 * 60 * 24), + ); + + // Today + if (daysDiff === 0) { + return 'Today'; + } + + // Yesterday + if (daysDiff === 1) { + return 'Yesterday'; + } + + // This week (2-6 days ago) + if (daysDiff >= 2 && daysDiff <= 6) { + return `${daysDiff} days ago`; + } + + // Last week + if (daysDiff >= 7 && daysDiff <= 13) { + return 'Last week'; + } + + // This month (2-4 weeks ago) + if (daysDiff >= 14 && daysDiff <= 30) { + const weeks = Math.floor(daysDiff / 7); + return `${weeks} weeks ago`; + } + + // Recent months (1-12 months ago) + const monthsDiff = Math.floor(daysDiff / 30); + if (monthsDiff >= 1 && monthsDiff <= 11) { + return monthsDiff === 1 ? 'Last month' : `${monthsDiff} months ago`; + } + + // Over a year ago - show month and year + return start.toLocaleDateString('en-US', { + month: 'short', + year: 'numeric', + }); + } catch { + return formatDateRange(startDate, endDate); + } +}; + +/** + * Generate a human-readable title from location and date + * Improves ugly coordinate-based titles like "26.9333°, 75.9228° - November 2025" + * + * @param memory - Memory object with location and date info + * @returns Better title like "Weekend in Jaipur", "Jaipur Trip", or "December 2024" + */ +export const generateMemoryTitle = (memory: Memory): string => { + const location = memory.location_name; + const imageCount = memory.image_count; + + // Check if it's a date-based memory (no GPS data) + if (location === 'Date-Based Memory') { + // Use the title from backend which is already well-formatted for date-only memories + return memory.title; + } + + // If location doesn't look like coordinates, use it + if (!location.includes('°') && !location.match(/^-?\d+\.\d+/)) { + // Parse city name from location (e.g., "Jaipur, Rajasthan" -> "Jaipur") + const cityName = location.split(',')[0].trim(); + + // Add descriptive word based on image count + if (imageCount >= 50) { + return `${cityName} Adventure`; + } else if (imageCount >= 20) { + return `${cityName} Trip`; + } else if (imageCount >= 10) { + return `Weekend in ${cityName}`; + } else { + return `${cityName} Memories`; + } + } + + // Fallback: coordinates - try to make it cleaner + if (memory.date_start) { + const date = new Date(memory.date_start); + const monthYear = date.toLocaleDateString('en-US', { + month: 'long', + year: 'numeric', + }); + return `Memories from ${monthYear}`; + } + + // Last resort + return memory.title || 'Photo Collection'; +}; + +/** + * Format location name by removing coordinates if present + * + * @param locationName - Raw location name from API + * @returns Cleaned location name or empty string if only coordinates or date-based + */ +export const formatLocationName = (locationName: string): string => { + // Hide date-based memories indicator (backend sends "Date-Based Memory") + if (locationName === 'Date-Based Memory') { + return ''; + } + + // If 
it looks like coordinates (contains ° or is a number pattern), hide it + if ( + locationName.includes('°') || + locationName.match(/^-?\d+\.\d+.*-?\d+\.\d+/) + ) { + return ''; // Hide ugly coordinates + } + + return locationName; +}; + +/** + * Get thumbnail URL with fallback + * + * @param image - Memory image object + * @returns Thumbnail URL or placeholder + */ +export const getThumbnailUrl = (image: MemoryImage): string => { + // Use Tauri's convertFileSrc for proper file path handling in desktop app + if (image.thumbnailPath) { + return convertFileSrc(image.thumbnailPath); + } + + // Fallback to placeholder + return '/photo.png'; +}; diff --git a/frontend/src/store/hooks.ts b/frontend/src/store/hooks.ts new file mode 100644 index 000000000..96fc8a456 --- /dev/null +++ b/frontend/src/store/hooks.ts @@ -0,0 +1,14 @@ +/** + * Redux Hooks + * + * Typed hooks for use throughout the application. + * These hooks ensure type safety when using Redux with TypeScript. + */ + +import { useDispatch, useSelector } from 'react-redux'; +import type { TypedUseSelectorHook } from 'react-redux'; +import type { RootState, AppDispatch } from '../app/store'; + +// Use throughout the app instead of plain `useDispatch` and `useSelector` +export const useAppDispatch: () => AppDispatch = useDispatch; +export const useAppSelector: TypedUseSelectorHook = useSelector; diff --git a/frontend/src/store/slices/memoriesSlice.ts b/frontend/src/store/slices/memoriesSlice.ts new file mode 100644 index 000000000..472255be6 --- /dev/null +++ b/frontend/src/store/slices/memoriesSlice.ts @@ -0,0 +1,390 @@ +/** + * Memories Redux Slice + * + * Manages state for the Memories feature including: + * - All memories (generated from all photos) + * - Recent memories (last 30 days) + * - Year memories (current year) + * - On This Day images + * - Selected memory for viewer modal + */ + +import { createSlice, createAsyncThunk, PayloadAction } from '@reduxjs/toolkit'; +import { + generateMemories, + getTimeline, + getOnThisDay, + Memory, + MemoryImage, + ApiError, +} from '@/services/memoriesApi'; + +// ============================================================================ +// State Interface +// ============================================================================ + +interface MemoriesState { + // Memory collections + allMemories: Memory[]; + recentMemories: Memory[]; + yearMemories: Memory[]; + onThisDayImages: MemoryImage[]; + onThisDayMeta: { + today: string; + years: number[]; + } | null; + + // Selected memory for viewer modal + selectedMemory: Memory | null; + + // Loading states for each section + loading: { + all: boolean; + recent: boolean; + year: boolean; + onThisDay: boolean; + }; + + // Error states + error: { + all: string | null; + recent: string | null; + year: string | null; + onThisDay: string | null; + }; + + // Metadata + lastFetched: number | null; +} + +// ============================================================================ +// Initial State +// ============================================================================ + +const initialState: MemoriesState = { + allMemories: [], + recentMemories: [], + yearMemories: [], + onThisDayImages: [], + onThisDayMeta: null, + selectedMemory: null, + loading: { + all: false, + recent: false, + year: false, + onThisDay: false, + }, + error: { + all: null, + recent: null, + year: null, + onThisDay: null, + }, + lastFetched: null, +}; + +// ============================================================================ +// Async Thunks +// 
============================================================================ + +/** + * Fetch all memories from photos with location data + */ +export const fetchAllMemories = createAsyncThunk< + Memory[], + void, + { rejectValue: string } +>('memories/fetchAll', async (_, { rejectWithValue }) => { + try { + const response = await generateMemories(); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } +}); + +/** + * Fetch recent memories (last 30 days) + */ +export const fetchRecentMemories = createAsyncThunk< + Memory[], + number, + { rejectValue: string } +>('memories/fetchRecent', async (days = 30, { rejectWithValue }) => { + try { + const response = await getTimeline(days); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } +}); + +/** + * Fetch memories from current year + */ +export const fetchYearMemories = createAsyncThunk< + Memory[], + number, + { rejectValue: string } +>('memories/fetchYear', async (days = 365, { rejectWithValue }) => { + try { + const response = await getTimeline(days); + return response.memories; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } +}); + +/** + * Fetch "On This Day" images + */ +export const fetchOnThisDay = createAsyncThunk< + { images: MemoryImage[]; today: string; years: number[] }, + void, + { rejectValue: string } +>('memories/fetchOnThisDay', async (_, { rejectWithValue }) => { + try { + const response = await getOnThisDay(); + return { + images: response.images, + today: response.today, + years: response.years, + }; + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } +}); + +/** + * Fetch all memories data at once (parallel requests) + */ +export const fetchAllMemoriesData = createAsyncThunk< + void, + void, + { rejectValue: string } +>('memories/fetchAllData', async (_, { dispatch, rejectWithValue }) => { + try { + await Promise.all([ + dispatch(fetchOnThisDay()), + dispatch(fetchRecentMemories(30)), + dispatch(fetchYearMemories(365)), + dispatch(fetchAllMemories()), + ]); + } catch (error) { + const apiError = error as ApiError; + return rejectWithValue(apiError.message); + } +}); + +// ============================================================================ +// Slice +// ============================================================================ + +const memoriesSlice = createSlice({ + name: 'memories', + initialState, + reducers: { + /** + * Set the selected memory for the viewer modal + */ + setSelectedMemory: (state, action: PayloadAction) => { + state.selectedMemory = action.payload; + }, + + /** + * Toggle favorite status of an image across all memories + */ + toggleImageFavorite: (state, action: PayloadAction) => { + const imageId = action.payload; + + // Helper function to update image in a memory array + const updateMemoriesArray = (memories: Memory[]) => { + memories.forEach((memory) => { + memory.images.forEach((image) => { + if (image.id === imageId) { + image.isFavourite = !image.isFavourite; + } + }); + }); + }; + + // Update across all memory collections + updateMemoriesArray(state.allMemories); + updateMemoriesArray(state.recentMemories); + updateMemoriesArray(state.yearMemories); + + // Update onThisDay images + state.onThisDayImages.forEach((image) => { + if (image.id === imageId) { + image.isFavourite = !image.isFavourite; + } + }); + + // 
Update selected memory if it exists + if (state.selectedMemory) { + state.selectedMemory.images.forEach((image) => { + if (image.id === imageId) { + image.isFavourite = !image.isFavourite; + } + }); + } + }, + + /** + * Clear all errors + */ + clearErrors: (state) => { + state.error = { + all: null, + recent: null, + year: null, + onThisDay: null, + }; + }, + + /** + * Reset memories state + */ + resetMemories: () => { + return initialState; + }, + }, + extraReducers: (builder) => { + // ======================================================================== + // Fetch All Memories + // ======================================================================== + builder + .addCase(fetchAllMemories.pending, (state) => { + state.loading.all = true; + state.error.all = null; + }) + .addCase(fetchAllMemories.fulfilled, (state, action) => { + state.loading.all = false; + state.allMemories = action.payload; + state.lastFetched = Date.now(); + }) + .addCase(fetchAllMemories.rejected, (state, action) => { + state.loading.all = false; + state.error.all = action.payload || 'Failed to fetch memories'; + }); + + // ======================================================================== + // Fetch Recent Memories + // ======================================================================== + builder + .addCase(fetchRecentMemories.pending, (state) => { + state.loading.recent = true; + state.error.recent = null; + }) + .addCase(fetchRecentMemories.fulfilled, (state, action) => { + state.loading.recent = false; + state.recentMemories = action.payload; + }) + .addCase(fetchRecentMemories.rejected, (state, action) => { + state.loading.recent = false; + state.error.recent = + action.payload || 'Failed to fetch recent memories'; + }); + + // ======================================================================== + // Fetch Year Memories + // ======================================================================== + builder + .addCase(fetchYearMemories.pending, (state) => { + state.loading.year = true; + state.error.year = null; + }) + .addCase(fetchYearMemories.fulfilled, (state, action) => { + state.loading.year = false; + state.yearMemories = action.payload; + }) + .addCase(fetchYearMemories.rejected, (state, action) => { + state.loading.year = false; + state.error.year = action.payload || 'Failed to fetch year memories'; + }); + + // ======================================================================== + // Fetch On This Day + // ======================================================================== + builder + .addCase(fetchOnThisDay.pending, (state) => { + state.loading.onThisDay = true; + state.error.onThisDay = null; + }) + .addCase(fetchOnThisDay.fulfilled, (state, action) => { + state.loading.onThisDay = false; + state.onThisDayImages = action.payload.images; + state.onThisDayMeta = { + today: action.payload.today, + years: action.payload.years, + }; + }) + .addCase(fetchOnThisDay.rejected, (state, action) => { + state.loading.onThisDay = false; + state.error.onThisDay = action.payload || 'Failed to fetch On This Day'; + }); + }, +}); + +// ============================================================================ +// Exports +// ============================================================================ + +export const { + setSelectedMemory, + toggleImageFavorite, + clearErrors, + resetMemories, +} = memoriesSlice.actions; + +export default memoriesSlice.reducer; + +// ============================================================================ +// Selectors +// 
============================================================================ + +export const selectAllMemories = (state: { memories: MemoriesState }) => + state.memories.allMemories; +export const selectRecentMemories = (state: { memories: MemoriesState }) => + state.memories.recentMemories; +export const selectYearMemories = (state: { memories: MemoriesState }) => + state.memories.yearMemories; +export const selectOnThisDayImages = (state: { memories: MemoriesState }) => + state.memories.onThisDayImages; +export const selectOnThisDayMeta = (state: { memories: MemoriesState }) => + state.memories.onThisDayMeta; +export const selectSelectedMemory = (state: { memories: MemoriesState }) => + state.memories.selectedMemory; +export const selectMemoriesLoading = (state: { memories: MemoriesState }) => + state.memories.loading; +export const selectMemoriesError = (state: { memories: MemoriesState }) => + state.memories.error; +export const selectLastFetched = (state: { memories: MemoriesState }) => + state.memories.lastFetched; + +/** + * Select total memory count across all sections + */ +export const selectTotalMemoryCount = (state: { memories: MemoriesState }) => { + return state.memories.allMemories.length; +}; + +/** + * Check if any section is loading + */ +export const selectIsAnyLoading = (state: { memories: MemoriesState }) => { + const { loading } = state.memories; + return loading.all || loading.recent || loading.year || loading.onThisDay; +}; + +/** + * Check if there are any errors + */ +export const selectHasAnyError = (state: { memories: MemoriesState }) => { + const { error } = state.memories; + return !!(error.all || error.recent || error.year || error.onThisDay); +}; diff --git a/frontend/src/types/Media.ts b/frontend/src/types/Media.ts index d7e0712fc..fbea71259 100644 --- a/frontend/src/types/Media.ts +++ b/frontend/src/types/Media.ts @@ -36,6 +36,7 @@ export interface MediaViewProps { onClose?: () => void; type?: string; images: Image[]; + onToggleFavorite?: (imageId: string) => void | Promise; } export interface SortingControlsProps {