From dc289a50089d7e74f64817c533fe95956fce9cd6 Mon Sep 17 00:00:00 2001 From: Zhenyu Zheng Date: Tue, 24 Feb 2026 23:03:04 +0800 Subject: [PATCH 1/3] =?UTF-8?q?fix:=20=E7=BB=9F=E4=B8=80=E6=97=B6=E9=97=B4?= =?UTF-8?q?=E6=88=B3=E4=B8=BA=E5=B8=A6=E6=97=B6=E5=8C=BA=E7=9A=84=20UTC=20?= =?UTF-8?q?datetime=EF=BC=8C=E4=BF=AE=E5=A4=8D=E8=B7=A8=E6=97=B6=E5=8C=BA?= =?UTF-8?q?=E9=83=A8=E7=BD=B2=E6=97=B6=E9=97=B4=E6=98=BE=E7=A4=BA=E9=94=99?= =?UTF-8?q?=E8=AF=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 新增 app/core/timezone.py:提供 utc_now()、get_app_tz()、to_app_tz() 工具函数 - 新增 APP_TIMEZONE 配置项(默认 Asia/Shanghai),控制 ICS/邮件等服务端输出的本地化时区 - 将所有模型的 DateTime 列改为 DateTime(timezone=True),default 改用 utc_now - 替换所有 datetime.utcnow() 直接调用为 utc_now()(含 services、api、security) - ICS 日历改为标准 UTC Z 格式(DTSTART:20240115T103000Z) - docker-compose.yml 添加 TZ 环境变量与 APP_TIMEZONE 联动 - 生成 Alembic 迁移 986ddbdad1d7 --- backend/.env.example | 8 + ...7\266\345\214\272\347\232\204_datetime.py" | 514 ++++++++++++++++++ backend/app/api/auth.py | 5 +- backend/app/api/campaigns.py | 5 +- backend/app/api/community_dashboard.py | 7 +- backend/app/api/publish.py | 4 +- backend/app/config.py | 3 + backend/app/core/security.py | 7 +- backend/app/core/timezone.py | 22 + backend/app/models/audit.py | 5 +- backend/app/models/campaign.py | 11 +- backend/app/models/committee.py | 15 +- backend/app/models/community.py | 7 +- backend/app/models/content.py | 13 +- backend/app/models/ecosystem.py | 9 +- backend/app/models/event.py | 15 +- backend/app/models/meeting.py | 13 +- backend/app/models/password_reset.py | 9 +- backend/app/models/people.py | 7 +- backend/app/models/publish_record.py | 9 +- backend/app/models/user.py | 7 +- backend/app/models/wechat_stats.py | 7 +- .../app/services/ecosystem/github_crawler.py | 8 +- backend/app/services/hugo.py | 6 +- backend/app/services/ics.py | 16 +- backend/app/services/notification.py | 4 +- backend/app/services/wechat_stats.py | 5 +- 
docker-compose.yml | 2 + 28 files changed, 644 insertions(+), 99 deletions(-) create mode 100644 "backend/alembic/versions/986ddbdad1d7_\345\260\206\346\227\266\351\227\264\346\210\263\345\210\227\346\224\271\344\270\272\345\270\246\346\227\266\345\214\272\347\232\204_datetime.py" create mode 100644 backend/app/core/timezone.py diff --git a/backend/.env.example b/backend/.env.example index 447a387..333265a 100644 --- a/backend/.env.example +++ b/backend/.env.example @@ -44,6 +44,14 @@ DEFAULT_ADMIN_USERNAME=admin DEFAULT_ADMIN_PASSWORD=admin123 DEFAULT_ADMIN_EMAIL=admin@example.com +# ───────────────────────────────────────────────────────────────────── +# 时区配置 +# ───────────────────────────────────────────────────────────────────── +# 服务端使用的时区,影响 ICS 日历文件和邮件通知中的时间显示 +# 数据库始终以 UTC 存储;此配置仅控制服务端生成内容的本地化时区 +# 可用值参见 IANA 时区数据库: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones +APP_TIMEZONE=Asia/Shanghai + # ───────────────────────────────────────────────────────────────────── # 服务器 # ───────────────────────────────────────────────────────────────────── diff --git "a/backend/alembic/versions/986ddbdad1d7_\345\260\206\346\227\266\351\227\264\346\210\263\345\210\227\346\224\271\344\270\272\345\270\246\346\227\266\345\214\272\347\232\204_datetime.py" "b/backend/alembic/versions/986ddbdad1d7_\345\260\206\346\227\266\351\227\264\346\210\263\345\210\227\346\224\271\344\270\272\345\270\246\346\227\266\345\214\272\347\232\204_datetime.py" new file mode 100644 index 0000000..3a08fdf --- /dev/null +++ "b/backend/alembic/versions/986ddbdad1d7_\345\260\206\346\227\266\351\227\264\346\210\263\345\210\227\346\224\271\344\270\272\345\270\246\346\227\266\345\214\272\347\232\204_datetime.py" @@ -0,0 +1,514 @@ +"""将时间戳列改为带时区的 datetime + +Revision ID: 986ddbdad1d7 +Revises: 7e70abbef6ae +Create Date: 2026-02-24 22:52:57.374499 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '986ddbdad1d7' +down_revision: Union[str, None] = '7e70abbef6ae' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('_alembic_tmp_campaign_contacts') + with op.batch_alter_table('campaign_contacts', schema=None) as batch_op: + batch_op.drop_constraint('uq_campaign_contact', type_='unique') + batch_op.create_index(batch_op.f('ix_campaign_contacts_id'), ['id'], unique=False) + + with op.batch_alter_table('campaigns', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_campaigns_id'), ['id'], unique=False) + + with op.batch_alter_table('channel_configs', schema=None) as batch_op: + batch_op.alter_column('enabled', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('0')) + + with op.batch_alter_table('checklist_items', schema=None) as batch_op: + batch_op.alter_column('phase', + existing_type=sa.VARCHAR(length=10), + type_=sa.Enum('pre', 'during', 'post', name='checklist_item_phase_enum'), + existing_nullable=False) + batch_op.alter_column('status', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('pending', 'done', 'skipped', name='checklist_status_enum'), + existing_nullable=True, + existing_server_default=sa.text("'pending'")) + + with op.batch_alter_table('checklist_template_items', schema=None) as batch_op: + batch_op.alter_column('phase', + existing_type=sa.VARCHAR(length=10), + type_=sa.Enum('pre', 'during', 'post', name='checklist_phase_enum'), + existing_nullable=False) + + with op.batch_alter_table('committee_members', schema=None) as batch_op: + batch_op.alter_column('is_active', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('1')) + batch_op.create_index(batch_op.f('ix_committee_members_committee_id'), ['committee_id'], unique=False) + batch_op.create_index(batch_op.f('ix_committee_members_email'), 
['email'], unique=False) + batch_op.create_index(batch_op.f('ix_committee_members_id'), ['id'], unique=False) + + with op.batch_alter_table('committees', schema=None) as batch_op: + batch_op.alter_column('is_active', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('1')) + batch_op.create_index(batch_op.f('ix_committees_community_id'), ['community_id'], unique=False) + batch_op.create_index(batch_op.f('ix_committees_id'), ['id'], unique=False) + + with op.batch_alter_table('communities', schema=None) as batch_op: + batch_op.alter_column('is_active', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('1')) + batch_op.create_index(batch_op.f('ix_communities_id'), ['id'], unique=False) + batch_op.create_index(batch_op.f('ix_communities_name'), ['name'], unique=True) + batch_op.create_index(batch_op.f('ix_communities_slug'), ['slug'], unique=True) + + with op.batch_alter_table('community_roles', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_community_roles_id'), ['id'], unique=False) + + with op.batch_alter_table('content_analytics', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_content_analytics_id'), ['id'], unique=False) + + with op.batch_alter_table('content_communities', schema=None) as batch_op: + batch_op.drop_constraint('uq_content_community', type_='unique') + + with op.batch_alter_table('contents', schema=None) as batch_op: + batch_op.alter_column('source_type', + existing_type=sa.VARCHAR(length=50), + type_=sa.Enum('contribution', 'release_note', 'event_summary', name='source_type_enum'), + existing_nullable=True) + batch_op.alter_column('status', + existing_type=sa.VARCHAR(length=50), + type_=sa.Enum('draft', 'reviewing', 'approved', 'published', name='status_enum'), + existing_nullable=True, + existing_server_default=sa.text("'draft'")) + batch_op.create_index(batch_op.f('ix_contents_id'), ['id'], unique=False) + + with 
op.batch_alter_table('ecosystem_contributors', schema=None) as batch_op: + batch_op.drop_constraint('uq_eco_contributor', type_='unique') + batch_op.create_index(batch_op.f('ix_ecosystem_contributors_id'), ['id'], unique=False) + + with op.batch_alter_table('ecosystem_projects', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_ecosystem_projects_id'), ['id'], unique=False) + + with op.batch_alter_table('event_attendees', schema=None) as batch_op: + batch_op.alter_column('source', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('manual', 'excel_import', name='attendee_source_enum'), + existing_nullable=True, + existing_server_default=sa.text("'manual'")) + + with op.batch_alter_table('event_personnel', schema=None) as batch_op: + batch_op.alter_column('assignee_type', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('internal', 'external', name='personnel_assignee_enum'), + existing_nullable=False) + batch_op.alter_column('confirmed', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('pending', 'confirmed', 'declined', name='personnel_confirm_enum'), + existing_nullable=True, + existing_server_default=sa.text("'pending'")) + + with op.batch_alter_table('event_tasks', schema=None) as batch_op: + batch_op.alter_column('task_type', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('task', 'milestone', name='task_type_enum'), + existing_nullable=True, + existing_server_default=sa.text("'task'")) + batch_op.alter_column('phase', + existing_type=sa.VARCHAR(length=10), + type_=sa.Enum('pre', 'during', 'post', name='task_phase_enum'), + existing_nullable=True, + existing_server_default=sa.text("'pre'")) + batch_op.alter_column('status', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('not_started', 'in_progress', 'completed', 'blocked', name='task_status_enum'), + existing_nullable=True, + existing_server_default=sa.text("'not_started'")) + + with op.batch_alter_table('event_templates', schema=None) as batch_op: + 
batch_op.alter_column('event_type', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('online', 'offline', 'hybrid', name='event_type_enum'), + existing_nullable=False) + + with op.batch_alter_table('events', schema=None) as batch_op: + batch_op.alter_column('event_type', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('online', 'offline', 'hybrid', name='event_type_enum'), + existing_nullable=False, + existing_server_default=sa.text("'offline'")) + batch_op.alter_column('status', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('draft', 'planning', 'ongoing', 'completed', 'cancelled', name='event_status_enum'), + existing_nullable=True, + existing_server_default=sa.text("'draft'")) + + with op.batch_alter_table('feedback_items', schema=None) as batch_op: + batch_op.alter_column('status', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('open', 'in_progress', 'closed', name='feedback_status_enum'), + existing_nullable=True, + existing_server_default=sa.text("'open'")) + + with op.batch_alter_table('issue_links', schema=None) as batch_op: + batch_op.alter_column('platform', + existing_type=sa.VARCHAR(length=20), + type_=sa.Enum('github', 'gitcode', 'gitee', name='issue_platform_enum'), + existing_nullable=False) + batch_op.alter_column('issue_type', + existing_type=sa.VARCHAR(length=10), + type_=sa.Enum('issue', 'pr', name='issue_type_enum'), + existing_nullable=True, + existing_server_default=sa.text("'issue'")) + batch_op.alter_column('issue_status', + existing_type=sa.VARCHAR(length=10), + type_=sa.Enum('open', 'closed', name='issue_status_enum'), + existing_nullable=True, + existing_server_default=sa.text("'open'")) + + with op.batch_alter_table('meeting_participants', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_meeting_participants_id'), ['id'], unique=False) + + with op.batch_alter_table('meeting_reminders', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_meeting_reminders_id'), ['id'], 
unique=False) + batch_op.create_index(batch_op.f('ix_meeting_reminders_meeting_id'), ['meeting_id'], unique=False) + + with op.batch_alter_table('meetings', schema=None) as batch_op: + batch_op.alter_column('reminder_sent', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('0')) + batch_op.create_index(batch_op.f('ix_meetings_committee_id'), ['committee_id'], unique=False) + batch_op.create_index(batch_op.f('ix_meetings_community_id'), ['community_id'], unique=False) + batch_op.create_index(batch_op.f('ix_meetings_id'), ['id'], unique=False) + batch_op.create_index(batch_op.f('ix_meetings_scheduled_at'), ['scheduled_at'], unique=False) + batch_op.create_index(batch_op.f('ix_meetings_status'), ['status'], unique=False) + + with op.batch_alter_table('password_reset_tokens', schema=None) as batch_op: + batch_op.alter_column('used', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('0')) + + with op.batch_alter_table('person_profiles', schema=None) as batch_op: + batch_op.alter_column('source', + existing_type=sa.VARCHAR(length=30), + type_=sa.Enum('manual', 'event_import', 'ecosystem_import', name='person_source_enum'), + existing_nullable=False, + existing_server_default=sa.text("'manual'")) + batch_op.drop_index('ix_person_profiles_email') + batch_op.create_index(batch_op.f('ix_person_profiles_email'), ['email'], unique=True) + batch_op.drop_index('ix_person_profiles_github_handle') + batch_op.create_index(batch_op.f('ix_person_profiles_github_handle'), ['github_handle'], unique=True) + batch_op.create_index(batch_op.f('ix_person_profiles_gitcode_handle'), ['gitcode_handle'], unique=True) + batch_op.create_index(batch_op.f('ix_person_profiles_id'), ['id'], unique=False) + + with op.batch_alter_table('publish_records', schema=None) as batch_op: + batch_op.alter_column('channel', + existing_type=sa.VARCHAR(length=50), + type_=sa.Enum('wechat', 'hugo', 'csdn', 'zhihu', name='pub_channel_enum'), + 
existing_nullable=False) + batch_op.alter_column('status', + existing_type=sa.VARCHAR(length=50), + type_=sa.Enum('pending', 'draft', 'published', 'failed', name='pub_status_enum'), + existing_nullable=True) + batch_op.alter_column('community_id', + existing_type=sa.INTEGER(), + nullable=False) + batch_op.create_index(batch_op.f('ix_publish_records_id'), ['id'], unique=False) + + with op.batch_alter_table('users', schema=None) as batch_op: + batch_op.alter_column('is_active', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('1')) + batch_op.alter_column('is_superuser', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('0')) + batch_op.alter_column('is_default_admin', + existing_type=sa.BOOLEAN(), + nullable=True, + existing_server_default=sa.text('0')) + batch_op.create_index(batch_op.f('ix_users_email'), ['email'], unique=True) + batch_op.create_index(batch_op.f('ix_users_id'), ['id'], unique=False) + batch_op.create_index(batch_op.f('ix_users_username'), ['username'], unique=True) + + with op.batch_alter_table('wechat_article_stats', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_wechat_article_stats_id'), ['id'], unique=False) + + with op.batch_alter_table('wechat_stats_aggregates', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_wechat_stats_aggregates_article_category'), ['article_category'], unique=False) + batch_op.create_index(batch_op.f('ix_wechat_stats_aggregates_id'), ['id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('wechat_stats_aggregates', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_wechat_stats_aggregates_id')) + batch_op.drop_index(batch_op.f('ix_wechat_stats_aggregates_article_category')) + + with op.batch_alter_table('wechat_article_stats', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_wechat_article_stats_id')) + + with op.batch_alter_table('users', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_users_username')) + batch_op.drop_index(batch_op.f('ix_users_id')) + batch_op.drop_index(batch_op.f('ix_users_email')) + batch_op.alter_column('is_default_admin', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('0')) + batch_op.alter_column('is_superuser', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('0')) + batch_op.alter_column('is_active', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('1')) + + with op.batch_alter_table('publish_records', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_publish_records_id')) + batch_op.alter_column('community_id', + existing_type=sa.INTEGER(), + nullable=True) + batch_op.alter_column('status', + existing_type=sa.Enum('pending', 'draft', 'published', 'failed', name='pub_status_enum'), + type_=sa.VARCHAR(length=50), + existing_nullable=True) + batch_op.alter_column('channel', + existing_type=sa.Enum('wechat', 'hugo', 'csdn', 'zhihu', name='pub_channel_enum'), + type_=sa.VARCHAR(length=50), + existing_nullable=False) + + with op.batch_alter_table('person_profiles', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_person_profiles_id')) + batch_op.drop_index(batch_op.f('ix_person_profiles_gitcode_handle')) + batch_op.drop_index(batch_op.f('ix_person_profiles_github_handle')) + batch_op.create_index('ix_person_profiles_github_handle', ['github_handle'], unique=False) + batch_op.drop_index(batch_op.f('ix_person_profiles_email')) + 
batch_op.create_index('ix_person_profiles_email', ['email'], unique=False) + batch_op.alter_column('source', + existing_type=sa.Enum('manual', 'event_import', 'ecosystem_import', name='person_source_enum'), + type_=sa.VARCHAR(length=30), + existing_nullable=False, + existing_server_default=sa.text("'manual'")) + + with op.batch_alter_table('password_reset_tokens', schema=None) as batch_op: + batch_op.alter_column('used', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('0')) + + with op.batch_alter_table('meetings', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_meetings_status')) + batch_op.drop_index(batch_op.f('ix_meetings_scheduled_at')) + batch_op.drop_index(batch_op.f('ix_meetings_id')) + batch_op.drop_index(batch_op.f('ix_meetings_community_id')) + batch_op.drop_index(batch_op.f('ix_meetings_committee_id')) + batch_op.alter_column('reminder_sent', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('0')) + + with op.batch_alter_table('meeting_reminders', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_meeting_reminders_meeting_id')) + batch_op.drop_index(batch_op.f('ix_meeting_reminders_id')) + + with op.batch_alter_table('meeting_participants', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_meeting_participants_id')) + + with op.batch_alter_table('issue_links', schema=None) as batch_op: + batch_op.alter_column('issue_status', + existing_type=sa.Enum('open', 'closed', name='issue_status_enum'), + type_=sa.VARCHAR(length=10), + existing_nullable=True, + existing_server_default=sa.text("'open'")) + batch_op.alter_column('issue_type', + existing_type=sa.Enum('issue', 'pr', name='issue_type_enum'), + type_=sa.VARCHAR(length=10), + existing_nullable=True, + existing_server_default=sa.text("'issue'")) + batch_op.alter_column('platform', + existing_type=sa.Enum('github', 'gitcode', 'gitee', name='issue_platform_enum'), + type_=sa.VARCHAR(length=20), + 
existing_nullable=False) + + with op.batch_alter_table('feedback_items', schema=None) as batch_op: + batch_op.alter_column('status', + existing_type=sa.Enum('open', 'in_progress', 'closed', name='feedback_status_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=True, + existing_server_default=sa.text("'open'")) + + with op.batch_alter_table('events', schema=None) as batch_op: + batch_op.alter_column('status', + existing_type=sa.Enum('draft', 'planning', 'ongoing', 'completed', 'cancelled', name='event_status_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=True, + existing_server_default=sa.text("'draft'")) + batch_op.alter_column('event_type', + existing_type=sa.Enum('online', 'offline', 'hybrid', name='event_type_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=False, + existing_server_default=sa.text("'offline'")) + + with op.batch_alter_table('event_templates', schema=None) as batch_op: + batch_op.alter_column('event_type', + existing_type=sa.Enum('online', 'offline', 'hybrid', name='event_type_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=False) + + with op.batch_alter_table('event_tasks', schema=None) as batch_op: + batch_op.alter_column('status', + existing_type=sa.Enum('not_started', 'in_progress', 'completed', 'blocked', name='task_status_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=True, + existing_server_default=sa.text("'not_started'")) + batch_op.alter_column('phase', + existing_type=sa.Enum('pre', 'during', 'post', name='task_phase_enum'), + type_=sa.VARCHAR(length=10), + existing_nullable=True, + existing_server_default=sa.text("'pre'")) + batch_op.alter_column('task_type', + existing_type=sa.Enum('task', 'milestone', name='task_type_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=True, + existing_server_default=sa.text("'task'")) + + with op.batch_alter_table('event_personnel', schema=None) as batch_op: + batch_op.alter_column('confirmed', + existing_type=sa.Enum('pending', 
'confirmed', 'declined', name='personnel_confirm_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=True, + existing_server_default=sa.text("'pending'")) + batch_op.alter_column('assignee_type', + existing_type=sa.Enum('internal', 'external', name='personnel_assignee_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=False) + + with op.batch_alter_table('event_attendees', schema=None) as batch_op: + batch_op.alter_column('source', + existing_type=sa.Enum('manual', 'excel_import', name='attendee_source_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=True, + existing_server_default=sa.text("'manual'")) + + with op.batch_alter_table('ecosystem_projects', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_ecosystem_projects_id')) + + with op.batch_alter_table('ecosystem_contributors', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_ecosystem_contributors_id')) + batch_op.create_unique_constraint('uq_eco_contributor', ['project_id', 'github_handle']) + + with op.batch_alter_table('contents', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_contents_id')) + batch_op.alter_column('status', + existing_type=sa.Enum('draft', 'reviewing', 'approved', 'published', name='status_enum'), + type_=sa.VARCHAR(length=50), + existing_nullable=True, + existing_server_default=sa.text("'draft'")) + batch_op.alter_column('source_type', + existing_type=sa.Enum('contribution', 'release_note', 'event_summary', name='source_type_enum'), + type_=sa.VARCHAR(length=50), + existing_nullable=True) + + with op.batch_alter_table('content_communities', schema=None) as batch_op: + batch_op.create_unique_constraint('uq_content_community', ['content_id', 'community_id']) + + with op.batch_alter_table('content_analytics', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_content_analytics_id')) + + with op.batch_alter_table('community_roles', schema=None) as batch_op: + 
batch_op.drop_index(batch_op.f('ix_community_roles_id')) + + with op.batch_alter_table('communities', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_communities_slug')) + batch_op.drop_index(batch_op.f('ix_communities_name')) + batch_op.drop_index(batch_op.f('ix_communities_id')) + batch_op.alter_column('is_active', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('1')) + + with op.batch_alter_table('committees', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_committees_id')) + batch_op.drop_index(batch_op.f('ix_committees_community_id')) + batch_op.alter_column('is_active', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('1')) + + with op.batch_alter_table('committee_members', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_committee_members_id')) + batch_op.drop_index(batch_op.f('ix_committee_members_email')) + batch_op.drop_index(batch_op.f('ix_committee_members_committee_id')) + batch_op.alter_column('is_active', + existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('1')) + + with op.batch_alter_table('checklist_template_items', schema=None) as batch_op: + batch_op.alter_column('phase', + existing_type=sa.Enum('pre', 'during', 'post', name='checklist_phase_enum'), + type_=sa.VARCHAR(length=10), + existing_nullable=False) + + with op.batch_alter_table('checklist_items', schema=None) as batch_op: + batch_op.alter_column('status', + existing_type=sa.Enum('pending', 'done', 'skipped', name='checklist_status_enum'), + type_=sa.VARCHAR(length=20), + existing_nullable=True, + existing_server_default=sa.text("'pending'")) + batch_op.alter_column('phase', + existing_type=sa.Enum('pre', 'during', 'post', name='checklist_item_phase_enum'), + type_=sa.VARCHAR(length=10), + existing_nullable=False) + + with op.batch_alter_table('channel_configs', schema=None) as batch_op: + batch_op.alter_column('enabled', + 
existing_type=sa.BOOLEAN(), + nullable=False, + existing_server_default=sa.text('0')) + + with op.batch_alter_table('campaigns', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_campaigns_id')) + + with op.batch_alter_table('campaign_contacts', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_campaign_contacts_id')) + batch_op.create_unique_constraint('uq_campaign_contact', ['campaign_id', 'person_id']) + + op.create_table('_alembic_tmp_campaign_contacts', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('campaign_id', sa.INTEGER(), nullable=False), + sa.Column('person_id', sa.INTEGER(), nullable=False), + sa.Column('status', sa.VARCHAR(length=50), server_default=sa.text("'pending'"), nullable=False), + sa.Column('channel', sa.VARCHAR(length=50), nullable=True), + sa.Column('added_by', sa.VARCHAR(length=50), server_default=sa.text("'manual'"), nullable=False), + sa.Column('last_contacted_at', sa.DATETIME(), nullable=True), + sa.Column('notes', sa.TEXT(), nullable=True), + sa.Column('assigned_to_id', sa.INTEGER(), nullable=True), + sa.ForeignKeyConstraint(['assigned_to_id'], ['users.id'], ondelete='SET NULL'), + sa.ForeignKeyConstraint(['campaign_id'], ['campaigns.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['person_id'], ['person_profiles.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### diff --git a/backend/app/api/auth.py b/backend/app/api/auth.py index 5d1f182..c2e395e 100644 --- a/backend/app/api/auth.py +++ b/backend/app/api/auth.py @@ -1,5 +1,5 @@ import secrets -from datetime import datetime, timedelta +from datetime import timedelta from fastapi import APIRouter, Depends, HTTPException, Request, status from sqlalchemy.orm import Session @@ -8,6 +8,7 @@ from app.core.dependencies import get_current_active_superuser, get_current_user from app.core.logging import get_logger from app.core.rate_limit import limiter +from app.core.timezone import utc_now from 
app.core.security import create_access_token, get_password_hash, verify_password from app.database import get_db from app.models import User @@ -383,7 +384,7 @@ def request_password_reset( reset_token = PasswordResetToken( user_id=user.id, token=token_value, - expires_at=datetime.utcnow() + timedelta(hours=1), + expires_at=utc_now() + timedelta(hours=1), ) db.add(reset_token) db.commit() diff --git a/backend/app/api/campaigns.py b/backend/app/api/campaigns.py index e99fd10..6b4d480 100644 --- a/backend/app/api/campaigns.py +++ b/backend/app/api/campaigns.py @@ -1,10 +1,9 @@ -from datetime import datetime - from fastapi import APIRouter, Depends, HTTPException, Query from sqlalchemy import func from sqlalchemy.orm import Session, joinedload from app.core.dependencies import get_current_user +from app.core.timezone import utc_now from app.database import get_db from app.models import User from app.models.campaign import Campaign, CampaignActivity, CampaignContact @@ -349,7 +348,7 @@ def add_activity( ) db.add(activity) # 更新联系人最近跟进时间 - contact.last_contacted_at = datetime.utcnow() + contact.last_contacted_at = utc_now() db.commit() db.refresh(activity) return activity diff --git a/backend/app/api/community_dashboard.py b/backend/app/api/community_dashboard.py index 02f541f..26371b2 100644 --- a/backend/app/api/community_dashboard.py +++ b/backend/app/api/community_dashboard.py @@ -7,13 +7,14 @@ 权限:社区成员(admin / user)均可访问,Superuser 可访问任意社区。 """ -from datetime import datetime, timedelta +from datetime import timedelta from fastapi import APIRouter, Depends, HTTPException, status from sqlalchemy import case, func, select from sqlalchemy.orm import Session, joinedload from app.core.dependencies import get_current_user, get_user_community_role +from app.core.timezone import utc_now from app.core.logging import get_logger from app.database import get_db from app.models import Community, User @@ -77,7 +78,7 @@ def get_community_dashboard( detail="社区不存在", ) - now = 
datetime.utcnow() + now = utc_now() # ── 1. 指标卡片聚合(单批查询)────────────────────────────────────── @@ -432,7 +433,7 @@ def get_superuser_overview( detail="仅平台超级管理员可访问", ) - now = datetime.utcnow() + now = utc_now() communities = db.query(Community).order_by(Community.created_at.desc()).all() # 批量查询各社区统计(避免 N+1) diff --git a/backend/app/api/publish.py b/backend/app/api/publish.py index de73111..c756ee4 100644 --- a/backend/app/api/publish.py +++ b/backend/app/api/publish.py @@ -1,11 +1,11 @@ import os -from datetime import datetime from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.orm import Session from app.config import settings from app.core.dependencies import get_current_community +from app.core.timezone import utc_now from app.database import get_db from app.models.content import Content from app.models.publish_record import PublishRecord @@ -141,7 +141,7 @@ def publish_to_hugo(content_id: int, db: Session = Depends(get_db)): channel="hugo", status="published", platform_url=file_path, - published_at=datetime.utcnow(), + published_at=utc_now(), community_id=community_id, ) db.add(record) diff --git a/backend/app/config.py b/backend/app/config.py index f103664..422f160 100644 --- a/backend/app/config.py +++ b/backend/app/config.py @@ -60,6 +60,9 @@ def cors_origins_list(self) -> list[str]: UPLOAD_DIR: str = str(Path(__file__).resolve().parent.parent / "uploads") MAX_UPLOAD_SIZE: int = 50 * 1024 * 1024 # 50MB + # Timezone + APP_TIMEZONE: str = "Asia/Shanghai" + # Server HOST: str = "0.0.0.0" PORT: int = 8000 diff --git a/backend/app/core/security.py b/backend/app/core/security.py index 9d6f676..e8157c0 100644 --- a/backend/app/core/security.py +++ b/backend/app/core/security.py @@ -1,9 +1,10 @@ -from datetime import datetime, timedelta +from datetime import timedelta from jose import JWTError, jwt from passlib.context import CryptContext from app.config import settings +from app.core.timezone import utc_now # JWT Configuration — single source from 
settings SECRET_KEY = settings.JWT_SECRET_KEY @@ -60,9 +61,9 @@ def create_access_token(data: dict, expires_delta: timedelta | None = None) -> s """Create a JWT access token.""" to_encode = data.copy() if expires_delta: - expire = datetime.utcnow() + expires_delta + expire = utc_now() + expires_delta else: - expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) + expire = utc_now() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) to_encode.update({"exp": expire}) encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) return encoded_jwt diff --git a/backend/app/core/timezone.py b/backend/app/core/timezone.py new file mode 100644 index 0000000..9881c5c --- /dev/null +++ b/backend/app/core/timezone.py @@ -0,0 +1,22 @@ +from datetime import datetime +from datetime import timezone as _tz +from zoneinfo import ZoneInfo + + +def utc_now() -> datetime: + """返回当前 UTC 时间(带时区信息)。""" + return datetime.now(_tz.utc) + + +def get_app_tz() -> ZoneInfo: + """返回配置的应用时区对象。""" + from app.config import settings + + return ZoneInfo(settings.APP_TIMEZONE) + + +def to_app_tz(dt: datetime) -> datetime: + """将 datetime 转换为应用时区。用于服务端输出(邮件、ICS 等)。""" + if dt.tzinfo is None: + dt = dt.replace(tzinfo=_tz.utc) + return dt.astimezone(get_app_tz()) diff --git a/backend/app/models/audit.py b/backend/app/models/audit.py index f00463a..61af4a9 100644 --- a/backend/app/models/audit.py +++ b/backend/app/models/audit.py @@ -1,8 +1,7 @@ -from datetime import datetime - from sqlalchemy import JSON, Column, DateTime, ForeignKey, Integer, String from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base @@ -17,7 +16,7 @@ class AuditLog(Base): resource_id = Column(Integer, nullable=True) # ID of the affected resource details = Column(JSON, default=dict) # Additional details about the action ip_address = Column(String(50), nullable=True) - created_at = Column(DateTime, default=datetime.utcnow, index=True) + created_at = 
Column(DateTime(timezone=True), default=utc_now, index=True) # Relationships user = relationship("User", back_populates="audit_logs") diff --git a/backend/app/models/campaign.py b/backend/app/models/campaign.py index 19f7154..dc8e9dd 100644 --- a/backend/app/models/campaign.py +++ b/backend/app/models/campaign.py @@ -1,8 +1,7 @@ -from datetime import datetime - from sqlalchemy import Column, Date, DateTime, ForeignKey, Integer, String, Text from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base @@ -19,8 +18,8 @@ class Campaign(Base): owner_id = Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True) start_date = Column(Date, nullable=True) end_date = Column(Date, nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) contacts = relationship("CampaignContact", back_populates="campaign", cascade="all, delete-orphan") activities = relationship("CampaignActivity", back_populates="campaign", cascade="all, delete-orphan") @@ -35,7 +34,7 @@ class CampaignContact(Base): status = Column(String(50), nullable=False, default="pending") # pending/contacted/responded/converted/declined channel = Column(String(50), nullable=True) # email/wechat/phone/in_person/other added_by = Column(String(50), nullable=False, default="manual") # manual/event_import/ecosystem_import/csv_import - last_contacted_at = Column(DateTime, nullable=True) + last_contacted_at = Column(DateTime(timezone=True), nullable=True) notes = Column(Text, nullable=True) assigned_to_id = Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True) @@ -53,7 +52,7 @@ class CampaignActivity(Base): content = Column(Text, nullable=True) outcome = Column(String(300), nullable=True) operator_id 
= Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) campaign = relationship("Campaign", back_populates="activities") person = relationship("PersonProfile") diff --git a/backend/app/models/committee.py b/backend/app/models/committee.py index d99a9ea..a3b276c 100644 --- a/backend/app/models/committee.py +++ b/backend/app/models/committee.py @@ -1,5 +1,3 @@ -from datetime import datetime - from sqlalchemy import ( JSON, Boolean, @@ -14,6 +12,7 @@ ) from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base @@ -38,9 +37,9 @@ class Committee(Base): notification_email = Column(String(200), nullable=True) notification_wechat = Column(String(100), nullable=True) - established_at = Column(DateTime, nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + established_at = Column(DateTime(timezone=True), nullable=True) + created_at = Column(DateTime(timezone=True), default=utc_now) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) # Relationships community = relationship("Community", back_populates="committees") @@ -91,9 +90,9 @@ class CommitteeMember(Base): bio = Column(Text, nullable=True) avatar_url = Column(String(500), nullable=True) - joined_at = Column(DateTime, default=datetime.utcnow) - created_at = Column(DateTime, default=datetime.utcnow) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + joined_at = Column(DateTime(timezone=True), default=utc_now) + created_at = Column(DateTime(timezone=True), default=utc_now) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) # 关联人脉档案(可为空,表示尚未与 PersonProfile 匹配) person_id = Column( diff --git a/backend/app/models/community.py 
b/backend/app/models/community.py index bbecad2..17218aa 100644 --- a/backend/app/models/community.py +++ b/backend/app/models/community.py @@ -1,8 +1,7 @@ -from datetime import datetime - from sqlalchemy import JSON, Boolean, Column, DateTime, Integer, String, Text from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base from app.models.user import community_users @@ -18,8 +17,8 @@ class Community(Base): logo_url = Column(String(500), nullable=True) settings = Column(JSON, default=dict) # Community-level settings is_active = Column(Boolean, default=True) - created_at = Column(DateTime, default=datetime.utcnow) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) # Relationships members = relationship( diff --git a/backend/app/models/content.py b/backend/app/models/content.py index 30b7bda..625348b 100644 --- a/backend/app/models/content.py +++ b/backend/app/models/content.py @@ -1,9 +1,8 @@ -from datetime import datetime - from sqlalchemy import JSON, Boolean, Column, DateTime, ForeignKey, Integer, String, Table, Text from sqlalchemy import Enum as SAEnum from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base # Association table for content → community (multi-community support) @@ -14,7 +13,7 @@ Column("content_id", Integer, ForeignKey("contents.id", ondelete="CASCADE"), nullable=False, index=True), Column("community_id", Integer, ForeignKey("communities.id", ondelete="CASCADE"), nullable=False, index=True), Column("is_primary", Boolean, server_default="1"), - Column("linked_at", DateTime, default=datetime.utcnow), + Column("linked_at", DateTime(timezone=True), default=utc_now), Column("linked_by_id", Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True), ) @@ -25,7 +24,7 
@@ Column("id", Integer, primary_key=True), Column("content_id", Integer, ForeignKey("contents.id", ondelete="CASCADE"), nullable=False), Column("user_id", Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False), - Column("added_at", DateTime, default=datetime.utcnow), + Column("added_at", DateTime(timezone=True), default=utc_now), ) @@ -36,7 +35,7 @@ Column("id", Integer, primary_key=True), Column("content_id", Integer, ForeignKey("contents.id", ondelete="CASCADE"), nullable=False, index=True), Column("user_id", Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True), - Column("assigned_at", DateTime, default=datetime.utcnow), + Column("assigned_at", DateTime(timezone=True), default=utc_now), Column("assigned_by_user_id", Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True), ) @@ -71,8 +70,8 @@ class Content(Base): # Calendar/scheduling field scheduled_publish_at = Column(DateTime, nullable=True, index=True) - created_at = Column(DateTime, default=datetime.utcnow) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) publish_records = relationship("PublishRecord", back_populates="content", cascade="all, delete-orphan") community = relationship("Community", back_populates="contents") diff --git a/backend/app/models/ecosystem.py b/backend/app/models/ecosystem.py index 252d042..50e6b71 100644 --- a/backend/app/models/ecosystem.py +++ b/backend/app/models/ecosystem.py @@ -1,8 +1,7 @@ -from datetime import datetime - from sqlalchemy import JSON, Boolean, Column, DateTime, ForeignKey, Integer, String, Text from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base @@ -18,9 +17,9 @@ class EcosystemProject(Base): description = Column(Text, nullable=True) tags = Column(JSON, default=list) is_active 
= Column(Boolean, default=True) - last_synced_at = Column(DateTime, nullable=True) + last_synced_at = Column(DateTime(timezone=True), nullable=True) added_by_id = Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) contributors = relationship( "EcosystemContributor", @@ -44,7 +43,7 @@ class EcosystemContributor(Base): followers = Column(Integer, nullable=True) # 关联到人脉库 person_id = Column(Integer, ForeignKey("person_profiles.id", ondelete="SET NULL"), nullable=True, index=True) - last_synced_at = Column(DateTime, default=datetime.utcnow) + last_synced_at = Column(DateTime(timezone=True), default=utc_now) project = relationship("EcosystemProject", back_populates="contributors") person = relationship("PersonProfile") diff --git a/backend/app/models/event.py b/backend/app/models/event.py index 03475e4..ba16100 100644 --- a/backend/app/models/event.py +++ b/backend/app/models/event.py @@ -1,9 +1,8 @@ -from datetime import datetime - from sqlalchemy import JSON, Boolean, Column, Date, DateTime, ForeignKey, Integer, String, Table, Text from sqlalchemy import Enum as SAEnum from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base # 活动 ↔ 社区 多对多关联表 @@ -31,7 +30,7 @@ class EventTemplate(Base): description = Column(Text, nullable=True) is_public = Column(Boolean, default=False) created_by_id = Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) checklist_items = relationship( "ChecklistTemplateItem", back_populates="template", cascade="all, delete-orphan" @@ -78,7 +77,7 @@ class Event(Base): SAEnum("draft", "planning", "ongoing", "completed", "cancelled", name="event_status_enum"), default="draft", ) - planned_at = Column(DateTime, 
nullable=True) + planned_at = Column(DateTime(timezone=True), nullable=True) duration_minutes = Column(Integer, nullable=True) location = Column(String(300), nullable=True) online_url = Column(String(500), nullable=True) @@ -94,8 +93,8 @@ class Event(Base): result_summary = Column(Text, nullable=True) media_urls = Column(JSON, default=list) - created_at = Column(DateTime, default=datetime.utcnow) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) template = relationship("EventTemplate", back_populates="events") communities = relationship( @@ -206,7 +205,7 @@ class FeedbackItem(Base): SAEnum("open", "in_progress", "closed", name="feedback_status_enum"), default="open" ) assignee_id = Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) event = relationship("Event", back_populates="feedback_items") issue_links = relationship("IssueLink", back_populates="feedback", cascade="all, delete-orphan") @@ -232,7 +231,7 @@ class IssueLink(Base): issue_status = Column( SAEnum("open", "closed", name="issue_status_enum"), default="open" ) - linked_at = Column(DateTime, default=datetime.utcnow) + linked_at = Column(DateTime(timezone=True), default=utc_now) linked_by_id = Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True) feedback = relationship("FeedbackItem", back_populates="issue_links") diff --git a/backend/app/models/meeting.py b/backend/app/models/meeting.py index cbf0129..9b46a80 100644 --- a/backend/app/models/meeting.py +++ b/backend/app/models/meeting.py @@ -1,5 +1,3 @@ -from datetime import datetime - from sqlalchemy import ( JSON, Boolean, @@ -15,6 +13,7 @@ ) from sqlalchemy.orm import relationship +from app.core.timezone import 
utc_now from app.database import Base # Association table for meeting assignees (责任人) @@ -24,7 +23,7 @@ Column("id", Integer, primary_key=True), Column("meeting_id", Integer, ForeignKey("meetings.id", ondelete="CASCADE"), nullable=False, index=True), Column("user_id", Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True), - Column("assigned_at", DateTime, default=datetime.utcnow), + Column("assigned_at", DateTime(timezone=True), default=utc_now), Column("assigned_by_user_id", Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True), ) @@ -76,8 +75,8 @@ class Meeting(Base): nullable=True, ) - created_at = Column(DateTime, default=datetime.utcnow) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) # Relationships committee = relationship("Committee", back_populates="meetings") @@ -120,7 +119,7 @@ class MeetingReminder(Base): status = Column(String(50), default="pending") # pending, sent, failed error_message = Column(Text, nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) # Relationships meeting = relationship("Meeting") @@ -145,7 +144,7 @@ class MeetingParticipant(Base): name = Column(String(200), nullable=False) email = Column(String(200), nullable=False, index=True) source = Column(String(50), default="manual") # manual / committee_import - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) meeting = relationship("Meeting", back_populates="participants") diff --git a/backend/app/models/password_reset.py b/backend/app/models/password_reset.py index e84c2b7..50cf889 100644 --- a/backend/app/models/password_reset.py +++ b/backend/app/models/password_reset.py @@ -1,8 +1,7 @@ -from datetime import 
datetime - from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base @@ -13,16 +12,16 @@ class PasswordResetToken(Base): id = Column(Integer, primary_key=True, index=True) user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False) token = Column(String(200), unique=True, nullable=False, index=True) - expires_at = Column(DateTime, nullable=False) + expires_at = Column(DateTime(timezone=True), nullable=False) used = Column(Boolean, default=False) - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) # Relationships user = relationship("User", backref="password_reset_tokens") @property def is_expired(self) -> bool: - return datetime.utcnow() > self.expires_at + return utc_now() > self.expires_at @property def is_valid(self) -> bool: diff --git a/backend/app/models/people.py b/backend/app/models/people.py index b9103d4..523a345 100644 --- a/backend/app/models/people.py +++ b/backend/app/models/people.py @@ -1,9 +1,8 @@ -from datetime import datetime - from sqlalchemy import JSON, Boolean, Column, Date, DateTime, ForeignKey, Integer, String, Text from sqlalchemy import Enum as SAEnum from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base @@ -29,8 +28,8 @@ class PersonProfile(Base): default="manual", ) created_by_id = Column(Integer, ForeignKey("users.id", ondelete="SET NULL"), nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) community_roles = relationship("CommunityRole", back_populates="person", cascade="all, delete-orphan") event_attendances = 
relationship("EventAttendee", back_populates="person") diff --git a/backend/app/models/publish_record.py b/backend/app/models/publish_record.py index 2a075df..c57265d 100644 --- a/backend/app/models/publish_record.py +++ b/backend/app/models/publish_record.py @@ -1,9 +1,8 @@ -from datetime import datetime - from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text from sqlalchemy import Enum as SAEnum from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base @@ -22,9 +21,9 @@ class PublishRecord(Base): ) platform_article_id = Column(String(200), nullable=True) platform_url = Column(String(500), nullable=True) - published_at = Column(DateTime, nullable=True) + published_at = Column(DateTime(timezone=True), nullable=True) error_message = Column(Text, nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) community_id = Column( Integer, ForeignKey("communities.id", ondelete="CASCADE"), @@ -46,6 +45,6 @@ class ContentAnalytics(Base): like_count = Column(Integer, default=0) share_count = Column(Integer, default=0) comment_count = Column(Integer, default=0) - collected_at = Column(DateTime, default=datetime.utcnow) + collected_at = Column(DateTime(timezone=True), default=utc_now) publish_record = relationship("PublishRecord", back_populates="analytics") diff --git a/backend/app/models/user.py b/backend/app/models/user.py index b5298a9..d6ccb72 100644 --- a/backend/app/models/user.py +++ b/backend/app/models/user.py @@ -1,8 +1,7 @@ -from datetime import datetime - from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Table from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base # Association table for many-to-many relationship between users and communities @@ -13,7 +12,7 @@ Column("user_id", Integer, ForeignKey("users.id", ondelete="CASCADE"), 
nullable=False), Column("community_id", Integer, ForeignKey("communities.id", ondelete="CASCADE"), nullable=False), Column("role", String(50), default="user"), # 'admin', 'user' (superuser is global) - Column("joined_at", DateTime, default=datetime.utcnow), + Column("joined_at", DateTime(timezone=True), default=utc_now), ) @@ -28,7 +27,7 @@ class User(Base): is_active = Column(Boolean, default=True) is_superuser = Column(Boolean, default=False) # Superuser can access all communities is_default_admin = Column(Boolean, default=False) # Marks the seeded default admin account - created_at = Column(DateTime, default=datetime.utcnow) + created_at = Column(DateTime(timezone=True), default=utc_now) # Relationships communities = relationship( diff --git a/backend/app/models/wechat_stats.py b/backend/app/models/wechat_stats.py index 7fe5504..ad83977 100644 --- a/backend/app/models/wechat_stats.py +++ b/backend/app/models/wechat_stats.py @@ -4,8 +4,6 @@ 粉丝互动数据,以及多维度时间聚合统计。 """ -from datetime import datetime - from sqlalchemy import ( Column, Date, @@ -20,6 +18,7 @@ ) from sqlalchemy.orm import relationship +from app.core.timezone import utc_now from app.database import Base @@ -72,7 +71,7 @@ class WechatArticleStat(Base): nullable=False, index=True, ) - collected_at = Column(DateTime, default=datetime.utcnow) + collected_at = Column(DateTime(timezone=True), default=utc_now) # ── 关系 ── publish_record = relationship("PublishRecord", backref="wechat_stats") @@ -143,7 +142,7 @@ class WechatStatsAggregate(Base): # ── 平均值指标 ── avg_read_count = Column(Integer, default=0) - updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + updated_at = Column(DateTime(timezone=True), default=utc_now, onupdate=utc_now) community = relationship("Community") diff --git a/backend/app/services/ecosystem/github_crawler.py b/backend/app/services/ecosystem/github_crawler.py index adcae21..89f5be5 100644 --- a/backend/app/services/ecosystem/github_crawler.py +++ 
b/backend/app/services/ecosystem/github_crawler.py @@ -5,11 +5,11 @@ """ import logging -from datetime import datetime import httpx from sqlalchemy.orm import Session +from app.core.timezone import utc_now from app.models.ecosystem import EcosystemContributor, EcosystemProject logger = logging.getLogger(__name__) @@ -54,7 +54,7 @@ def sync_project(db: Session, project: EcosystemProject, token: str | None = Non existing_map[handle].commit_count_90d = item.get("contributions") existing_map[handle].display_name = item.get("login") existing_map[handle].avatar_url = item.get("avatar_url") - existing_map[handle].last_synced_at = datetime.utcnow() + existing_map[handle].last_synced_at = utc_now() updated += 1 else: db.add(EcosystemContributor( @@ -63,7 +63,7 @@ def sync_project(db: Session, project: EcosystemProject, token: str | None = Non display_name=item.get("login"), avatar_url=item.get("avatar_url"), commit_count_90d=item.get("contributions"), - last_synced_at=datetime.utcnow(), + last_synced_at=utc_now(), )) created += 1 @@ -72,7 +72,7 @@ def sync_project(db: Session, project: EcosystemProject, token: str | None = Non errors += 1 return {"created": created, "updated": updated, "errors": errors} - project.last_synced_at = datetime.utcnow() + project.last_synced_at = utc_now() db.commit() logger.info("项目 %s 同步完成 — created=%d updated=%d", project.name, created, updated) return {"created": created, "updated": updated, "errors": errors} diff --git a/backend/app/services/hugo.py b/backend/app/services/hugo.py index 24c1ad0..92d12ac 100644 --- a/backend/app/services/hugo.py +++ b/backend/app/services/hugo.py @@ -3,6 +3,8 @@ from slugify import slugify +from app.core.timezone import to_app_tz, utc_now + class HugoService: def _load_config(self, community_id: int) -> tuple[str, str]: @@ -43,12 +45,12 @@ def generate_front_matter( ) -> str: """Generate Hugo YAML front matter.""" if date is None: - date = datetime.utcnow() + date = to_app_tz(utc_now()) lines = [ "---", 
f'title: "{title}"', - f"date: {date.strftime('%Y-%m-%dT%H:%M:%S+08:00')}", + f"date: {date.isoformat(timespec='seconds')}", ] if author: lines.append(f'author: "{author}"') diff --git a/backend/app/services/ics.py b/backend/app/services/ics.py index 1fe94c1..743007e 100644 --- a/backend/app/services/ics.py +++ b/backend/app/services/ics.py @@ -1,13 +1,17 @@ from __future__ import annotations -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone as _tz +from app.core.timezone import utc_now from app.models.community import Community from app.models.meeting import Meeting -def _format_dt(dt: datetime) -> str: - return dt.strftime("%Y%m%dT%H%M%S") +def _format_dt_utc(dt: datetime) -> str: + """将 datetime 格式化为 iCalendar UTC 格式(以 Z 结尾)。""" + if dt.tzinfo is None: + dt = dt.replace(tzinfo=_tz.utc) + return dt.astimezone(_tz.utc).strftime("%Y%m%dT%H%M%SZ") def build_meeting_ics(meeting: Meeting, community: Community, organizer_email: str) -> bytes: @@ -25,7 +29,7 @@ def build_meeting_ics(meeting: Meeting, community: Community, organizer_email: s description = "\n\n".join(description_parts) if description_parts else "Meeting reminder" location = meeting.location or "" uid = f"meeting-{meeting.id}@{community.slug}" - dtstamp = _format_dt(datetime.utcnow()) + dtstamp = _format_dt_utc(utc_now()) lines = [ "BEGIN:VCALENDAR", @@ -36,8 +40,8 @@ def build_meeting_ics(meeting: Meeting, community: Community, organizer_email: s "BEGIN:VEVENT", f"UID:{uid}", f"DTSTAMP:{dtstamp}", - f"DTSTART:{_format_dt(dt_start)}", - f"DTEND:{_format_dt(dt_end)}", + f"DTSTART:{_format_dt_utc(dt_start)}", + f"DTEND:{_format_dt_utc(dt_end)}", f"SUMMARY:{meeting.title}", f"LOCATION:{location}", f"DESCRIPTION:{_escape_text(description)}", diff --git a/backend/app/services/notification.py b/backend/app/services/notification.py index 348d6d5..412892a 100644 --- a/backend/app/services/notification.py +++ b/backend/app/services/notification.py @@ -1,11 +1,11 @@ from 
__future__ import annotations import smtplib -from datetime import datetime from html import escape from sqlalchemy.orm import Session +from app.core.timezone import utc_now from app.models.community import Community from app.models.meeting import Meeting, MeetingParticipant, MeetingReminder from app.services.email import EmailAttachment, EmailMessage, get_sender_info, get_smtp_config, send_email @@ -79,7 +79,7 @@ def send_meeting_reminder(db: Session, reminder_id: int) -> MeetingReminder: try: send_email(community, message) reminder.status = "sent" - reminder.sent_at = datetime.utcnow() + reminder.sent_at = utc_now() reminder.error_message = None except smtplib.SMTPException as exc: reminder.status = "failed" diff --git a/backend/app/services/wechat_stats.py b/backend/app/services/wechat_stats.py index 3065dc5..341aa10 100644 --- a/backend/app/services/wechat_stats.py +++ b/backend/app/services/wechat_stats.py @@ -3,11 +3,12 @@ 提供每日统计采集、多维度聚合计算、趋势数据查询等功能。 """ -from datetime import date, datetime, timedelta +from datetime import date, timedelta from sqlalchemy import and_, func from sqlalchemy.orm import Session +from app.core.timezone import utc_now from app.models.content import Content from app.models.publish_record import PublishRecord from app.models.wechat_stats import WechatArticleStat, WechatStatsAggregate @@ -39,7 +40,7 @@ def create_daily_stat( for key, value in data.items(): if key not in ("publish_record_id", "stat_date"): setattr(existing, key, value) - existing.collected_at = datetime.utcnow() + existing.collected_at = utc_now() db.commit() db.refresh(existing) return existing diff --git a/docker-compose.yml b/docker-compose.yml index ae8f6e7..165ee6c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,6 +8,8 @@ services: - ./data:/app/data env_file: - ./backend/.env + environment: + TZ: ${APP_TIMEZONE:-Asia/Shanghai} restart: unless-stopped healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8000/api/health"] From 
9c7bab3c88c1bdb3669a84a22d3264ab15fad6b1 Mon Sep 17 00:00:00 2001 From: Zhenyu Zheng Date: Tue, 24 Feb 2026 23:11:57 +0800 Subject: [PATCH 2/3] =?UTF-8?q?fix:=20=E4=BF=AE=E5=A4=8D=20CI=20=E5=A4=B1?= =?UTF-8?q?=E8=B4=A5=E2=80=94=E2=80=94=E8=BF=81=E7=A7=BB=E4=B8=B4=E6=97=B6?= =?UTF-8?q?=E8=A1=A8=E3=80=81lint=20import=20=E6=8E=92=E5=BA=8F=E5=8F=8A?= =?UTF-8?q?=20datetime.UTC=20=E5=88=AB=E5=90=8D?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 删除迁移中 _alembic_tmp_campaign_contacts 的 drop/create 操作, 避免全新数据库执行 alembic upgrade head 时报 no such table 错误 - timezone.py / ics.py:将 timezone as _tz / _tz.utc 改为 datetime.UTC (ruff UP017) - auth.py、community_dashboard.py、ics.py:修正 import 块排序 (ruff I001) --- ...\227\266\345\214\272\347\232\204_datetime.py" | 16 ---------------- backend/app/api/auth.py | 2 +- backend/app/api/community_dashboard.py | 2 +- backend/app/core/timezone.py | 7 +++---- backend/app/services/ics.py | 6 +++--- 5 files changed, 8 insertions(+), 25 deletions(-) diff --git "a/backend/alembic/versions/986ddbdad1d7_\345\260\206\346\227\266\351\227\264\346\210\263\345\210\227\346\224\271\344\270\272\345\270\246\346\227\266\345\214\272\347\232\204_datetime.py" "b/backend/alembic/versions/986ddbdad1d7_\345\260\206\346\227\266\351\227\264\346\210\263\345\210\227\346\224\271\344\270\272\345\270\246\346\227\266\345\214\272\347\232\204_datetime.py" index 3a08fdf..75697e9 100644 --- "a/backend/alembic/versions/986ddbdad1d7_\345\260\206\346\227\266\351\227\264\346\210\263\345\210\227\346\224\271\344\270\272\345\270\246\346\227\266\345\214\272\347\232\204_datetime.py" +++ "b/backend/alembic/versions/986ddbdad1d7_\345\260\206\346\227\266\351\227\264\346\210\263\345\210\227\346\224\271\344\270\272\345\270\246\346\227\266\345\214\272\347\232\204_datetime.py" @@ -20,7 +20,6 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('_alembic_tmp_campaign_contacts') with op.batch_alter_table('campaign_contacts', schema=None) as batch_op: batch_op.drop_constraint('uq_campaign_contact', type_='unique') batch_op.create_index(batch_op.f('ix_campaign_contacts_id'), ['id'], unique=False) @@ -496,19 +495,4 @@ def downgrade() -> None: batch_op.drop_index(batch_op.f('ix_campaign_contacts_id')) batch_op.create_unique_constraint('uq_campaign_contact', ['campaign_id', 'person_id']) - op.create_table('_alembic_tmp_campaign_contacts', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('campaign_id', sa.INTEGER(), nullable=False), - sa.Column('person_id', sa.INTEGER(), nullable=False), - sa.Column('status', sa.VARCHAR(length=50), server_default=sa.text("'pending'"), nullable=False), - sa.Column('channel', sa.VARCHAR(length=50), nullable=True), - sa.Column('added_by', sa.VARCHAR(length=50), server_default=sa.text("'manual'"), nullable=False), - sa.Column('last_contacted_at', sa.DATETIME(), nullable=True), - sa.Column('notes', sa.TEXT(), nullable=True), - sa.Column('assigned_to_id', sa.INTEGER(), nullable=True), - sa.ForeignKeyConstraint(['assigned_to_id'], ['users.id'], ondelete='SET NULL'), - sa.ForeignKeyConstraint(['campaign_id'], ['campaigns.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['person_id'], ['person_profiles.id'], ondelete='CASCADE'), - sa.PrimaryKeyConstraint('id') - ) # ### end Alembic commands ### diff --git a/backend/app/api/auth.py b/backend/app/api/auth.py index c2e395e..89afe62 100644 --- a/backend/app/api/auth.py +++ b/backend/app/api/auth.py @@ -8,8 +8,8 @@ from app.core.dependencies import get_current_active_superuser, get_current_user from app.core.logging import get_logger from app.core.rate_limit import limiter -from app.core.timezone import utc_now from app.core.security import create_access_token, get_password_hash, verify_password +from app.core.timezone import utc_now from app.database import get_db from app.models import User from 
app.models.password_reset import PasswordResetToken diff --git a/backend/app/api/community_dashboard.py b/backend/app/api/community_dashboard.py index 26371b2..28f79b3 100644 --- a/backend/app/api/community_dashboard.py +++ b/backend/app/api/community_dashboard.py @@ -14,8 +14,8 @@ from sqlalchemy.orm import Session, joinedload from app.core.dependencies import get_current_user, get_user_community_role -from app.core.timezone import utc_now from app.core.logging import get_logger +from app.core.timezone import utc_now from app.database import get_db from app.models import Community, User from app.models.channel import ChannelConfig diff --git a/backend/app/core/timezone.py b/backend/app/core/timezone.py index 9881c5c..263a622 100644 --- a/backend/app/core/timezone.py +++ b/backend/app/core/timezone.py @@ -1,11 +1,10 @@ -from datetime import datetime -from datetime import timezone as _tz +from datetime import UTC, datetime from zoneinfo import ZoneInfo def utc_now() -> datetime: """返回当前 UTC 时间(带时区信息)。""" - return datetime.now(_tz.utc) + return datetime.now(UTC) def get_app_tz() -> ZoneInfo: @@ -18,5 +17,5 @@ def get_app_tz() -> ZoneInfo: def to_app_tz(dt: datetime) -> datetime: """将 datetime 转换为应用时区。用于服务端输出(邮件、ICS 等)。""" if dt.tzinfo is None: - dt = dt.replace(tzinfo=_tz.utc) + dt = dt.replace(tzinfo=UTC) return dt.astimezone(get_app_tz()) diff --git a/backend/app/services/ics.py b/backend/app/services/ics.py index 743007e..0c2d0a5 100644 --- a/backend/app/services/ics.py +++ b/backend/app/services/ics.py @@ -1,6 +1,6 @@ from __future__ import annotations -from datetime import datetime, timedelta, timezone as _tz +from datetime import UTC, datetime, timedelta from app.core.timezone import utc_now from app.models.community import Community @@ -10,8 +10,8 @@ def _format_dt_utc(dt: datetime) -> str: """将 datetime 格式化为 iCalendar UTC 格式(以 Z 结尾)。""" if dt.tzinfo is None: - dt = dt.replace(tzinfo=_tz.utc) - return dt.astimezone(_tz.utc).strftime("%Y%m%dT%H%M%SZ") + dt = 
dt.replace(tzinfo=UTC) + return dt.astimezone(UTC).strftime("%Y%m%dT%H%M%SZ") def build_meeting_ics(meeting: Meeting, community: Community, organizer_email: str) -> bytes: From a1a921cd3b31ed10e324a93946c476f3c5ef54b6 Mon Sep 17 00:00:00 2001 From: Zhenyu Zheng Date: Tue, 24 Feb 2026 23:21:21 +0800 Subject: =?UTF-8?q?fix:=20=E4=BF=AE=E5=A4=8D=20SQLite=20=E8=AF=BB=E5=8F=96=20DateTime=20=E8=BF=94=E5=9B=9E=20naive=20da?= =?UTF-8?q?tetime=20=E5=AF=BC=E8=87=B4=20is=5Fexpired=20=E7=B1=BB=E5=9E=8B?= =?UTF-8?q?=E9=94=99=E8=AF=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit SQLite 本身不存储时区信息,SQLAlchemy 的 SQLite 方言读取 DateTime(timezone=True) 列时返回 naive datetime, 与 utc_now()(offset-aware)比较会引发 TypeError。 在 is_expired 中防御性处理:若 expires_at 无 tzinfo,先附加 UTC 再比较。 --- backend/app/models/password_reset.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/backend/app/models/password_reset.py b/backend/app/models/password_reset.py index 50cf889..8116c23 100644 --- a/backend/app/models/password_reset.py +++ b/backend/app/models/password_reset.py @@ -1,3 +1,5 @@ +from datetime import UTC + from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String from sqlalchemy.orm import relationship @@ -21,7 +23,10 @@ class PasswordResetToken(Base): @property def is_expired(self) -> bool: - return utc_now() > self.expires_at + expires = self.expires_at + if expires.tzinfo is None: + expires = expires.replace(tzinfo=UTC) + return utc_now() > expires @property def is_valid(self) -> bool: