diff --git a/examples/cloudflare-prisma-d1/.gitignore b/examples/cloudflare-prisma-d1/.gitignore new file mode 100644 index 0000000..a8aba72 --- /dev/null +++ b/examples/cloudflare-prisma-d1/.gitignore @@ -0,0 +1,179 @@ +# Logs + +logs +_.log +npm-debug.log_ +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) + +report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json + +# Runtime data + +pids +_.pid +_.seed +\*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover + +lib-cov + +# Coverage directory used by tools like istanbul + +coverage +\*.lcov + +# nyc test coverage + +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) + +.grunt + +# Bower dependency directory (https://bower.io/) + +bower_components + +# node-waf configuration + +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) + +build/Release + +# Dependency directories + +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) + +web_modules/ + +# TypeScript cache + +\*.tsbuildinfo + +# Optional npm cache directory + +.npm + +# Optional eslint cache + +.eslintcache + +# Optional stylelint cache + +.stylelintcache + +# Microbundle cache + +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history + +.node_repl_history + +# Output of 'npm pack' + +\*.tgz + +# Yarn Integrity file + +.yarn-integrity + +# dotenv environment variable files + +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) + +.cache +.parcel-cache + +# Next.js build output + +.next +out + +# Nuxt.js build / generate output + +.nuxt +dist + +# Gatsby files + +.cache/ + +# Comment in the public line in if your project uses Gatsby and not Next.js + +# https://nextjs.org/blog/next-9-1#public-directory-support + +# public + +# vuepress 
build output + +.vuepress/dist + +# vuepress v2.x temp and cache directory + +.temp +.cache + +# Docusaurus cache and generated files + +.docusaurus + +# Serverless directories + +.serverless/ + +# FuseBox cache + +.fusebox/ + +# DynamoDB Local files + +.dynamodb/ + +# TernJS port file + +.tern-port + +# Stores VSCode versions used for testing VSCode extensions + +.vscode-test + +# yarn v2 + +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.\* + +# wrangler project + +.dev.vars +.wrangler/ + +# Pylon project +.pylon + +# Prisma files +prisma/dev.db +.backup \ No newline at end of file diff --git a/examples/cloudflare-prisma-d1/README.md b/examples/cloudflare-prisma-d1/README.md new file mode 100644 index 0000000..ec1fe26 --- /dev/null +++ b/examples/cloudflare-prisma-d1/README.md @@ -0,0 +1,104 @@ +# Deployment Guide for `cloudflare-prisma-d1` πŸš€ + +This guide covers deploying **cloudflare-prisma-d1** (your Pylon + Prisma + D1 example) to Cloudflare Workers using Yarn. + +## Cloudflare Workers (Recommended) ☁️ + +### Prerequisites βœ… + +* A Cloudflare account +* Node.js 18+ and Yarn +* Wrangler CLI installed globally + +### Initial Setup πŸ”§ + +1. **Install Wrangler CLI:** + + ```bash + yarn global add wrangler + ``` + +2. **Authenticate with Cloudflare:** + + ```bash + wrangler login + ``` + +3. **Clone and install dependencies:** + + ```bash + git clone https://github.com/getcronit/pylon.git + cd pylon/examples/cloudflare-prisma-d1 + yarn + ``` + +4. 
**Generate types & build:** + + ```bash + yarn cf-typegen + yarn build + ``` + +## Database Migrations & Seeding πŸ—„οΈ + +We use the `./scripts/migrate.sh` helper for both local and D1 migrations: + +* **Local dev migration:** + + ```bash + yarn migrate:seed # Seed dev (SQLite) if needed + yarn migrate:reset # Reset and reapply SQLite migrations + yarn migrate:status # Show local & remote status + yarn migrate:studio # Launch Prisma Studio + ``` + +* **Prod (D1) migration:** + + ```bash + yarn migrate:deploy # Apply all migrations to Cloudflare D1 + yarn migrate:seed # Seed production (prompted) + yarn d1:info # List D1 tables + yarn d1:backup # Dump D1 schema + data to SQL + ``` + +> βš™οΈ The script auto-skips already applied migrations and creates a temp symlink if needed. + +## Environment Variables πŸ”‘ + +* **Local (`.env`):** + + ```ini + DATABASE_URL="file:./dev.db" + ``` + +* **Cloudflare (Workers):** + + ```bash + wrangler secret put ADMIN_SECRET_KEY + ``` + +* **For prod migrations (if not using `wrangler auth`):** + + ```bash + export CLOUDFLARE_ACCOUNT_ID="your-account-id" + export CLOUDFLARE_DATABASE_ID="your-d1-database-id" + export CLOUDFLARE_D1_TOKEN="your-api-token" + ``` + +## Scripts Overview πŸ“œ + +* `yarn dev` + Start Pylon & Wrangler Dev with hot-reload + +* `yarn start` + Wrangler Dev only + +* `yarn build` + TypeScript compile + +* `yarn deploy` + Build + Wrangler deploy to production + +* `yarn test` + Run Vitest + diff --git a/examples/cloudflare-prisma-d1/package.json b/examples/cloudflare-prisma-d1/package.json new file mode 100644 index 0000000..60076ae --- /dev/null +++ b/examples/cloudflare-prisma-d1/package.json @@ -0,0 +1,47 @@ +{ + "name": "@snek-functions/jaen-agent", + "version": "0.0.2", + "type": "module", + "description": "Generated with `sf new`", + "author": "snek-at", + "config": { + "commitizen": { + "path": "git-cz" + } + }, + "scripts": { + "deploy": "pylon build && wrangler deploy", + "dev": "pylon dev -c 
\"wrangler dev --port 3000\" --client --client-port 3000 --client-path ./clients/jaen-agent/index.ts", + "cf-typegen": "wrangler types", + "start": "wrangler dev", + "test": "vitest", + "build": "tsc", + "migrate:deploy": "./scripts/migrate.sh prod", + "migrate:reset": "./scripts/migrate.sh reset", + "migrate:seed": "./scripts/migrate.sh seed dev", + "migrate:status": "./scripts/migrate.sh status", + "migrate:studio": "./scripts/migrate.sh studio", + "d1:info": "./scripts/migrate.sh d1-info", + "d1:backup": "./scripts/migrate.sh d1-backup" + }, + "dependencies": { + "@getcronit/pylon": "workspace:^", + "@gqty/cli": "^3.2.2", + "@prisma/adapter-d1": "^6.9.0", + "@prisma/client": "^6.9.0", + "gqty": "^3.2.2" + }, + "devDependencies": { + "@cloudflare/vitest-pool-workers": "^0.4.5", + "@cloudflare/workers-types": "^4.20240903.0", + "@getcronit/pylon-dev": "workspace:^", + "prisma": "^6.9.0", + "typescript": "^5.6.3", + "wrangler": "^3.60.3" + }, + "repository": { + "type": "git", + "url": "https://github.com/getcronit/pylon.git" + }, + "homepage": "https://pylon.cronit.io" +} diff --git a/examples/cloudflare-prisma-d1/prisma/schema.prisma b/examples/cloudflare-prisma-d1/prisma/schema.prisma new file mode 100644 index 0000000..780cfa3 --- /dev/null +++ b/examples/cloudflare-prisma-d1/prisma/schema.prisma @@ -0,0 +1,43 @@ +// This is your Prisma schema file, +// learn more about it in the docs: https://pris.ly/d/prisma-schema + +generator client { + provider = "prisma-client-js" + previewFeatures = ["driverAdapters"] +} + +// This `datasource` file isn't used but Prisma ORM still requires the `datasource` definition. +// Your SQLite database file will be managed by D1: +// - A local version will exist in `.wrangler`. +// - The remote version is managed by Cloudflare in the Cloud. 
+datasource db { + provider = "sqlite" + url = "file:./dev.db" +} + +model User { + // Primary key, auto-incrementing integer + id Int @id @default(autoincrement()) + + // Unique user email address + email String @unique + + // User's name + name String + + // Hashed user password + password String + + // JSON array of roles (e.g., ["admin","customer"]) + roles String @default("[]") + + // Creation timestamp, stored as text + createdAt String @map("created_at") + + // Last update timestamp, stored as text + updatedAt String @map("updated_at") + + // Map this model to the existing `user` table in SQLite + // (only needed if you're using the old Drizzle DB from the previous example) + // @@map("user") +} diff --git a/examples/cloudflare-prisma-d1/pylon.d.ts b/examples/cloudflare-prisma-d1/pylon.d.ts new file mode 100644 index 0000000..fffd42a --- /dev/null +++ b/examples/cloudflare-prisma-d1/pylon.d.ts @@ -0,0 +1,9 @@ +import '@getcronit/pylon' + +declare module '@getcronit/pylon' { + interface Bindings { + DB: D1Database + } + + interface Variables {} +} diff --git a/examples/cloudflare-prisma-d1/scripts/migrate.sh b/examples/cloudflare-prisma-d1/scripts/migrate.sh new file mode 100755 index 0000000..be488a2 --- /dev/null +++ b/examples/cloudflare-prisma-d1/scripts/migrate.sh @@ -0,0 +1,323 @@ +#!/usr/bin/env bash +#─────────────────────────────────────────────────────────────────────────────── +# Shenasa Database Migration Script +# +# β–Έ dev : `prisma migrate dev` against local SQLite +# β–Έ prod : Apply every SQL file under prisma/migrations to Cloudflare D1 +# +# New in this version +# ─────────────────── +# β€’ wrangler auto-apply works even if your wrangler.toml lacks migrations_dir +# (the script makes a temporary symlink). +# β€’ Manual fallback now *skips* migrations already recorded in +# the `_prisma_migrations` table, so re-runs never bomb on β€œtable exists”. +# β€’ Requires jq (already used elsewhere for backups). 
+#─────────────────────────────────────────────────────────────────────────────── + +set -e # Abort on first uncaught error + +############################################################################### +# Configurable defaults +############################################################################### +MIGRATIONS_DIR="${MIGRATIONS_DIR:-prisma/migrations}" # Where Prisma stores migrations +D1_NAME="mailpress" # D1 binding / database name + +############################################################################### +# Pretty output helpers +############################################################################### +RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m' +BLUE='\033[0;34m'; PURPLE='\033[0;35m'; CYAN='\033[0;36m'; NC='\033[0m' + +SUCCESS="βœ…"; ERROR="❌"; WARNING="⚠️"; INFO="ℹ️"; DATABASE="πŸ—„οΈ" + +print_info() { echo -e "${BLUE}${INFO} $1${NC}"; } +print_success() { echo -e "${GREEN}${SUCCESS} $1${NC}"; } +print_warning() { echo -e "${YELLOW}${WARNING} $1${NC}"; } +print_error() { echo -e "${RED}${ERROR} $1${NC}"; } +print_header() { + echo -e "${PURPLE}${DATABASE} Shenasa Database Migration Tool${NC}" + echo -e "${CYAN}======================================${NC}" +} + +############################################################################### +# Dependency checks +############################################################################### +check_dependencies() { + print_info "Checking dependencies…" + command -v npx &>/dev/null || { print_error "npx not found"; exit 1; } + command -v wrangler &>/dev/null || { print_error "wrangler not found"; exit 1; } + command -v jq &>/dev/null || { print_error "jq not found"; exit 1; } + print_success "All dependencies present" +} + +############################################################################### +# Environment helpers +############################################################################### +load_env() { + local env_file="${1:-.env}" + if [[ -f "$env_file" 
]]; then + print_info "Loading environment from $env_file" + set -a && source "$env_file" && set +a + print_success "Environment loaded" + else + print_warning "Env file $env_file not found (continuing)" + fi +} + +validate_env() { + local env_type="$1" + print_info "Validating ${env_type} environment variables…" + case "$env_type" in + dev) + [[ -z "$DATABASE_URL" ]] && { print_error "DATABASE_URL missing"; exit 1; } + ;; + prod) + [[ -z "$CLOUDFLARE_ACCOUNT_ID" || -z "$CLOUDFLARE_DATABASE_ID" ]] && { + print_error "Missing Cloudflare credentials (CLOUDFLARE_ACCOUNT_ID/_DATABASE_ID)"; exit 1; } + if [[ -z "$CLOUDFLARE_D1_TOKEN" ]]; then + wrangler whoami &>/dev/null || { + print_error "Run 'wrangler login' or export CLOUDFLARE_D1_TOKEN"; exit 1; } + else + export CLOUDFLARE_API_TOKEN="$CLOUDFLARE_D1_TOKEN" + fi + ;; + esac + print_success "Environment ok" +} + +############################################################################### +# Utility functions +############################################################################### +backup_dev_db() { + local db_file="" + for candidate in "dev.db" "prisma/dev.db" "./dev.db"; do + [[ -f "$candidate" ]] && { db_file="$candidate"; break; } + done + if [[ -n "$db_file" ]]; then + local backup="${db_file}.backup.$(date +%Y%m%d_%H%M%S)" + print_info "SQLite backup β†’ $backup" + cp "$db_file" "$backup" + fi +} + +generate_client() { + print_info "Generating Prisma client" + npx prisma generate + print_success "Prisma client ready" +} + +# Return 0 (success) if a given migration name is already in _prisma_migrations +migration_already_applied() { + local mig_name="$1" + local exists + exists=$(wrangler d1 execute "$D1_NAME" \ + --command="SELECT 1 FROM sqlite_master WHERE type='table' AND name='_prisma_migrations';" \ + --json 2>/dev/null | jq -r '.[].results[0][0] // empty') || true + [[ -z "$exists" ]] && return 1 # table missing β†’ treat as β€œnot applied” + + local applied + applied=$(wrangler d1 execute 
"$D1_NAME" \ + --command="SELECT 1 FROM _prisma_migrations WHERE migration_name='$mig_name' LIMIT 1;" \ + --json 2>/dev/null | jq -r '.[].results[0][0] // empty') || true + [[ -n "$applied" ]] +} + +############################################################################### +# Development workflow +############################################################################### +migrate_dev() { + print_header; load_env ".env"; validate_env dev; backup_dev_db + print_info "Running local Prisma migration (migrate dev)" + npx prisma migrate dev --name "${MIGRATION_NAME:-auto_migration}" + generate_client + print_success "Local migrate dev complete" +} + +############################################################################### +# Production workflow (idempotent) +############################################################################### +migrate_prod() { + print_header; load_env ".env"; validate_env prod + print_warning "This will modify the production Cloudflare D1 database!" + read -rp "Continue? (y/N): " REPLY; [[ ! $REPLY =~ ^[Yy]$ ]] && { echo; print_info "Cancelled"; exit 0; } + + #--------------------------------------------------------------------------- + # Ensure migrations directory exists + #--------------------------------------------------------------------------- + [[ -d "$MIGRATIONS_DIR" ]] || { print_error "Directory '$MIGRATIONS_DIR' not found"; exit 1; } + print_info "Using migrations from $MIGRATIONS_DIR" + + #--------------------------------------------------------------------------- + # Try wrangler’s own migration runner (preferred, if toml is configured) + # If wrangler can’t find the folder, we create a temporary symlink β€˜migrations’ + #--------------------------------------------------------------------------- + TEMP_SYMLINK=false + if [[ ! 
-d "migrations" ]]; then + ln -s "$MIGRATIONS_DIR" migrations + TEMP_SYMLINK=true + fi + + print_info "Trying wrangler d1 migrations apply…" + if wrangler d1 migrations apply "$D1_NAME" --remote; then + print_success "Wrangler applied migrations successfully" + else + print_warning "Wrangler auto-apply failed; falling back to manual execution" + + #----------------------------------------------------------------------- + # Manual loop (idempotent): skip if already in _prisma_migrations + #----------------------------------------------------------------------- + while IFS= read -r -d '' dir; do + MIG_FILE="${dir}/migration.sql" + [[ -f "$MIG_FILE" ]] || continue + MIG_NAME="$(basename "$dir")" + + if migration_already_applied "$MIG_NAME"; then + print_info "$MIG_NAME already applied – skipping" + continue + fi + + print_info "Applying $MIG_NAME" + if wrangler d1 execute "$D1_NAME" --file="$MIG_FILE" --remote; then + print_success "$MIG_NAME applied" + else + print_error "Failed on $MIG_NAME, aborting" + [[ "$TEMP_SYMLINK" == true ]] && rm migrations + exit 1 + fi + done < <(find "$MIGRATIONS_DIR" -maxdepth 1 -mindepth 1 -type d -print0 | sort -z) + + print_success "Manual migration loop finished" + fi + + [[ "$TEMP_SYMLINK" == true ]] && rm migrations + generate_client + print_success "Production migrations complete!" +} + +############################################################################### +# Reset, seed, status, etc. (unchanged behaviour but commented) +############################################################################### +reset_dev() { + print_header; load_env ".env"; validate_env dev + print_warning "All local data will be LOST!" + read -rp "Really reset? (y/N): " REPLY; [[ ! 
$REPLY =~ ^[Yy]$ ]] && { echo; print_info "Cancelled"; exit 0; } + backup_dev_db + npx prisma migrate reset --force + generate_client + print_success "Dev database reset" +} + +seed_db() { + local env_type="${1:-dev}"; print_header; load_env ".env"; validate_env "$env_type" + print_info "Seeding $env_type database" + if [[ "$env_type" == "prod" ]]; then + print_warning "Seeding PRODUCTION data!" + read -rp "Continue? (y/N): " REPLY; [[ ! $REPLY =~ ^[Yy]$ ]] && { echo; print_info "Cancelled"; exit 0; } + fi + npm run db:seed + print_success "Seeding done" +} + +check_status() { + print_header; load_env ".env" + print_info "Local migrate status"; npx prisma migrate status || true + + if [[ -n "$CLOUDFLARE_ACCOUNT_ID" && -n "$CLOUDFLARE_DATABASE_ID" ]]; then + print_info "Remote (D1) table list:" + wrangler d1 execute "$D1_NAME" --command \ + "SELECT name FROM sqlite_master WHERE type='table';" || true + fi +} + +create_migration() { + print_header; load_env ".env"; validate_env dev + MIGRATION_NAME="${MIGRATION_NAME:-$1}" + [[ -z "$MIGRATION_NAME" ]] && { read -rp "Migration name: " MIGRATION_NAME; } + [[ -z "$MIGRATION_NAME" ]] && { print_error "Name required"; exit 1; } + npx prisma migrate dev --name "$MIGRATION_NAME" --create-only + print_success "Migration folder created (not applied)" +} + +studio() { + print_header; load_env ".env"; print_info "Launching Prisma Studio…"; npx prisma studio +} + +execute_d1() { print_header; load_env ".env"; validate_env prod + [[ -z "$1" ]] && { print_error "SQL missing"; exit 1; } + wrangler d1 execute "$D1_NAME" --command="$1" +} + +d1_info() { print_header; load_env ".env"; validate_env prod + print_info "Cloudflare D1 info ($D1_NAME)" + wrangler d1 execute "$D1_NAME" --command \ + "SELECT name FROM sqlite_master WHERE type='table';" +} + +backup_d1() { + print_header; load_env ".env"; validate_env prod + + # 1) Choose filename + local out="d1_backup_$(date +%Y%m%d_%H%M%S).sql" + print_info "Exporting D1 to $out" + + # 2) 
Use Wrangler’s native export command (works on v3+) + # --remote : hit the production DB + # --output ... : write raw SQL dump (schema + data) + if wrangler d1 export "$D1_NAME" --remote --output "$out"; then + print_success "Backup written to $out" + else + print_error "wrangler d1 export failed" + print_info "If you are on an old Wrangler, upgrade with:" + print_info " npm install --save-dev wrangler@latest" + exit 1 + fi +} + +############################################################################### +# CLI parsing +############################################################################### +COMMAND=""; SUBCOMMAND="" +while [[ $# -gt 0 ]]; do + case "$1" in + --name) MIGRATION_NAME="$2"; shift 2;; + --env) ENV_FILE="$2"; shift 2;; + *) [[ -z "$COMMAND" ]] && COMMAND="$1" || SUBCOMMAND="$1"; shift;; + esac +done + +check_dependencies # Always verify before doing anything + +case "$COMMAND" in + dev) migrate_dev;; + prod) migrate_prod;; + reset) reset_dev;; + seed) seed_db "$SUBCOMMAND";; + status) check_status;; + create) create_migration "$SUBCOMMAND";; + studio) studio;; + d1-info) d1_info;; + d1-exec) execute_d1 "$SUBCOMMAND";; + d1-backup) backup_d1;; + ""|help|-h|--help) +cat <<'EOF' +Usage: ./scripts/migrate.sh [options] + +Commands + dev Run dev migrations (SQLite) + prod Apply all migrations to Cloudflare D1 (idempotent) + reset Reset dev DB (DESTROYS DATA) + seed [dev|prod] Seed database + status Show migration status + create [name] Create migration (not applied) + studio Open Prisma Studio + d1-info Show D1 info + d1-exec "SQL" Execute raw SQL on D1 + d1-backup Dump D1 schema to file + +Common options + --name Migration name (dev/create) + --env Alternate .env file +EOF + ;; + *) print_error "Unknown command $COMMAND"; exit 1;; +esac diff --git a/examples/cloudflare-prisma-d1/src/errors/general.errors.ts b/examples/cloudflare-prisma-d1/src/errors/general.errors.ts new file mode 100644 index 0000000..e2ac93b --- /dev/null +++ 
b/examples/cloudflare-prisma-d1/src/errors/general.errors.ts @@ -0,0 +1,37 @@ +import { ServiceError } from "@getcronit/pylon"; + +export class InvalidInputError extends ServiceError { + constructor(message: string) { + super(message, { + code: "INVALID_INPUT", + statusCode: 400, + }); + } +} + +export class NotFoundError extends ServiceError { + constructor(message: string) { + super(message, { + code: "NOT_FOUND", + statusCode: 404, + }); + } +} + +export class AuthorizationError extends ServiceError { + constructor(message: string) { + super(message, { + code: "AUTHORIZATION_ERROR", + statusCode: 403, + }); + } +} + +export class EmailOrUsernameAlreadyExistsError extends ServiceError { + constructor(message: string) { + super(message, { + code: "EMAILORUSERNAMEALREADYEXISTS_ERROR", + statusCode: 403, + }); + } + } \ No newline at end of file diff --git a/examples/cloudflare-prisma-d1/src/errors/user.errors.ts b/examples/cloudflare-prisma-d1/src/errors/user.errors.ts new file mode 100644 index 0000000..fc368b0 --- /dev/null +++ b/examples/cloudflare-prisma-d1/src/errors/user.errors.ts @@ -0,0 +1,24 @@ +// errors/user.errors.ts +import { ServiceError } from "@getcronit/pylon"; + +export class UserNotFoundError extends ServiceError { + constructor(id: string) { + const message = `User with ID '${id}' was not found. 
Please double-check the ID and try again.`; + + super(message, { + statusCode: 404, + code: "USER_NOT_FOUND", + }); + } +} + +export class EmailOrUsernameAlreadyExistsError extends ServiceError { + constructor(loginName: string) { + const message = `Email or username '${loginName}' already exists.`; + + super(message, { + code: "EMAILORUSERNAMEALREADYEXISTS_ERROR", + statusCode: 403, + }); + } +} \ No newline at end of file diff --git a/examples/cloudflare-prisma-d1/src/index.ts b/examples/cloudflare-prisma-d1/src/index.ts new file mode 100644 index 0000000..1d4c5d3 --- /dev/null +++ b/examples/cloudflare-prisma-d1/src/index.ts @@ -0,0 +1,64 @@ +import { app, getContext, getEnv, requireAuth } from "@getcronit/pylon"; +import { UserNotFoundError } from "./errors/user.errors"; +import { PrismaClient } from '@prisma/client' +import { PrismaD1 } from '@prisma/adapter-d1' + +export const graphql = { + Query: { + /** + * Get all users from the database + */ + async users() { + const env: any = getEnv(); + + const adapter = new PrismaD1(env.DB) + const prisma = new PrismaClient({ adapter }) + + const users = await prisma.user.findMany() + return users.map(user => ({ + ...user, + roles: JSON.parse(user.roles) + })) + } + }, + Mutation: { + /** + * Create a new user with the given data + * The password will be hashed before storing it + */ + async userCreate(data: { + name: string + email: string + password: string + roles: string[] + }) { + const env: any = getEnv(); + + const adapter = new PrismaD1(env.DB) + const prisma = new PrismaClient({ adapter }) + + // Hash the password with Web Crypto (available in Workers) + const digest = await crypto.subtle.digest( + 'SHA-256', + new TextEncoder().encode(data.password) + ) + const password = Array.from(new Uint8Array(digest)) + .map(b => b.toString(16).padStart(2, '0')) + .join('') + + const user = await prisma.user.create({ + data: { + ...data, + password, + roles: JSON.stringify(data.roles), + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString() + } + }) + + return user + } + } +} + +export default app diff --git a/examples/cloudflare-prisma-d1/tsconfig.json b/examples/cloudflare-prisma-d1/tsconfig.json new file mode 100644 index 0000000..f1614b8 --- /dev/null +++ 
b/examples/cloudflare-prisma-d1/tsconfig.json @@ -0,0 +1,4 @@ +{ + "extends": "@getcronit/pylon/tsconfig.pylon.json", + "include": ["pylon.d.ts", "worker-configuration.d.ts", "src/**/*.ts"] +} diff --git a/examples/cloudflare-prisma-d1/wrangler.toml b/examples/cloudflare-prisma-d1/wrangler.toml new file mode 100644 index 0000000..8dfc1be --- /dev/null +++ b/examples/cloudflare-prisma-d1/wrangler.toml @@ -0,0 +1,109 @@ +#:schema node_modules/wrangler/config-schema.json +name = "cloudflare-prisma-d1" +main = ".pylon/index.js" +compatibility_date = "2024-09-03" +compatibility_flags = ["nodejs_compat_v2"] + +# Automatically place your workloads in an optimal location to minimize latency. +# If you are running back-end logic in a Worker, running it closer to your back-end infrastructure +# rather than the end user may result in better performance. +# Docs: https://developers.cloudflare.com/workers/configuration/smart-placement/#smart-placement +# [placement] +# mode = "smart" + +# Variable bindings. These are arbitrary, plaintext strings (similar to environment variables) +# Docs: +# - https://developers.cloudflare.com/workers/wrangler/configuration/#environment-variables +# Note: Use secrets to store sensitive data. +# - https://developers.cloudflare.com/workers/configuration/secrets/ +# [vars] +# MY_VARIABLE = "production_value" + +# Bind the Workers AI model catalog. Run machine learning models, powered by serverless GPUs, on Cloudflare’s global network +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#workers-ai +# [ai] +# binding = "AI" + +# Bind an Analytics Engine dataset. Use Analytics Engine to write analytics within your Pages Function. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#analytics-engine-datasets +# [[analytics_engine_datasets]] +# binding = "MY_DATASET" + +# Bind a headless browser instance running on Cloudflare's global network. 
+# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#browser-rendering +# [browser] +# binding = "MY_BROWSER" + +# Bind a D1 database. D1 is Cloudflare’s native serverless SQL database. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#d1-databases +[[d1_databases]] +binding = "DB" +database_name = "YOUR DB NAME" +database_id = "YOUR DB ID" +migrations_dir = "prisma/migrations" + +# Bind a dispatch namespace. Use Workers for Platforms to deploy serverless functions programmatically on behalf of your customers. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#dispatch-namespace-bindings-workers-for-platforms +# [[dispatch_namespaces]] +# binding = "MY_DISPATCHER" +# namespace = "my-namespace" + +# Bind a Durable Object. Durable objects are a scale-to-zero compute primitive based on the actor model. +# Durable Objects can live for as long as needed. Use these when you need a long-running "server", such as in realtime apps. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#durable-objects +# [[durable_objects.bindings]] +# name = "MY_DURABLE_OBJECT" +# class_name = "MyDurableObject" + +# Durable Object migrations. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#migrations +# [[migrations]] +# tag = "v1" +# new_classes = ["MyDurableObject"] + +# Bind a Hyperdrive configuration. Use to accelerate access to your existing databases from Cloudflare Workers. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#hyperdrive +# [[hyperdrive]] +# binding = "MY_HYPERDRIVE" +# id = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + +# Bind a KV Namespace. Use KV as persistent storage for small key-value pairs. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#kv-namespaces +# [[kv_namespaces]] +# binding = "MY_KV_NAMESPACE" +# id = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + +# Bind an mTLS certificate. 
Use to present a client certificate when communicating with another service. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#mtls-certificates +# [[mtls_certificates]] +# binding = "MY_CERTIFICATE" +# certificate_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + +# Bind a Queue producer. Use this binding to schedule an arbitrary task that may be processed later by a Queue consumer. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#queues +# [[queues.producers]] +# binding = "MY_QUEUE" +# queue = "my-queue" + +# Bind a Queue consumer. Queue Consumers can retrieve tasks scheduled by Producers to act on them. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#queues +# [[queues.consumers]] +# queue = "my-queue" + +# Bind an R2 Bucket. Use R2 to store arbitrarily large blobs of data, such as files. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#r2-buckets +# [[r2_buckets]] +# binding = "MY_BUCKET" +# bucket_name = "my-bucket" + +# Bind another Worker service. Use this binding to call another Worker without network overhead. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#service-bindings +# [[services]] +# binding = "MY_SERVICE" +# service = "my-service" + +# Bind a Vectorize index. Use to store and query vector embeddings for semantic search, classification and other vector search use-cases. +# Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#vectorize-indexes +# [[vectorize]] +# binding = "MY_INDEX" +# index_name = "my-index"