commit 8cd4b943a5cc51dc9460282c850f6ccfec51b777 Author: Nikita Date: Sun Dec 21 13:37:19 2025 -0800 . diff --git a/.ai/commit-checkpoints.json b/.ai/commit-checkpoints.json new file mode 100644 index 00000000..02cb3a23 --- /dev/null +++ b/.ai/commit-checkpoints.json @@ -0,0 +1,7 @@ +{ + "last_commit": { + "timestamp": "2025-12-21T20:02:45.816734+00:00", + "session_id": "019b4281-5d8b-7270-9f5b-9baaa3ab5b47", + "last_entry_timestamp": "2025-12-21T20:02:42.811Z" + } +} \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..387fac49 --- /dev/null +++ b/.gitignore @@ -0,0 +1,46 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +# Dependencies +node_modules +.pnpm-store/ + +# Build outputs +dist +dist-ssr +.wrangler + +# Environment variables +.env +.env.local +.env.*.local +.dev.vars + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? + +# OS files +Thumbs.db + +test_scripts/ +.conductor + +# Swift +.build/ + +# Rust +target/ diff --git a/.npmrc b/.npmrc new file mode 100644 index 00000000..fbf7f9a7 --- /dev/null +++ b/.npmrc @@ -0,0 +1,2 @@ +engine-strict=true +auto-install-peers=true diff --git a/claude.md b/claude.md new file mode 100644 index 00000000..6d306359 --- /dev/null +++ b/claude.md @@ -0,0 +1,234 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Monorepo Architecture + +This is a pnpm workspace monorepo with two packages: + +- **`packages/worker`**: Cloudflare Worker (backend) +- **`packages/web`**: TanStack Start app (frontend) + +### Key Architectural Pattern: Worker RPC via Service Bindings + +The web package communicates with the worker package through **two mechanisms**: + +1. **HTTP API** (traditional): REST endpoints exposed by Hono in `packages/worker/src/index.ts` +2. **RPC calls** (service bindings): Type-safe method calls via `WorkerRpc` class in `packages/worker/src/rpc.ts` + +**Critical understanding**: The worker exports TWO things from `src/index.ts`: + +- `export default app` - Hono HTTP handler (default export) +- `export { WorkerRpc } from './rpc'` - Named export for RPC entrypoint + +The web package's `wrangler.jsonc` configures a service binding: + +```jsonc +"services": [{ + "binding": "WORKER_RPC", + "service": "fullstack-monorepo-template-worker", + "entrypoint": "WorkerRpc" // References the named export +}] +``` + +### Type Safety Across Packages + +The web package imports types directly from the worker package: + +```typescript +// packages/web/env.d.ts +import type { WorkerRpc } from '../worker/src/rpc'; +``` + +This creates a **direct TypeScript dependency** between packages. The monorepo structure enables this cross-package type sharing. 
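+
+For the RPC calls themselves to be typed, the binding can be declared against the `WorkerRpc` class. A minimal sketch of what such a declaration might look like, assuming the `Service` helper type from `@cloudflare/workers-types` (the actual `env.d.ts` in `packages/web` may be structured differently):
+
+```typescript
+// Hypothetical env.d.ts sketch — adjust to match the real file in packages/web.
+import type { WorkerRpc } from '../worker/src/rpc';
+
+interface CloudflareEnv {
+  // `Service<WorkerRpc>` exposes the entrypoint's methods as typed RPC stubs.
+  WORKER_RPC: Service<WorkerRpc>;
+}
+```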
+ +### TanStack Start Server Context + +In TanStack Start server functions/loaders, access the RPC binding via: + +```typescript +import { getServerContext } from '@tanstack/react-start/server'; + +const { WORKER_RPC } = getServerContext().cloudflare.env; +const result = await WORKER_RPC.sayHello('World'); +``` + +## Commands + +### Development + +```bash +# Run both services in separate terminals +pnpm dev:worker # Worker on localhost:8787 +pnpm dev:web # Web on localhost:3000 + +# Or run just one +pnpm --filter @linsa/worker dev +pnpm --filter @linsa/web dev +``` + +### Testing + +```bash +# Run all tests +pnpm test + +# Run tests for specific package +pnpm --filter @linsa/worker test +pnpm --filter @linsa/web test + +# Run tests in watch mode (within package directory) +cd packages/worker && pnpm test --watch +``` + +### Linting & Formatting + +```bash +pnpm lint # Lint all packages + check formatting +pnpm lint:fix # Fix linting issues in all packages +pnpm format # Format all code +pnpm format:check # Check formatting without changes +``` + +### Deployment + +```bash +# Deploy both packages +pnpm deploy + +# Deploy individually +pnpm deploy:worker +pnpm deploy:web + +# Login to Cloudflare first (one-time) +cd packages/worker && pnpm wrangler login +``` + +### Working with Workspace Packages + +```bash +# Add dependency to specific package +pnpm --filter @linsa/worker add +pnpm --filter @linsa/web add + +# Add dev dependency +pnpm --filter @linsa/worker add -D +``` + +## Adding New RPC Methods + +When adding RPC methods that the web package will call: + +1. **Add method to `packages/worker/src/rpc.ts`**: + +```typescript +export class WorkerRpc extends WorkerEntrypoint { + async myNewMethod(param: string): Promise { + // implementation + } +} +``` + +2. **TypeScript will automatically provide types** in the web package because `env.d.ts` imports the `WorkerRpc` type + +3. **Call from web package** in any server function: + +```typescript +const { WORKER_RPC } = getServerContext().cloudflare.env; +const result = await WORKER_RPC.myNewMethod('value'); +``` + +4. **Optional**: Add helper function in `packages/web/src/lib/worker-rpc.ts` for convenience + +## Adding Cloudflare Bindings + +To add KV, D1, R2, or other bindings to the worker: + +1. Update `packages/worker/wrangler.jsonc`: + +```jsonc +{ + "kv_namespaces": [ + { + "binding": "MY_KV", + "id": "your-namespace-id", + }, + ], +} +``` + +2. Update TypeScript types in `packages/worker/src/index.ts` or create a separate `env.ts`: + +```typescript +interface Env { + MY_KV: KVNamespace; +} +``` + +3. Access in RPC methods or HTTP handlers: + +```typescript +// In rpc.ts +export class WorkerRpc extends WorkerEntrypoint { + async getData(key: string) { + return await this.env.MY_KV.get(key); + } +} +``` + +## Vitest Configuration for Cloudflare Workers + +The worker package uses `@cloudflare/vitest-pool-workers` for testing. The config pattern is: + +```typescript +// vitest.config.mts +import { defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + pool: '@cloudflare/vitest-pool-workers', + poolOptions: { + workers: { + wrangler: { configPath: './wrangler.jsonc' }, + }, + }, + }, +}); +``` + +**Do not use** `defineWorkersConfig` from `@cloudflare/vitest-pool-workers/config` - it causes TypeScript module resolution issues. 
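+
+With this pool configured, tests run inside workerd and can call the Worker through the `SELF` binding from `cloudflare:test`. A minimal hedged sketch (the route and expected status are illustrative, not taken from this repo's handlers):
+
+```typescript
+// test/index.spec.ts — illustrative only; point it at a route that
+// src/index.ts actually serves and adjust the assertion accordingly.
+import { SELF } from 'cloudflare:test';
+import { describe, expect, it } from 'vitest';
+
+describe('worker HTTP handler', () => {
+  it('responds to a request', async () => {
+    const response = await SELF.fetch('https://example.com/');
+    expect(response.status).toBe(200);
+  });
+});
+```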
+ +## pnpm Workspace Configuration + +The `pnpm-workspace.yaml` includes `onlyBuiltDependencies` for native dependencies: + +```yaml +onlyBuiltDependencies: + - esbuild + - sharp + - workerd +``` + +This ensures these packages are built correctly in the monorepo context. + +## Package Naming Convention + +Packages use the `@linsa/*` scope: + +- `@linsa/worker` +- `@linsa/web` + +When filtering commands, use these exact names: `pnpm --filter @linsa/worker ` + +## ESLint Configuration + +The project uses **ESLint v9** with the new flat config format (`eslint.config.js`). + +Key points: + +- Uses `@typescript-eslint/eslint-plugin` v8+ (compatible with ESLint 9) +- Configured for both TypeScript and TSX/JSX files +- The `no-undef` rule is disabled for TypeScript files (TypeScript handles this) +- Ignores: `node_modules/`, `dist/`, `.wrangler/`, `build/` + +Both packages (`worker` and `web`) have `lint` and `lint:fix` scripts that use the shared root config. diff --git a/cli/go-live.sh b/cli/go-live.sh new file mode 100755 index 00000000..71a63afa --- /dev/null +++ b/cli/go-live.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# Go live on linsa.io +# Usage: ./go-live.sh [start|stop] + +ACTION=${1:-start} +HLS_URL="http://65.108.248.119:8080/hls/stream.m3u8" + +if [ "$ACTION" = "start" ]; then + echo "Going live on linsa.io..." + + # Start the stream capture in background + cd /Users/nikiv/org/linsa/linsa/cli/stream + .build/release/stream-capture start 65.108.248.119 6000 & + STREAM_PID=$! + echo $STREAM_PID > /tmp/stream.pid + + echo "Stream started (PID: $STREAM_PID)" + echo "HLS URL: $HLS_URL" + echo "" + echo "To stop: ./go-live.sh stop" + +elif [ "$ACTION" = "stop" ]; then + echo "Stopping stream..." + + if [ -f /tmp/stream.pid ]; then + kill $(cat /tmp/stream.pid) 2>/dev/null + rm /tmp/stream.pid + fi + + # Also kill any lingering stream-capture processes + pkill -f stream-capture 2>/dev/null + + echo "Stream stopped" +fi diff --git a/cli/stream-mac.sh b/cli/stream-mac.sh new file mode 100755 index 00000000..c8478a2d --- /dev/null +++ b/cli/stream-mac.sh @@ -0,0 +1,29 @@ +#!/bin/bash +# Stream Mac screen + audio to Cloudflare Stream via RTMPS +# Uses VideoToolbox for zero-CPU hardware encoding + +# Device 2 = Capture screen 0 +# Device 1 = MacBook Pro Microphone (audio) + +# Cloudflare Stream credentials (set STREAM_KEY env var or in ~/.config/linsa/stream.env) +RTMPS_URL="rtmps://live.cloudflare.com:443/live/" + +if [ -z "$STREAM_KEY" ]; then + if [ -f ~/.config/linsa/stream.env ]; then + source ~/.config/linsa/stream.env + fi +fi + +if [ -z "$STREAM_KEY" ]; then + echo "Error: STREAM_KEY not set" + echo "Set it via: export STREAM_KEY=your_key" + echo "Or create ~/.config/linsa/stream.env with: STREAM_KEY=your_key" + exit 1 +fi + +exec ffmpeg -f avfoundation -capture_cursor 1 -framerate 30 -i "2:1" \ + -c:v h264_videotoolbox -b:v 4500k -maxrate 4500k -bufsize 9000k \ + -profile:v high -pix_fmt yuv420p \ + -g 60 -keyint_min 60 \ + -c:a aac -b:a 128k -ar 48000 -ac 2 \ + -f flv "${RTMPS_URL}${STREAM_KEY}" diff --git a/cli/stream/Package.swift b/cli/stream/Package.swift new file mode 100644 index 00000000..214fdbf5 --- /dev/null +++ b/cli/stream/Package.swift @@ -0,0 +1,25 @@ +// swift-tools-version:5.9 +import PackageDescription + +let package = Package( + name: "stream-capture", + platforms: [.macOS(.v13)], + products: [ + .executable(name: "stream-capture", targets: ["stream-capture"]) + ], + dependencies: [], + targets: [ + .executableTarget( + name: "stream-capture", + dependencies: [], + 
linkerSettings: [ + .linkedFramework("ScreenCaptureKit"), + .linkedFramework("VideoToolbox"), + .linkedFramework("CoreMedia"), + .linkedFramework("CoreVideo"), + .linkedFramework("AVFoundation"), + .linkedFramework("Network"), + ] + ) + ] +) diff --git a/cli/stream/Sources/stream-capture/main.swift b/cli/stream/Sources/stream-capture/main.swift new file mode 100644 index 00000000..29cde1f7 --- /dev/null +++ b/cli/stream/Sources/stream-capture/main.swift @@ -0,0 +1,428 @@ +import Foundation +import ScreenCaptureKit +import VideoToolbox +import CoreMedia +import Network +import AVFoundation + +// MARK: - Zero-CPU Screen Capture +// Uses ScreenCaptureKit (GPU) → VideoToolbox (GPU) → Network +// No pixel data ever touches the CPU + +@main +struct StreamCapture { + static func main() async { + let args = CommandLine.arguments + + if args.count < 2 { + printUsage() + return + } + + switch args[1] { + case "start": + let host = args.count > 2 ? args[2] : "65.108.248.119" + let port = args.count > 3 ? UInt16(args[3]) ?? 6000 : 6000 + await startStreaming(host: host, port: port) + + case "displays": + await listDisplays() + + case "test": + await testCapture() + + default: + printUsage() + } + } + + static func printUsage() { + print(""" + stream-capture - Zero-CPU screen streaming + + Usage: + stream-capture start [host] [port] - Stream Zed window to host (default: 65.108.248.119:6000) + stream-capture displays - List available displays + stream-capture test - Test capture without streaming + + Architecture: + ScreenCaptureKit (GPU) → VideoToolbox H.264 (GPU) → UDP → Linux + + The entire pipeline runs on GPU. CPU usage should be <1%. + """) + } + + static func listDisplays() async { + do { + let content = try await SCShareableContent.current + print("Available displays:") + for (i, display) in content.displays.enumerated() { + print(" [\(i)] \(display.width)x\(display.height) - Display \(display.displayID)") + } + } catch { + print("Error: \(error)") + } + } + + static func testCapture() async { + print("Testing capture (5 seconds)...") + + do { + let capturer = try await ZeroCPUCapturer.create() + try await capturer.startCapture() + + try await Task.sleep(for: .seconds(5)) + + await capturer.stopCapture() + let count = await capturer.frameCount + print("Test complete. Frames captured: \(count)") + print("Average FPS: \(Double(count) / 5.0)") + } catch { + print("Error: \(error)") + } + } + + static func startStreaming(host: String, port: UInt16) async { + print("Starting zero-CPU stream to \(host):\(port)") + print("Press Ctrl+C to stop\n") + + do { + let streamer = ZeroCPUStreamer(host: host, port: port) + try await streamer.start() + + // Keep running until interrupted + await withCheckedContinuation { (_: CheckedContinuation) in + signal(SIGINT) { _ in + print("\nStopping...") + exit(0) + } + dispatchMain() + } + } catch { + if let error = error as? LocalizedError, let description = error.errorDescription { + print("Error: \(description)") + } else { + print("Error: \(error)") + } + } + } +} + +// MARK: - Zero-CPU Capturer + +actor ZeroCPUCapturer: NSObject, SCStreamDelegate, SCStreamOutput { + private var stream: SCStream? + private var display: SCDisplay? + private var window: SCWindow? + private var captureSize: CGSize? + var frameCount = 0 + var onFrame: ((CMSampleBuffer) -> Void)? + + override init() { + super.init() + } + + static func create(onlyAppName: String? 
= nil) async throws -> ZeroCPUCapturer { + let capturer = ZeroCPUCapturer() + + let content = try await SCShareableContent.current + if let onlyAppName { + let normalizedName = onlyAppName.lowercased() + let candidateWindows = content.windows.filter { window in + guard let app = window.owningApplication else { return false } + let appName = app.applicationName.lowercased() + let bundleId = app.bundleIdentifier?.lowercased() + return appName == normalizedName || bundleId == normalizedName + } + guard let window = candidateWindows.max(by: { $0.frame.width * $0.frame.height < $1.frame.width * $1.frame.height }) else { + throw CaptureError.noWindowForApp(onlyAppName) + } + await capturer.setWindow(window) + } else { + // Get primary display + guard let display = content.displays.first else { + throw CaptureError.noDisplay + } + await capturer.setDisplay(display) + } + return capturer + } + + func setDisplay(_ display: SCDisplay) { + self.display = display + self.window = nil + self.captureSize = CGSize(width: display.width, height: display.height) + } + + func setWindow(_ window: SCWindow) { + self.window = window + self.display = nil + self.captureSize = window.frame.size + } + + func startCapture() async throws { + guard let captureSize = captureSize else { + throw CaptureError.noCaptureTarget + } + + // Configure for zero-CPU capture + let config = SCStreamConfiguration() + + // Match capture target resolution + let normalizedSize = normalizedCaptureSize(for: captureSize) + config.width = normalizedSize.width + config.height = normalizedSize.height + + // 30 FPS for streaming + config.minimumFrameInterval = CMTime(value: 1, timescale: 30) + + // Queue depth for smooth delivery (like OBS) + config.queueDepth = 8 + + // Show cursor + config.showsCursor = true + + // Use GPU-native pixel format (BGRA for VideoToolbox compatibility) + config.pixelFormat = kCVPixelFormatType_32BGRA + + // Color space for wide gamut + config.colorSpaceName = CGColorSpace.displayP3 + + // Create content filter for display or window capture + let filter: SCContentFilter + if let window = window { + filter = SCContentFilter(desktopIndependentWindow: window) + } else if let display = display { + filter = SCContentFilter(display: display, excludingWindows: []) + } else { + throw CaptureError.noCaptureTarget + } + + // Create stream + stream = SCStream(filter: filter, configuration: config, delegate: self) + + // Add output handler on high-priority queue + try stream?.addStreamOutput( + self, + type: .screen, + sampleHandlerQueue: DispatchQueue(label: "capture", qos: .userInteractive) + ) + + // Start capture + try await stream?.startCapture() + print("Capture started: \(config.width)x\(config.height) @ 30fps") + } + + func stopCapture() async { + try? await stream?.stopCapture() + stream = nil + } + + // SCStreamOutput - receives frames on GPU + nonisolated func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) { + guard type == .screen else { return } + + Task { + await self.handleFrame(sampleBuffer) + } + } + + private func handleFrame(_ sampleBuffer: CMSampleBuffer) { + frameCount += 1 + onFrame?(sampleBuffer) + } + + // SCStreamDelegate + nonisolated func stream(_ stream: SCStream, didStopWithError error: Error) { + print("Stream stopped with error: \(error)") + } +} + +// MARK: - Zero-CPU Streamer (with VideoToolbox encoding) + +class ZeroCPUStreamer { + private var capturer: ZeroCPUCapturer? + private var encoder: HardwareEncoder? 
+ private var connection: NWConnection? + private let host: String + private let port: UInt16 + + init(host: String, port: UInt16) { + self.host = host + self.port = port + } + + func start() async throws { + // Setup network connection (UDP for low latency) + let endpoint = NWEndpoint.hostPort(host: NWEndpoint.Host(host), port: NWEndpoint.Port(rawValue: port)!) + connection = NWConnection(to: endpoint, using: .udp) + + connection?.stateUpdateHandler = { state in + switch state { + case .ready: + print("Network ready") + case .failed(let error): + print("Network failed: \(error)") + default: + break + } + } + connection?.start(queue: .global(qos: .userInteractive)) + + // Setup capturer + capturer = try await ZeroCPUCapturer.create(onlyAppName: "Zed") + + // Setup hardware encoder with network send callback + let conn = connection + let size = await capturer?.getCaptureSize() ?? CGSize(width: 1920, height: 1080) + let normalizedSize = normalizedCaptureSize(for: size) + encoder = try HardwareEncoder(width: normalizedSize.width, height: normalizedSize.height) { data in + conn?.send(content: data, completion: .idempotent) + } + + // Connect capturer to encoder + let enc = encoder + await capturer?.setOnFrame { sampleBuffer in + enc?.encode(sampleBuffer: sampleBuffer) + } + + // Start capture + try await capturer?.startCapture() + + print("Streaming to \(host):\(port)") + } + + func stop() async { + await capturer?.stopCapture() + connection?.cancel() + } +} + +// MARK: - Hardware H.264 Encoder (VideoToolbox - runs on GPU) + +class HardwareEncoder { + private var session: VTCompressionSession? + private var onEncodedData: (Data) -> Void + + init(width: Int, height: Int, onEncodedData: @escaping (Data) -> Void) throws { + self.onEncodedData = onEncodedData + + // Create hardware compression session + let status = VTCompressionSessionCreate( + allocator: nil, + width: Int32(width), + height: Int32(height), + codecType: kCMVideoCodecType_H264, + encoderSpecification: [ + kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder: true, + kVTVideoEncoderSpecification_RequireHardwareAcceleratedVideoEncoder: true + ] as CFDictionary, + imageBufferAttributes: nil, + compressedDataAllocator: nil, + outputCallback: nil, + refcon: nil, + compressionSessionOut: &session + ) + + guard status == noErr, let session = session else { + throw CaptureError.encoderCreationFailed + } + + // Configure for streaming + VTSessionSetProperty(session, key: kVTCompressionPropertyKey_RealTime, value: kCFBooleanTrue) + VTSessionSetProperty(session, key: kVTCompressionPropertyKey_ProfileLevel, value: kVTProfileLevel_H264_High_AutoLevel) + VTSessionSetProperty(session, key: kVTCompressionPropertyKey_AverageBitRate, value: 4_500_000 as CFNumber) // 4.5 Mbps + VTSessionSetProperty(session, key: kVTCompressionPropertyKey_MaxKeyFrameInterval, value: 60 as CFNumber) // Keyframe every 2s @ 30fps + VTSessionSetProperty(session, key: kVTCompressionPropertyKey_AllowFrameReordering, value: kCFBooleanFalse) // No B-frames for low latency + + VTCompressionSessionPrepareToEncodeFrames(session) + + print("Hardware encoder initialized (VideoToolbox)") + } + + func encode(sampleBuffer: CMSampleBuffer) { + guard let session = session, + let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + return + } + + let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + + VTCompressionSessionEncodeFrame( + session, + imageBuffer: imageBuffer, + presentationTimeStamp: presentationTime, + duration: .invalid, 
+ frameProperties: nil, + infoFlagsOut: nil + ) { [weak self] status, flags, sampleBuffer in + guard status == noErr, let sampleBuffer = sampleBuffer else { return } + self?.handleEncodedFrame(sampleBuffer) + } + } + + private func handleEncodedFrame(_ sampleBuffer: CMSampleBuffer) { + guard let dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) else { return } + + var length = 0 + var dataPointer: UnsafeMutablePointer? + CMBlockBufferGetDataPointer(dataBuffer, atOffset: 0, lengthAtOffsetOut: nil, totalLengthOut: &length, dataPointerOut: &dataPointer) + + if let dataPointer = dataPointer { + let data = Data(bytes: dataPointer, count: length) + onEncodedData(data) + } + } + + deinit { + if let session = session { + VTCompressionSessionInvalidate(session) + } + } +} + +// MARK: - Errors + +enum CaptureError: Error { + case noDisplay + case noCaptureTarget + case noWindowForApp(String) + case encoderCreationFailed + case networkError +} + +extension CaptureError: LocalizedError { + var errorDescription: String? { + switch self { + case .noDisplay: + return "No display available for capture." + case .noCaptureTarget: + return "No capture target configured." + case .noWindowForApp(let appName): + return "No window found for app: \(appName). Make sure Zed is running and a window is visible." + case .encoderCreationFailed: + return "Failed to create the hardware encoder." + case .networkError: + return "Network error while streaming." + } + } +} + +// MARK: - Extensions + +extension ZeroCPUCapturer { + func setOnFrame(_ handler: @escaping (CMSampleBuffer) -> Void) { + onFrame = handler + } + + func getCaptureSize() -> CGSize? { + captureSize + } +} + +private func normalizedCaptureSize(for size: CGSize) -> (width: Int, height: Int) { + let width = max(1, Int(size.width.rounded(.down))) + let height = max(1, Int(size.height.rounded(.down))) + // H.264 encoders typically require even dimensions. + return (width: width - (width % 2), height: height - (height % 2)) +} diff --git a/docs/how-flowglad-is-integrated.md b/docs/how-flowglad-is-integrated.md new file mode 100644 index 00000000..79f14aa0 --- /dev/null +++ b/docs/how-flowglad-is-integrated.md @@ -0,0 +1,330 @@ +# How Flowglad is Integrated + +This document explains the Flowglad billing integration in this codebase. + +## Overview + +Flowglad handles usage-based billing with metered subscriptions. 
Users can: +- Use the app for free with limited requests +- Subscribe to a paid plan for more usage +- Top up credits when needed + +## Architecture + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ Frontend │ +│ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ useBilling() │ │ Pricing UI │ │ +│ │ (Flowglad hook) │ │ (checkout) │ │ +│ └────────┬────────┘ └────────┬────────┘ │ +└───────────┼──────────────────────┼──────────────────────────────┘ + │ │ + ▼ ▼ +┌───────────────────────┐ ┌───────────────────────┐ +│ /api/flowglad/* │ │ /api/usage-events │ +│ (billing endpoints) │ │ (record usage) │ +└───────────┬───────────┘ └───────────┬───────────┘ + │ │ + ▼ ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ Flowglad API │ +│ - Customer management │ +│ - Subscriptions │ +│ - Usage metering │ +│ - Checkout sessions │ +└─────────────────────────────────────────────────────────────────┘ +``` + +## File Structure + +``` +packages/web/src/ +├── lib/ +│ ├── flowglad.ts # FlowgladServer initialization +│ ├── billing.ts # Usage checking & recording logic +│ └── billing-helpers.ts # Utility functions for pricing/usage +└── routes/api/ + ├── flowglad/ + │ └── $.ts # Catch-all route for Flowglad API + └── usage-events.ts # Record usage events +``` + +## Core Files + +### 1. `lib/flowglad.ts` - Server Initialization + +Creates a FlowgladServer instance for a specific user: + +```typescript +import { FlowgladServer } from "@flowglad/server" +import { db } from "@/db/connection" +import { users } from "@/db/schema" +import { eq } from "drizzle-orm" + +export const flowglad = (customerExternalId: string) => { + const env = getEnv() + + if (!env.FLOWGLAD_SECRET_KEY) { + return null + } + + return new FlowgladServer({ + apiKey: env.FLOWGLAD_SECRET_KEY, + customerExternalId, // Maps to user.id + getCustomerDetails: async (externalId: string) => { + // Fetch user details from database + const user = await db().query.users.findFirst({ + where: eq(users.id, externalId), + }) + + if (!user) { + throw new Error(`User not found: ${externalId}`) + } + + return { + email: user.email, + name: user.name ?? undefined, + } + }, + }) +} +``` + +**Key Points:** +- Takes `customerExternalId` which is the user's ID from better-auth +- Fetches customer details (email, name) from database when needed +- Returns `null` if `FLOWGLAD_SECRET_KEY` is not configured + +### 2. `routes/api/flowglad/$.ts` - API Route Handler + +Proxies requests to Flowglad for billing operations: + +```typescript +export const Route = createFileRoute("/api/flowglad/$")({ + server: { + handlers: { + GET: async ({ request, params }) => { + const userId = await getUserId(request) + if (!userId) { + return json({ error: "Unauthorized" }, 401) + } + + const flowgladServer = flowglad(userId) + // ... handle request + }, + POST: async ({ request, params }) => { + // Same pattern for POST + }, + }, + }, +}) +``` + +**Endpoints available:** +- `GET /api/flowglad/billing` - Get user's billing info +- `POST /api/flowglad/checkout` - Create checkout session +- etc. + +### 3. `routes/api/usage-events.ts` - Record Usage + +Records usage events after AI requests: + +```typescript +// POST /api/usage-events +// Body: { usageMeterSlug: "ai_requests", amount: 1 } + +const usageEvent = await flowgladServer.createUsageEvent({ + subscriptionId: currentSubscription.id, + priceSlug: usagePrice.slug, + amount, + transactionId: finalTransactionId, // For idempotency +}) +``` + +### 4. 
`lib/billing.ts` - Usage Logic + +Implements billing business logic: + +```typescript +// Usage limits by tier +const GUEST_FREE_REQUESTS = 5 // No auth required +const AUTH_FREE_REQUESTS_DAILY = 20 // Authenticated, no subscription +const PAID_PLAN_REQUESTS = 1000 // Pro plan per billing period + +// Meter slug (must match Flowglad dashboard) +export const AI_REQUESTS_METER = "ai_requests" + +// Key functions: +export async function checkUsageAllowed(request: Request): Promise +export async function recordUsage(request: Request, amount?: number): Promise +export async function getBillingSummary(request: Request): Promise +``` + +### 5. `lib/billing-helpers.ts` - Utilities + +Helper functions for working with Flowglad data: + +```typescript +// Find usage price by meter slug +findUsagePriceByMeterSlug(usageMeterSlug, pricingModel) + +// Compute total usage credits from subscription +computeUsageTotal(usageMeterSlug, currentSubscription, pricingModel) + +// Check if plan is default/free +isDefaultPlanBySlug(pricingModel, priceSlug) +``` + +## Usage Flow + +### 1. User Makes AI Request + +```typescript +// In /api/chat/ai.ts +const usage = await checkUsageAllowed(request) +if (!usage.allowed) { + return new Response("Usage limit reached", { status: 429 }) +} + +// ... process AI request ... + +// Record usage after success +await recordUsage(request, 1) +``` + +### 2. Frontend Checks Billing + +```typescript +// Using Flowglad React hook +import { useBilling } from "@flowglad/react" // or @flowglad/nextjs + +function Dashboard() { + const billing = useBilling() + + if (!billing.loaded) return + + const usage = billing.checkUsageBalance("ai_requests") + const remaining = usage?.availableBalance ?? 0 + + return
<div>Remaining: {remaining}</div>
+} +``` + +### 3. User Upgrades + +```typescript +// Create checkout session +const handleUpgrade = async () => { + await billing.createCheckoutSession({ + priceSlug: "pro_monthly", + successUrl: `${window.location.origin}/`, + cancelUrl: window.location.href, + quantity: 1, + autoRedirect: true, + }) +} +``` + +## Flowglad Dashboard Setup + +### 1. Create Usage Meter + +In Flowglad dashboard, create a usage meter: +- **Slug**: `ai_requests` +- **Name**: "AI Requests" +- **Type**: Sum +- **Reset**: Per billing period + +### 2. Create Products & Prices + +**Free Plan (default):** +- Default: Yes +- Price: $0/month + +**Pro Plan:** +- Name: "Pro" +- Price slug: `pro_monthly` +- Amount: $7.99/month +- Add usage price for `ai_requests` meter with 1000 included credits + +### 3. Get API Key + +1. Go to Settings → API Keys +2. Create a secret key (starts with `sk_`) +3. Add to environment: + +```bash +# Local development +echo "FLOWGLAD_SECRET_KEY=sk_test_xxx" >> packages/web/.env + +# Production (Cloudflare) +wrangler secret put FLOWGLAD_SECRET_KEY +``` + +## Environment Variables + +```bash +# Required for Flowglad +FLOWGLAD_SECRET_KEY=sk_live_xxx # or sk_test_xxx for testing +``` + +## Testing Locally + +1. Set up Flowglad account and get test API key +2. Add to `.env`: + ``` + FLOWGLAD_SECRET_KEY=sk_test_xxx + ``` +3. Create products/prices in Flowglad dashboard +4. Run the app and test: + - Sign up → user becomes Flowglad customer + - Make AI requests → usage is tracked + - Hit limit → upgrade prompt shown + - Subscribe → checkout flow + - More requests → usage deducted from subscription + +## Debugging + +### Check if Flowglad is configured + +```bash +curl 'http://localhost:3000/api/flowglad/billing' -H 'Cookie: ...' + +# Error: "Flowglad not configured" → FLOWGLAD_SECRET_KEY not set +# Error: "Unauthorized" → User not logged in +# Success: Returns billing data with subscriptions, usage, etc. +``` + +### Check usage balance + +```typescript +const billing = await flowglad(userId).getBilling() +const usage = billing.checkUsageBalance("ai_requests") +console.log("Available:", usage?.availableBalance) +``` + +### Record test usage + +```bash +curl -X POST 'http://localhost:3000/api/usage-events' \ + -H 'Content-Type: application/json' \ + -H 'Cookie: ...' \ + -d '{"usageMeterSlug": "ai_requests", "amount": 1}' +``` + +## Common Issues + +### "Customer not found" + +The user doesn't have a Flowglad customer record yet. This is created automatically when: +- User first accesses billing endpoints +- `getCustomerDetails` is called + +### "No active subscription found" + +User needs to subscribe to a plan. Show them the pricing page. + +### "Usage price not found for meter" + +The `ai_requests` meter exists but no usage price is attached to it in your pricing model. Add a usage price in Flowglad dashboard. diff --git a/docs/production-setup.md b/docs/production-setup.md new file mode 100644 index 00000000..ce090ab2 --- /dev/null +++ b/docs/production-setup.md @@ -0,0 +1,87 @@ +# Production Setup (Cloudflare Workers) + +This app deploys two Workers: +- `@linsa/web`: SSR app + API routes + Electric proxy +- `@linsa/worker`: standalone API bound in `@linsa/web` as `WORKER_RPC` + +The stack expects Postgres over HTTP (Neon or a Postgres behind a Neon HTTP proxy) and ElectricSQL for sync. 
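+
+Cloudflare Workers cannot open raw TCP connections, which is why the database must be reachable over HTTP. As a rough sketch of what that looks like in code, assuming the Neon serverless driver and drizzle-orm's `neon-http` adapter (the repo's actual helper at `@/db/connection` may differ):
+
+```typescript
+// Illustrative only — not the repo's actual connection helper.
+import { neon } from '@neondatabase/serverless';
+import { drizzle } from 'drizzle-orm/neon-http';
+import * as schema from './schema';
+
+export function createDb(databaseUrl: string) {
+  // neon() issues queries over HTTP fetch, which works inside Workers.
+  const sql = neon(databaseUrl);
+  return drizzle(sql, { schema });
+}
+```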
+ +## Prerequisites +- Cloudflare account with Workers enabled and `wrangler` logged in +- Production Postgres reachable over HTTP (Neon recommended, or a Postgres behind a Neon HTTP proxy) +- Electric Cloud account (or self-hosted Electric instance) +- Domain for cookies (`APP_BASE_URL`) +- OpenRouter API key (optional, for AI responses) + +## 1) Database (Postgres) +- Create a Neon database (recommended) and copy the `postgresql://...neon.tech/...` connection string. +- Ensure logical replication (`wal_level=logical`) is enabled. Neon enables it by default; for other Postgres, enable it and allow replication access. +- Electric needs replication on these tables: `users`, `sessions`, `accounts`, `verifications`, `chat_threads`, `chat_messages`. +- If not using Neon, expose your Postgres through a Neon HTTP proxy; Cloudflare Workers cannot talk to raw TCP Postgres. + +## 2) Electric Cloud / Self-hosted Electric +1. Sign up at [Electric Cloud](https://electric-sql.com/product/cloud) or point to your own Electric instance. +2. Create a source connected to your Postgres. +3. Note: + - `ELECTRIC_URL` – Electric endpoint (shape API) + - `ELECTRIC_SOURCE_ID` and `ELECTRIC_SOURCE_SECRET` – only if Electric Cloud auth is enabled + +## 3) Cloudflare Worker configuration +- Optional: rename the `name` fields in `packages/worker/wrangler.jsonc` and `packages/web/wrangler.jsonc`. If you rename the worker, also update `services[0].service` in `packages/web/wrangler.jsonc` so the `WORKER_RPC` binding still points to the right script. +- Set secrets for `@linsa/web` (run inside `packages/web`): +```bash +cd packages/web + +wrangler secret put DATABASE_URL # Neon/Postgres HTTP URL +wrangler secret put BETTER_AUTH_SECRET # generate with: openssl rand -hex 32 +wrangler secret put ELECTRIC_URL # e.g., https://your-electric-host/v1/shape +wrangler secret put ELECTRIC_SOURCE_ID # only if Electric Cloud auth is on +wrangler secret put ELECTRIC_SOURCE_SECRET # only if Electric Cloud auth is on +wrangler secret put OPENROUTER_API_KEY # optional, for real AI replies +``` +- Set non-secret vars: +```bash +wrangler vars set APP_BASE_URL https://your-domain.com # exact origin for cookies +wrangler vars set OPENROUTER_MODEL anthropic/claude-sonnet-4 # optional override +``` +- Prefer `pnpm` wrappers if you want to stay in the monorepo context: +```bash +pnpm --filter @linsa/web exec wrangler whoami +``` +You can also run `f deploy-setup` from the repo root for an interactive secret setup. + +## 4) Deploy +From the repo root: +```bash +pnpm deploy:worker # deploy @linsa/worker +pnpm deploy:web # build + deploy @linsa/web +# or +pnpm deploy # deploy both +# Flow shortcut +f deploy +``` + +## 5) Verify +1. Open your production URL and confirm auth flows (sign up / sign in). +2. Create a chat thread/message; check Electric sync across two tabs. +3. Hit `/api/chat/ai` to confirm OpenRouter responses (or expect the demo reply when no key is set). +4. Tail logs if needed: `pnpm --filter @linsa/web exec wrangler tail`. 
+ +## Environment Variables + +| Variable | Required | Description | +|----------|----------|-------------| +| `DATABASE_URL` | Yes | Postgres URL reachable over HTTP (Neon or Postgres behind Neon proxy) | +| `BETTER_AUTH_SECRET` | Yes | Secret for auth/session signing (32+ chars) | +| `ELECTRIC_URL` | Yes | Electric Cloud/self-host URL (shape endpoint) | +| `ELECTRIC_SOURCE_ID` | Conditional | Needed when Electric Cloud auth is enabled | +| `ELECTRIC_SOURCE_SECRET` | Conditional | Needed with `ELECTRIC_SOURCE_ID` | +| `APP_BASE_URL` | Yes | Production origin for cookies/CORS (e.g., https://app.example.com) | +| `OPENROUTER_API_KEY` | No | Enables real AI responses | +| `OPENROUTER_MODEL` | No | AI model id (default: `anthropic/claude-sonnet-4`) | + +## Troubleshooting +- Auth: `APP_BASE_URL` must match your deployed origin; rotate `BETTER_AUTH_SECRET` only when you intend to invalidate sessions. +- Database: use an HTTP-capable connection string; ensure logical replication is on and tables exist; allow Cloudflare egress to the DB host. +- Electric: confirm the source is healthy and credentials are set; verify `where` filters in logs if shapes look empty. +- AI chat: set `OPENROUTER_API_KEY`; without it you’ll see the demo reply instead of model output. diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 00000000..e6a69a48 --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,47 @@ +import js from "@eslint/js" +import tseslint from "@typescript-eslint/eslint-plugin" +import tsparser from "@typescript-eslint/parser" +import globals from "globals" + +export default [ + js.configs.recommended, + { + files: ["**/*.{js,mjs,cjs,ts,tsx}"], + languageOptions: { + parser: tsparser, + parserOptions: { + ecmaVersion: 2021, + sourceType: "module", + ecmaFeatures: { + jsx: true, + }, + }, + globals: { + ...globals.browser, + ...globals.node, + ...globals.es2021, + }, + }, + plugins: { + "@typescript-eslint": tseslint, + }, + rules: { + ...tseslint.configs.recommended.rules, + "@typescript-eslint/no-unused-vars": [ + "warn", + { argsIgnorePattern: "^_" }, + ], + "@typescript-eslint/no-explicit-any": "warn", + // Disable no-undef for TypeScript files as TypeScript handles this + "no-undef": "off", + }, + }, + { + ignores: [ + "**/node_modules/**", + "**/dist/**", + "**/.wrangler/**", + "**/build/**", + ], + }, +] diff --git a/flow.toml b/flow.toml new file mode 100644 index 00000000..36d90322 --- /dev/null +++ b/flow.toml @@ -0,0 +1,1489 @@ +version = 1 +name = "linsa" + +[deps] +node = "node" +pnpm = "pnpm" +docker = "docker" + +[[tasks]] +name = "setup" +interactive = true +command = """ +set -euo pipefail + +ROOT="$(pwd)" +WEB_DIR="$ROOT/packages/web" +ENV_FILE="$WEB_DIR/.env" +EXAMPLE_FILE="$WEB_DIR/.env.example" + +echo "=== Linsa Setup ===" +echo "" + +# 1. Create .env from template if needed +if [ ! -f "$ENV_FILE" ]; then + cp "$EXAMPLE_FILE" "$ENV_FILE" + echo "✓ Created $ENV_FILE from template" +else + echo "✓ $ENV_FILE exists" +fi + +# 2. 
Generate secrets and set defaults +node - <<'NODE' +const fs = require("fs") +const path = require("path") +const crypto = require("crypto") + +const envPath = path.join("packages", "web", ".env") +let text = fs.readFileSync(envPath, "utf8") + +const ensureKey = (key, value, shouldReplace = () => false) => { + const pattern = new RegExp(`^${key}=.*$`, "m") + if (pattern.test(text)) { + const current = text.match(pattern)[0].split("=")[1] + if (current.trim() === "" || shouldReplace(current.trim())) { + text = text.replace(pattern, `${key}=${value}`) + console.log(` Set ${key}`) + } + } else { + text += `\n${key}=${value}\n` + console.log(` Added ${key}`) + } +} + +ensureKey( + "BETTER_AUTH_SECRET", + crypto.randomBytes(32).toString("hex"), + (current) => current === "your-strong-secret-at-least-32-chars" +) +ensureKey("APP_BASE_URL", "http://localhost:5613") + +fs.writeFileSync(envPath, text) +NODE + +# 3. Install dependencies +echo "" +echo "Installing dependencies..." +pnpm install + +# 4. Check DATABASE_URL +echo "" +DATABASE_URL=$(grep -E "^DATABASE_URL=" "$ENV_FILE" 2>/dev/null | cut -d'=' -f2- || true) + +if [ -z "$DATABASE_URL" ] || [ "$DATABASE_URL" = "" ] || [[ "$DATABASE_URL" == *"user:password"* ]]; then + echo "=== Database Setup ===" + echo "" + echo "You need a Neon Postgres database." + echo "Get your connection string from: https://console.neon.tech" + echo "" + read -p "Paste your Neon DATABASE_URL (or press Enter to skip): " NEW_DB_URL + + if [ -n "$NEW_DB_URL" ]; then + # Update .env with the new DATABASE_URL + if grep -q "^DATABASE_URL=" "$ENV_FILE"; then + sed -i '' "s|^DATABASE_URL=.*|DATABASE_URL=$NEW_DB_URL|" "$ENV_FILE" + else + echo "DATABASE_URL=$NEW_DB_URL" >> "$ENV_FILE" + fi + DATABASE_URL="$NEW_DB_URL" + echo "✓ DATABASE_URL saved" + fi +fi + +# 5. Push schema to database if DATABASE_URL is set +if [ -n "$DATABASE_URL" ] && [ "$DATABASE_URL" != "" ] && [[ "$DATABASE_URL" != *"user:password"* ]]; then + echo "" + echo "Pushing schema to database..." + cd "$WEB_DIR" + pnpm drizzle-kit push --force 2>&1 | tail -5 + echo "✓ Database schema ready" + cd "$ROOT" +fi + +# 6. Summary +echo "" +echo "=== Setup Complete ===" +echo "" + +# Check what's configured +DB_SET=$(grep -E "^DATABASE_URL=.+" "$ENV_FILE" 2>/dev/null | grep -v "DATABASE_URL=$" | grep -v "user:password" | wc -l | tr -d ' ') +AI_SET=$(grep -E "^OPENROUTER_API_KEY=.+" "$ENV_FILE" 2>/dev/null | grep -v "OPENROUTER_API_KEY=$" | wc -l | tr -d ' ') + +if [ "$DB_SET" = "1" ]; then + echo "✓ Database: Connected" +else + echo "○ Database: Not configured (add DATABASE_URL to packages/web/.env)" +fi + +if [ "$AI_SET" = "1" ]; then + echo "✓ AI Chat: Configured" +else + echo "○ AI Chat: Not configured (add OPENROUTER_API_KEY for AI responses)" +fi + +echo "" +echo "Run 'f dev' to start the web server on http://localhost:5613" +""" +description = "Set up Linsa: create .env, install deps, push schema to Neon." +dependencies = ["node", "pnpm"] +shortcuts = ["s"] + +[[tasks]] +name = "seed" +command = """ +set -euo pipefail + +ROOT="$(pwd)" +WEB_DIR="$ROOT/packages/web" +ENV_FILE="$WEB_DIR/.env" + +if [ ! -f "$ENV_FILE" ]; then + echo "Missing $ENV_FILE. Run 'f setup' first." + exit 1 +fi + +set -a +. 
"$ENV_FILE" +set +a + +if [ -z "${DATABASE_URL:-}" ] || [[ "$DATABASE_URL" == "postgresql://user:password@host:5432/dbname" ]]; then + echo "DATABASE_URL is not set or still placeholder in $ENV_FILE" + exit 1 +fi + +pnpm --filter @linsa/web install --silent --ignore-scripts +pnpm --filter @linsa/web run seed +""" +description = "Seed the database with demo user/chat data (requires DATABASE_URL set)." +dependencies = ["node", "pnpm"] + +[[tasks]] +name = "migrate-db" +command = """ +set -euo pipefail + +ROOT="$(pwd)" +WEB_DIR="$ROOT/packages/web" +ENV_FILE="$WEB_DIR/.env" + +if [ ! -f "$ENV_FILE" ]; then + echo "Missing $ENV_FILE. Run 'f setup' first." + exit 1 +fi + +set -a +. "$ENV_FILE" +set +a + +if [ -z "${DATABASE_URL:-}" ] || [[ "$DATABASE_URL" == "postgresql://user:password@host:5432/dbname" ]]; then + echo "DATABASE_URL is not set (or still placeholder) in $ENV_FILE" + exit 1 +fi + +cd "$WEB_DIR" +pnpm --filter @linsa/web install --silent --ignore-scripts + +# Use drizzle-kit push for local dev (syncs schema directly, no migration history) +# This is safer for local dev as it handles existing tables gracefully +echo "Pushing schema to database..." +pnpm drizzle-kit push --force + +echo "✓ Database schema synced" +""" +description = "Sync Drizzle schema to local database (uses push for dev, handles existing tables)." +dependencies = ["node", "pnpm"] +shortcuts = ["migrate", "m"] + +[[tasks]] +name = "fix-context-tables" +command = """ +set -euo pipefail + +ROOT="$(pwd)" +WEB_DIR="$ROOT/packages/web" +ENV_FILE="$WEB_DIR/.env" + +if [ ! -f "$ENV_FILE" ]; then + echo "Missing $ENV_FILE. Run 'f setup' first." + exit 1 +fi + +set -a +. "$ENV_FILE" +set +a + +if [ -z "${DATABASE_URL:-}" ] || [[ "$DATABASE_URL" == "postgresql://user:password@host:5432/dbname" ]]; then + echo "DATABASE_URL is not set (or still placeholder) in $ENV_FILE" + exit 1 +fi + +cd "$WEB_DIR" +echo "Ensuring context tables exist in the target database..." +pnpm --filter @linsa/web install --silent --ignore-scripts +DATABASE_URL="$DATABASE_URL" pnpm tsx scripts/push-schema.ts + +echo "✓ context_items and thread_context_items tables ensured" +""" +description = "Create/repair context_items and thread_context_items tables using push-schema." +dependencies = ["node", "pnpm"] +shortcuts = ["fctx"] + +[[tasks]] +name = "dev" +command = """ +# Kill any process on port 5613 before starting +lsof -ti:5613 | xargs kill -9 2>/dev/null || true +pnpm --filter @linsa/web run dev +""" +description = "Start the web dev server on port 5613." +dependencies = ["node", "pnpm"] +shortcuts = ["d"] + +[[tasks]] +name = "deploy" +command = """ +set -euo pipefail + +echo "=== Production Deployment ===" +echo "" +echo "This will deploy to Cloudflare Workers." +echo "Make sure you have configured secrets first (see docs/production-setup.md)" +echo "" + +# Check if wrangler is logged in +if ! pnpm --filter @linsa/web exec wrangler whoami >/dev/null 2>&1; then + echo "Not logged in to Cloudflare. Running wrangler login..." + pnpm --filter @linsa/web exec wrangler login +fi + +echo "" +echo "Deploying worker..." +pnpm deploy:worker + +echo "" +echo "Deploying web..." +pnpm deploy:web + +echo "" +echo "=== Deployment Complete ===" +""" +description = "Deploy both worker and web to Cloudflare Workers." 
+dependencies = ["node", "pnpm"] +shortcuts = ["p"] + +[[tasks]] +name = "deploy-setup" +interactive = true +command = """ +set -euo pipefail + +echo "=== Production Secrets Setup ===" +echo "" +echo "This will configure Cloudflare Workers secrets for production." +echo "You'll need:" +echo " - Neon PostgreSQL DATABASE_URL" +echo " - BETTER_AUTH_SECRET (will generate if empty)" +echo " - OpenRouter API key (optional)" +echo "" + +cd packages/web + +# Check if wrangler is logged in +if ! pnpm exec wrangler whoami >/dev/null 2>&1; then + echo "Not logged in to Cloudflare. Running wrangler login..." + pnpm exec wrangler login +fi + +echo "" +read -p "Enter your Neon PostgreSQL DATABASE_URL: " DATABASE_URL +if [ -n "$DATABASE_URL" ]; then + echo "$DATABASE_URL" | pnpm exec wrangler secret put DATABASE_URL + echo "✓ DATABASE_URL set" +fi + +echo "" +read -p "Enter BETTER_AUTH_SECRET (leave empty to generate): " BETTER_AUTH_SECRET +if [ -z "$BETTER_AUTH_SECRET" ]; then + BETTER_AUTH_SECRET=$(openssl rand -hex 32) + echo "Generated: $BETTER_AUTH_SECRET" +fi +echo "$BETTER_AUTH_SECRET" | pnpm exec wrangler secret put BETTER_AUTH_SECRET +echo "✓ BETTER_AUTH_SECRET set" + +echo "" +read -p "Enter your production APP_BASE_URL (e.g., https://app.example.com): " APP_BASE_URL +if [ -n "$APP_BASE_URL" ]; then + pnpm exec wrangler vars put APP_BASE_URL "$APP_BASE_URL" + echo "✓ APP_BASE_URL set" +fi + +echo "" +read -p "Enter ELECTRIC_URL: " ELECTRIC_URL +if [ -n "$ELECTRIC_URL" ]; then + echo "$ELECTRIC_URL" | pnpm exec wrangler secret put ELECTRIC_URL + echo "✓ ELECTRIC_URL set" +fi + +echo "" +read -p "Enter ELECTRIC_SOURCE_ID (leave empty if not using Electric Cloud): " ELECTRIC_SOURCE_ID +if [ -n "$ELECTRIC_SOURCE_ID" ]; then + echo "$ELECTRIC_SOURCE_ID" | pnpm exec wrangler secret put ELECTRIC_SOURCE_ID + echo "✓ ELECTRIC_SOURCE_ID set" +fi + +echo "" +read -p "Enter ELECTRIC_SOURCE_SECRET (leave empty if not using Electric Cloud): " ELECTRIC_SOURCE_SECRET +if [ -n "$ELECTRIC_SOURCE_SECRET" ]; then + echo "$ELECTRIC_SOURCE_SECRET" | pnpm exec wrangler secret put ELECTRIC_SOURCE_SECRET + echo "✓ ELECTRIC_SOURCE_SECRET set" +fi + +echo "" +read -p "Enter OPENROUTER_API_KEY (leave empty to skip): " OPENROUTER_API_KEY +if [ -n "$OPENROUTER_API_KEY" ]; then + echo "$OPENROUTER_API_KEY" | pnpm exec wrangler secret put OPENROUTER_API_KEY + echo "✓ OPENROUTER_API_KEY set" +fi + +echo "" +read -p "Enter RESEND_API_KEY (leave empty to skip): " RESEND_API_KEY +if [ -n "$RESEND_API_KEY" ]; then + echo "$RESEND_API_KEY" | pnpm exec wrangler secret put RESEND_API_KEY + echo "✓ RESEND_API_KEY set" +fi + +echo "" +read -p "Enter RESEND_FROM_EMAIL (e.g., noreply@yourdomain.com): " RESEND_FROM_EMAIL +if [ -n "$RESEND_FROM_EMAIL" ]; then + echo "$RESEND_FROM_EMAIL" | pnpm exec wrangler secret put RESEND_FROM_EMAIL + echo "✓ RESEND_FROM_EMAIL set" +fi + +echo "" +echo "=== Setup Complete ===" +echo "" +echo "Run 'f deploy' to deploy to production." +""" +description = "Interactive setup for Cloudflare Workers production secrets." +dependencies = ["node", "pnpm"] +shortcuts = ["ds"] + +[[tasks]] +name = "local-services" +command = """ +set -euo pipefail + +echo "Starting local services via docker-compose..." + +cd packages/web +docker compose up -d + +# Wait for postgres to be healthy +echo "Waiting for Postgres to be ready..." 
+READY=0 +for i in $(seq 1 30); do + STATUS=$(docker inspect -f '{{.State.Health.Status}}' linsa-postgres 2>/dev/null || echo "unknown") + if [ "$STATUS" = "healthy" ]; then + READY=1 + break + fi + sleep 1 +done + +if [ "$READY" -ne 1 ]; then + echo "⚠ Postgres not ready. Check 'docker logs linsa-postgres'" + exit 1 +fi + +# Create tables if they don't exist +docker compose exec -T postgres psql -U postgres -d electric <<'SQL' +-- Better-auth tables (camelCase columns) +CREATE TABLE IF NOT EXISTS users ( + id text PRIMARY KEY, + name text NOT NULL, + email text NOT NULL UNIQUE, + "emailVerified" boolean NOT NULL DEFAULT false, + image text, + "createdAt" timestamp NOT NULL DEFAULT now(), + "updatedAt" timestamp NOT NULL DEFAULT now() +); +CREATE TABLE IF NOT EXISTS sessions ( + id text PRIMARY KEY, + "expiresAt" timestamp NOT NULL, + token text NOT NULL UNIQUE, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL, + "ipAddress" text, + "userAgent" text, + "userId" text NOT NULL REFERENCES users(id) ON DELETE CASCADE +); +CREATE TABLE IF NOT EXISTS accounts ( + id text PRIMARY KEY, + "accountId" text NOT NULL, + "providerId" text NOT NULL, + "userId" text NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "accessToken" text, + "refreshToken" text, + "idToken" text, + "accessTokenExpiresAt" timestamp, + "refreshTokenExpiresAt" timestamp, + scope text, + password text, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL +); +CREATE TABLE IF NOT EXISTS verifications ( + id text PRIMARY KEY, + identifier text NOT NULL, + value text NOT NULL, + "expiresAt" timestamp NOT NULL, + "createdAt" timestamp DEFAULT now(), + "updatedAt" timestamp DEFAULT now() +); +-- App tables (snake_case for Electric sync) +CREATE TABLE IF NOT EXISTS chat_threads ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + title text NOT NULL, + user_id text NOT NULL, + created_at timestamptz NOT NULL DEFAULT now() +); +CREATE TABLE IF NOT EXISTS chat_messages ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + thread_id integer NOT NULL REFERENCES chat_threads(id) ON DELETE CASCADE, + role varchar(32) NOT NULL, + content text NOT NULL, + created_at timestamptz NOT NULL DEFAULT now() +); +SQL +echo "✓ Database tables ready" + +echo "" +echo "Local services ready:" +echo " - Postgres: postgresql://postgres:password@db.localtest.me:5433/electric" +echo " - Neon HTTP Proxy: http://localhost:4444" +echo " - Electric: http://localhost:3100" +echo "" +echo "Run 'f dev' to start the web server." +""" +description = "Start local Postgres, Neon proxy, and Electric services for development." +dependencies = ["docker"] +shortcuts = ["ls"] + +[[tasks]] +name = "stop-services" +command = """ +echo "Stopping local services..." +cd packages/web +docker compose down +echo "✓ Services stopped" +""" +description = "Stop local Postgres, Neon proxy, and Electric services." +dependencies = ["docker"] +shortcuts = ["ss"] + +[[tasks]] +name = "reset-db" +command = """ +set -euo pipefail + +echo "Resetting local database volumes (Postgres + Electric)..." +cd packages/web +docker compose down -v +docker compose up -d + +echo "" +echo "DB reset complete. Reapply schema with 'pnpm --filter @linsa/web run migrate' or run 'f reset-setup' to recreate + seed." +""" +description = "Drop docker-compose volumes and restart for a clean database." 
+dependencies = ["docker"] +shortcuts = ["rdb"] + +[[tasks]] +name = "reset-setup" +command = """ +set -euo pipefail + +ROOT="$(pwd)" +WEB_DIR="$ROOT/packages/web" +ENV_FILE="$WEB_DIR/.env" +EXAMPLE_FILE="$WEB_DIR/.env.example" + +echo "⚙️ Resetting local stack (db + auth schema + seed)..." + +# Ensure env file exists +if [ ! -f "$ENV_FILE" ]; then + if [ -f "$EXAMPLE_FILE" ]; then + cp "$EXAMPLE_FILE" "$ENV_FILE" + echo "Created $ENV_FILE from template." + else + echo "Missing $ENV_FILE and $EXAMPLE_FILE; run 'f setup' first." + exit 1 + fi +fi + +set -a +. "$ENV_FILE" +set +a + +if [ -z "${DATABASE_URL:-}" ]; then + echo "DATABASE_URL is not set in $ENV_FILE. Fix and rerun." + exit 1 +fi + +cd "$WEB_DIR" + +echo "⏹️ Stopping and clearing local services..." +docker compose down -v + +echo "⏫ Starting clean services..." +docker compose up -d + +echo "⌛ Waiting for Postgres to be ready..." +READY=0 +for i in $(seq 1 90); do + STATUS=$(docker inspect -f '{{.State.Health.Status}}' linsa-postgres 2>/dev/null || echo "unknown") + if [ "$STATUS" = "healthy" ]; then + READY=1 + break + fi + printf "." + sleep 1 +done +echo "" +if [ "$READY" -ne 1 ]; then + echo "Postgres did not become ready in time. Last status: $STATUS" + docker compose logs --tail=50 postgres || true + echo "You can also run: docker compose exec -T postgres pg_isready -U postgres -h localhost" + echo "Check container logs: docker compose logs postgres" + exit 1 +fi +echo "✓ Postgres ready" + +echo "🔄 Recreating auth and app tables..." +docker compose exec -T postgres psql -U postgres -d electric <<'SQL' +DROP TABLE IF EXISTS chat_messages CASCADE; +DROP TABLE IF EXISTS chat_threads CASCADE; +DROP TABLE IF EXISTS verifications CASCADE; +DROP TABLE IF EXISTS accounts CASCADE; +DROP TABLE IF EXISTS sessions CASCADE; +DROP TABLE IF EXISTS users CASCADE; + +CREATE TABLE users ( + id text PRIMARY KEY, + name text NOT NULL, + email text NOT NULL UNIQUE, + "emailVerified" boolean NOT NULL DEFAULT false, + image text, + "createdAt" timestamp NOT NULL DEFAULT now(), + "updatedAt" timestamp NOT NULL DEFAULT now() +); +CREATE TABLE sessions ( + id text PRIMARY KEY, + "expiresAt" timestamp NOT NULL, + token text NOT NULL UNIQUE, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL, + "ipAddress" text, + "userAgent" text, + "userId" text NOT NULL REFERENCES users(id) ON DELETE CASCADE +); +CREATE TABLE accounts ( + id text PRIMARY KEY, + "accountId" text NOT NULL, + "providerId" text NOT NULL, + "userId" text NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "accessToken" text, + "refreshToken" text, + "idToken" text, + "accessTokenExpiresAt" timestamp, + "refreshTokenExpiresAt" timestamp, + scope text, + password text, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL +); +CREATE TABLE verifications ( + id text PRIMARY KEY, + identifier text NOT NULL, + value text NOT NULL, + "expiresAt" timestamp NOT NULL, + "createdAt" timestamp NOT NULL DEFAULT now(), + "updatedAt" timestamp NOT NULL DEFAULT now() +); +CREATE TABLE chat_threads ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + title text NOT NULL, + user_id text NOT NULL, + created_at timestamptz NOT NULL DEFAULT now() +); +CREATE TABLE chat_messages ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + thread_id integer NOT NULL REFERENCES chat_threads(id) ON DELETE CASCADE, + role varchar(32) NOT NULL, + content text NOT NULL, + created_at timestamptz NOT NULL DEFAULT now() +); +SQL + +echo "📦 Installing deps..." 
+pnpm --filter @linsa/web install --silent --ignore-scripts + +echo "🌱 Seeding demo user and chat..." +pnpm --filter @linsa/web run seed + +echo "" +echo "✅ Reset complete. Start dev server with: f dev" +""" +description = "Hard reset local dev stack: recreate DB schema, reseed, and restart services." +dependencies = ["docker", "node", "pnpm"] +shortcuts = ["rs"] + +[[tasks]] +name = "prep-deploy" +command = """ +set -euo pipefail + +echo "=== Pre-Deployment Checklist ===" +echo "" + +ERRORS=0 +WARNINGS=0 + +# 1. Check for uncommitted changes +echo "Checking git status..." +if [ -n "$(git status --porcelain)" ]; then + echo "⚠️ Warning: You have uncommitted changes" + git status --short + WARNINGS=$((WARNINGS + 1)) +else + echo "✓ Working directory clean" +fi + +# 2. Check TypeScript compilation (warning only - build may still work) +echo "" +echo "Checking TypeScript..." +cd packages/web +if pnpm tsc --noEmit 2>&1; then + echo "✓ TypeScript compiles without errors" +else + echo "⚠️ TypeScript errors found (build may still work)" + WARNINGS=$((WARNINGS + 1)) +fi + +# 3. Check ESLint +echo "" +echo "Checking ESLint..." +if pnpm lint 2>&1; then + echo "✓ No lint errors" +else + echo "⚠️ Lint errors found (run 'pnpm lint:fix' to auto-fix)" + WARNINGS=$((WARNINGS + 1)) +fi + +# 4. Check if wrangler is logged in +echo "" +echo "Checking Cloudflare authentication..." +if pnpm exec wrangler whoami >/dev/null 2>&1; then + ACCOUNT=$(pnpm exec wrangler whoami 2>&1 | grep -oE '[a-f0-9]{32}' | head -1 || echo "authenticated") + echo "✓ Logged into Cloudflare" +else + echo "✗ Not logged into Cloudflare (run 'pnpm exec wrangler login')" + ERRORS=$((ERRORS + 1)) +fi + +# 5. Check required secrets are configured +echo "" +echo "Checking Cloudflare secrets..." +SECRETS_OUTPUT=$(pnpm exec wrangler secret list 2>&1 || echo "") + +check_secret() { + if echo "$SECRETS_OUTPUT" | grep -q "$1"; then + echo " ✓ $1 is set" + else + echo " ✗ $1 is NOT set" + ERRORS=$((ERRORS + 1)) + fi +} + +check_secret "DATABASE_URL" +check_secret "BETTER_AUTH_SECRET" +check_secret "ELECTRIC_URL" + +# Optional secrets (warnings only) +check_optional_secret() { + if echo "$SECRETS_OUTPUT" | grep -q "$1"; then + echo " ✓ $1 is set" + else + echo " ⚠️ $1 is not set (optional)" + fi +} + +check_optional_secret "OPENROUTER_API_KEY" +check_optional_secret "RESEND_API_KEY" + +# 6. Check build works +echo "" +echo "Testing build..." +cd .. +if pnpm --filter @linsa/web build 2>&1; then + echo "✓ Build successful" +else + echo "✗ Build failed" + ERRORS=$((ERRORS + 1)) +fi + +# Summary +echo "" +echo "=== Summary ===" +if [ $ERRORS -gt 0 ]; then + echo "✗ $ERRORS error(s) found - fix before deploying" + exit 1 +elif [ $WARNINGS -gt 0 ]; then + echo "⚠️ $WARNINGS warning(s) found - review before deploying" + echo "" + echo "Ready to deploy with warnings. Run 'f deploy' to proceed." +else + echo "✓ All checks passed!" + echo "" + echo "Ready to deploy. Run 'f deploy' to proceed." +fi +""" +description = "Pre-deployment checks: TypeScript, lint, secrets, and build verification." +dependencies = ["node", "pnpm"] +shortcuts = ["pd"] + +[[tasks]] +name = "migrate-prod" +interactive = true +command = """ +set -euo pipefail + +echo "=== Production Database Migration ===" +echo "" +echo "⚠️ WARNING: This will modify the PRODUCTION database!" +echo "" + +read -p "Enter your Neon DATABASE_URL: " PROD_DATABASE_URL +if [ -z "$PROD_DATABASE_URL" ]; then + echo "No DATABASE_URL provided. Aborting." + exit 1 +fi + +# Validate URL format +if [[ ! 
"$PROD_DATABASE_URL" =~ ^postgresql:// ]]; then + echo "Invalid DATABASE_URL format. Must start with 'postgresql://'" + exit 1 +fi + +echo "" +read -p "Are you sure you want to migrate the production database? (yes/no): " CONFIRM +if [ "$CONFIRM" != "yes" ]; then + echo "Aborted." + exit 1 +fi + +cd packages/web + +echo "" +echo "Pushing schema to production database..." +DATABASE_URL="$PROD_DATABASE_URL" pnpm drizzle-kit push --force + +echo "" +echo "✓ Production database schema synced" +echo "" +echo "Note: If this is your first deploy, you may also need to:" +echo " 1. Set up Electric sync for the new tables" +echo " 2. Configure ELECTRIC_SOURCE_ID and ELECTRIC_SOURCE_SECRET" +""" +description = "Push Drizzle schema to production Neon database." +dependencies = ["node", "pnpm"] +shortcuts = ["mp"] + +[[tasks]] +name = "prod-setup" +interactive = true +command = """ +set -euo pipefail + +echo "=== Full Production Setup ===" +echo "" +echo "This will:" +echo " 1. Check Cloudflare authentication" +echo " 2. Create Hyperdrive for database connection pooling" +echo " 3. Set all required secrets (skipping already-set ones)" +echo " 4. Migrate the production database" +echo " 5. Verify everything is ready" +echo "" + +cd packages/web + +# 1. Check/setup Cloudflare auth +echo "Step 1: Cloudflare Authentication" +if ! pnpm exec wrangler whoami >/dev/null 2>&1; then + echo "Not logged into Cloudflare. Logging in..." + pnpm exec wrangler login +fi +echo "✓ Authenticated with Cloudflare" + +# Get existing secrets to check what's already set +echo "" +echo "Checking existing secrets..." +SECRETS_OUTPUT=$(pnpm exec wrangler secret list 2>&1 || echo "") + +is_secret_set() { + echo "$SECRETS_OUTPUT" | grep -q "$1" +} + +# 2. Setup Hyperdrive +echo "" +echo "Step 2: Hyperdrive Setup" +echo "" + +# Check if Hyperdrive ID is already configured in wrangler.jsonc +CURRENT_HYPERDRIVE_ID=$(grep -o '"id": *"[^"]*"' wrangler.jsonc 2>/dev/null | grep -o '"[^"]*"$' | tr -d '"' | head -1 || echo "") + +if [ "$CURRENT_HYPERDRIVE_ID" = "YOUR_HYPERDRIVE_ID" ] || [ -z "$CURRENT_HYPERDRIVE_ID" ]; then + echo "Hyperdrive not configured yet." + echo "" + read -p "Enter your PostgreSQL DATABASE_URL for Hyperdrive: " DATABASE_URL + if [ -n "$DATABASE_URL" ]; then + echo "" + echo "Creating Hyperdrive config 'prod-db'..." + HYPERDRIVE_OUTPUT=$(pnpm exec wrangler hyperdrive create prod-db --connection-string="$DATABASE_URL" 2>&1 || echo "") + + # Extract the ID from output + HYPERDRIVE_ID=$(echo "$HYPERDRIVE_OUTPUT" | grep -oE '[a-f0-9]{32}' | head -1 || echo "") + + if [ -n "$HYPERDRIVE_ID" ]; then + echo "✓ Hyperdrive created with ID: $HYPERDRIVE_ID" + echo "" + echo "Updating wrangler.jsonc with Hyperdrive ID..." + sed -i '' "s/YOUR_HYPERDRIVE_ID/$HYPERDRIVE_ID/g" wrangler.jsonc + echo "✓ wrangler.jsonc updated" + else + # Hyperdrive might already exist, try to get the ID + echo "Hyperdrive may already exist. Listing existing configs..." + pnpm exec wrangler hyperdrive list 2>&1 || true + echo "" + read -p "Enter the Hyperdrive ID to use: " HYPERDRIVE_ID + if [ -n "$HYPERDRIVE_ID" ]; then + sed -i '' "s/YOUR_HYPERDRIVE_ID/$HYPERDRIVE_ID/g" wrangler.jsonc + echo "✓ wrangler.jsonc updated with ID: $HYPERDRIVE_ID" + fi + fi + else + echo "⚠️ DATABASE_URL not provided. Hyperdrive setup skipped." + echo " You'll need to manually create Hyperdrive and update wrangler.jsonc" + fi +else + echo "✓ Hyperdrive already configured with ID: $CURRENT_HYPERDRIVE_ID" +fi + +# 3. 
Set secrets (skip if already set) +echo "" +echo "Step 3: Configure Secrets" +echo "" + +# BETTER_AUTH_SECRET +if is_secret_set "BETTER_AUTH_SECRET"; then + echo "✓ BETTER_AUTH_SECRET already set (skipping)" +else + read -p "Enter BETTER_AUTH_SECRET (leave empty to generate): " BETTER_AUTH_SECRET + if [ -z "$BETTER_AUTH_SECRET" ]; then + BETTER_AUTH_SECRET=$(openssl rand -hex 32) + echo "Generated new secret" + fi + echo "$BETTER_AUTH_SECRET" | pnpm exec wrangler secret put BETTER_AUTH_SECRET + echo "✓ BETTER_AUTH_SECRET set" +fi + +# ELECTRIC_URL +echo "" +if is_secret_set "ELECTRIC_URL"; then + echo "✓ ELECTRIC_URL already set (skipping)" +else + read -p "Enter ELECTRIC_URL: " ELECTRIC_URL + if [ -n "$ELECTRIC_URL" ]; then + echo "$ELECTRIC_URL" | pnpm exec wrangler secret put ELECTRIC_URL + echo "✓ ELECTRIC_URL set" + else + echo "⚠️ ELECTRIC_URL skipped (required for real-time sync)" + fi +fi + +# OPENROUTER_API_KEY +echo "" +if is_secret_set "OPENROUTER_API_KEY"; then + echo "✓ OPENROUTER_API_KEY already set (skipping)" +else + read -p "Enter OPENROUTER_API_KEY (leave empty to skip): " OPENROUTER_API_KEY + if [ -n "$OPENROUTER_API_KEY" ]; then + echo "$OPENROUTER_API_KEY" | pnpm exec wrangler secret put OPENROUTER_API_KEY + echo "✓ OPENROUTER_API_KEY set" + else + echo "⚠️ OPENROUTER_API_KEY skipped (AI chat will use demo mode)" + fi +fi + +# RESEND_API_KEY and RESEND_FROM_EMAIL +echo "" +if is_secret_set "RESEND_API_KEY"; then + echo "✓ RESEND_API_KEY already set (skipping)" +else + read -p "Enter RESEND_API_KEY (leave empty to skip): " RESEND_API_KEY + if [ -n "$RESEND_API_KEY" ]; then + echo "$RESEND_API_KEY" | pnpm exec wrangler secret put RESEND_API_KEY + echo "✓ RESEND_API_KEY set" + + if ! is_secret_set "RESEND_FROM_EMAIL"; then + read -p "Enter RESEND_FROM_EMAIL (e.g., noreply@yourdomain.com): " RESEND_FROM_EMAIL + if [ -n "$RESEND_FROM_EMAIL" ]; then + echo "$RESEND_FROM_EMAIL" | pnpm exec wrangler secret put RESEND_FROM_EMAIL + echo "✓ RESEND_FROM_EMAIL set" + fi + fi + else + echo "⚠️ RESEND_API_KEY skipped (OTP codes will only work in dev mode)" + fi +fi + +# APP_BASE_URL +echo "" +if is_secret_set "APP_BASE_URL"; then + echo "✓ APP_BASE_URL already set (skipping)" +else + read -p "Enter APP_BASE_URL (e.g., https://your-app.workers.dev): " APP_BASE_URL + if [ -n "$APP_BASE_URL" ]; then + pnpm exec wrangler vars set APP_BASE_URL "$APP_BASE_URL" 2>/dev/null || echo "$APP_BASE_URL" | pnpm exec wrangler secret put APP_BASE_URL + echo "✓ APP_BASE_URL set" + fi +fi + +# 4. Migrate production database +echo "" +echo "Step 4: Database Migration" +if [ -n "${DATABASE_URL:-}" ]; then + echo "" + read -p "Migrate production database now? (yes/no): " MIGRATE + if [ "$MIGRATE" = "yes" ]; then + echo "Pushing schema to production..." + DATABASE_URL="$DATABASE_URL" pnpm drizzle-kit push --force + echo "✓ Database schema synced" + else + echo "Skipped migration. Run 'f migrate-prod' later." + fi +else + echo "Skipped - no DATABASE_URL available" + echo "Run 'f migrate-prod' to migrate after setting up Hyperdrive" +fi + +# 5. 
Verify +echo "" +echo "Step 5: Verification" +echo "" +SECRETS_OUTPUT=$(pnpm exec wrangler secret list 2>&1 || echo "") + +check_secret() { + if echo "$SECRETS_OUTPUT" | grep -q "$1"; then + echo " ✓ $1" + else + echo " ✗ $1 (missing)" + fi +} + +echo "Required:" +check_secret "BETTER_AUTH_SECRET" +check_secret "ELECTRIC_URL" + +echo "" +echo "Optional:" +check_secret "OPENROUTER_API_KEY" +check_secret "RESEND_API_KEY" +check_secret "RESEND_FROM_EMAIL" +check_secret "APP_BASE_URL" + +echo "" +echo "Hyperdrive:" +CURRENT_ID=$(grep -o '"id": *"[^"]*"' wrangler.jsonc 2>/dev/null | grep -o '"[^"]*"$' | tr -d '"' | head -1 || echo "") +if [ -n "$CURRENT_ID" ] && [ "$CURRENT_ID" != "YOUR_HYPERDRIVE_ID" ]; then + echo " ✓ Configured with ID: $CURRENT_ID" +else + echo " ✗ Not configured (update wrangler.jsonc)" +fi + +echo "" +echo "=== Setup Complete ===" +echo "" +echo "Next: Run 'f prep-deploy' to verify, then 'f deploy' to deploy." +""" +description = "Complete production setup: Cloudflare auth, Hyperdrive, secrets, and database migration." +dependencies = ["node", "pnpm"] +shortcuts = ["ps", "prod"] + +[[tasks]] +name = "db-gui" +command = "open 'postgresql://postgres:password@localhost:5432/electric'" +description = "Open local database in TablePlus or default Postgres GUI" +shortcuts = ["gui"] + +[[tasks]] +name = "db-gui-prod" +command = """ +set -euo pipefail + +# Read from PROD_DATABASE_URL env var or .env file +if [ -z "${PROD_DATABASE_URL:-}" ]; then + if [ -f packages/web/.env ]; then + PROD_DATABASE_URL=$(grep "^PROD_DATABASE_URL=" packages/web/.env | cut -d'=' -f2-) + fi +fi + +if [ -z "${PROD_DATABASE_URL:-}" ]; then + echo "Error: PROD_DATABASE_URL not set. Add it to packages/web/.env" + exit 1 +fi + +echo "Opening production database in TablePlus..." +open -a "TablePlus" "$PROD_DATABASE_URL" +""" +description = "Open production database in TablePlus" +shortcuts = ["guip", "tp"] + +[[tasks]] +name = "db-push" +command = """ +set -euo pipefail + +ROOT="$(pwd)" +ENV_FILE="$ROOT/packages/web/.env" + +if [ -f "$ENV_FILE" ]; then + set -a + . "$ENV_FILE" + set +a +fi + +PROD_URL="${PROD_DATABASE_URL:-}" + +if [ -z "$PROD_URL" ]; then + echo "❌ PROD_DATABASE_URL not set in packages/web/.env" + exit 1 +fi + +echo "⚠️ Pushing schema to production database..." + +cd packages/web +DATABASE_URL="$PROD_URL" pnpm tsx scripts/push-schema.ts + +echo "" +echo "✓ Schema push complete" +""" +description = "Push schema to production Neon database." +dependencies = ["node", "pnpm"] +shortcuts = ["dbp", "push"] + +[[tasks]] +name = "db-connect" +command = """ +set -euo pipefail + +ROOT="$(pwd)" +ENV_FILE="$ROOT/packages/web/.env" + +if [ -f "$ENV_FILE" ]; then + set -a + . "$ENV_FILE" + set +a +fi + +PROD_URL="${PROD_DATABASE_URL:-}" + +if [ -z "$PROD_URL" ]; then + echo "❌ PROD_DATABASE_URL not set in packages/web/.env" + exit 1 +fi + +cd packages/web +DATABASE_URL="$PROD_URL" pnpm tsx scripts/db-connect.ts +""" +description = "Test connection to production database and list tables." +dependencies = ["node", "pnpm"] +shortcuts = ["dbc", "connect"] + +[[tasks]] +name = "db-query" +command = """ +set -euo pipefail + +ROOT="$(pwd)" +ENV_FILE="$ROOT/packages/web/.env" + +if [ -f "$ENV_FILE" ]; then + set -a + . 
"$ENV_FILE" + set +a +fi + +PROD_URL="${PROD_DATABASE_URL:-}" + +if [ -z "$PROD_URL" ]; then + echo "❌ PROD_DATABASE_URL not set in packages/web/.env" + exit 1 +fi + +cd packages/web +DATABASE_URL="$PROD_URL" pnpm tsx scripts/db-query.ts "$@" +""" +description = "Interactive CRUD tool for production database." +dependencies = ["node", "pnpm"] +shortcuts = ["dbq", "query"] + +[[tasks]] +name = "staging-secrets" +interactive = true +command = """ +set -euo pipefail + +cd packages/web + +WORKER="dev-linsa" + +echo "=== Set Staging Secrets (Worker: $WORKER -> staging.linsa.io) ===" +echo "" + +# Get existing secrets +SECRETS_OUTPUT=$(pnpm exec wrangler secret list --name="$WORKER" 2>&1 || echo "") + +is_secret_set() { + echo "$SECRETS_OUTPUT" | grep -q "$1" +} + +set_secret() { + local NAME="$1" + local DEFAULT="$2" + local REQUIRED="$3" + + echo "" + echo "$NAME:" + if is_secret_set "$NAME"; then + echo " (already set)" + read -p " Enter new value to update, or leave empty to keep: " VALUE + elif [ -n "$DEFAULT" ]; then + read -p " Enter value [$DEFAULT]: " VALUE + VALUE="${VALUE:-$DEFAULT}" + else + read -p " Enter value: " VALUE + fi + + if [ -n "$VALUE" ]; then + echo "$VALUE" | pnpm exec wrangler secret put "$NAME" --name="$WORKER" + echo " ✓ $NAME set" + elif [ "$REQUIRED" = "true" ] && ! is_secret_set "$NAME"; then + echo " ✗ Skipped (REQUIRED - auth will not work!)" + else + echo " ⚠ Skipped" + fi +} + +echo "Setting secrets for Worker: $WORKER" +echo "" + +# BETTER_AUTH_SECRET +echo "BETTER_AUTH_SECRET (required):" +if is_secret_set "BETTER_AUTH_SECRET"; then + echo " (already set)" + read -p " Enter new value to update, or leave empty to keep: " VALUE +else + read -p " Enter value (leave empty to generate): " VALUE + if [ -z "$VALUE" ]; then + VALUE=$(openssl rand -hex 32) + echo " Generated: $VALUE" + fi +fi +if [ -n "$VALUE" ]; then + echo "$VALUE" | pnpm exec wrangler secret put BETTER_AUTH_SECRET --name="$WORKER" + echo " ✓ BETTER_AUTH_SECRET set" +fi + +# APP_BASE_URL +set_secret "APP_BASE_URL" "https://staging.linsa.io" "true" + +# RESEND_API_KEY +set_secret "RESEND_API_KEY" "" "true" + +# RESEND_FROM_EMAIL +set_secret "RESEND_FROM_EMAIL" "noreply@linsa.io" "true" + +# ELECTRIC_URL +set_secret "ELECTRIC_URL" "https://api.electric-sql.cloud" "false" + +# OPENROUTER_API_KEY +set_secret "OPENROUTER_API_KEY" "" "false" + +echo "" +echo "=== Done ===" +echo "Secrets are set. Run 'pnpm deploy:web' or push to git to deploy." +""" +description = "Set secrets for staging Worker (dev-linsa -> staging.linsa.io)." +dependencies = ["node", "pnpm"] +shortcuts = ["staging"] + +[[tasks]] +name = "staging-check" +command = """ +set -euo pipefail + +cd packages/web + +WORKER="dev-linsa" + +echo "=== Staging Secrets Check (Worker: $WORKER) ===" +echo "" + +SECRETS_OUTPUT=$(pnpm exec wrangler secret list --name="$WORKER" 2>&1 || echo "") + +check() { + if echo "$SECRETS_OUTPUT" | grep -q "$1"; then + echo " ✓ $1" + else + echo " ✗ $1 (MISSING)" + fi +} + +echo "Required:" +check "BETTER_AUTH_SECRET" +check "APP_BASE_URL" + +echo "" +echo "For email auth:" +check "RESEND_API_KEY" +check "RESEND_FROM_EMAIL" + +echo "" +echo "Optional:" +check "ELECTRIC_URL" +check "OPENROUTER_API_KEY" + +echo "" +echo "If secrets are missing, run: f staging-secrets" +""" +description = "Check which secrets are set for staging Worker." 
+dependencies = ["node", "pnpm"] +shortcuts = ["sc"] + +[[tasks]] +name = "prod-check" +command = """ +set -euo pipefail + +echo "=== Production Health Check ===" +echo "" + +cd packages/web + +# 1. Check Cloudflare auth +echo "1. Cloudflare Authentication" +if pnpm exec wrangler whoami >/dev/null 2>&1; then + echo " ✓ Logged in" +else + echo " ✗ Not logged in - run: pnpm exec wrangler login" + exit 1 +fi + +# 2. Check secrets +echo "" +echo "2. Cloudflare Secrets" +SECRETS_OUTPUT=$(pnpm exec wrangler secret list 2>&1 || echo "") + +check_secret() { + if echo "$SECRETS_OUTPUT" | grep -q "$1"; then + echo " ✓ $1" + return 0 + else + echo " ✗ $1 (MISSING)" + return 1 + fi +} + +MISSING=0 +check_secret "BETTER_AUTH_SECRET" || MISSING=1 +check_secret "RESEND_API_KEY" || MISSING=1 +check_secret "RESEND_FROM_EMAIL" || MISSING=1 +check_secret "APP_BASE_URL" || MISSING=1 +check_secret "ELECTRIC_URL" || MISSING=1 + +if [ "$MISSING" -eq 1 ]; then + echo "" + echo " To set missing secrets:" + echo " pnpm exec wrangler secret put SECRET_NAME" +fi + +# 3. Check Hyperdrive +echo "" +echo "3. Hyperdrive Config" +HYPERDRIVE_ID=$(grep -o '"id": *"[^"]*"' wrangler.jsonc 2>/dev/null | head -1 | grep -o '"[^"]*"$' | tr -d '"' || echo "") +if [ -n "$HYPERDRIVE_ID" ] && [ "$HYPERDRIVE_ID" != "YOUR_HYPERDRIVE_ID" ]; then + echo " ✓ Configured: $HYPERDRIVE_ID" +else + echo " ✗ Not configured in wrangler.jsonc" +fi + +# 4. Test deployment endpoint +echo "" +echo "4. Deployment Status" +DEPLOY_URL=$(grep -E "APP_BASE_URL|workers.dev" wrangler.jsonc 2>/dev/null | head -1 || echo "") +# Try to get the actual deployed URL +WORKER_NAME=$(grep '"name"' wrangler.jsonc | head -1 | grep -o '"[^"]*"$' | tr -d '"' || echo "fullstack-monorepo-template-web") +echo " Worker: $WORKER_NAME" + +# 5. Tail logs instruction +echo "" +echo "5. Live Logs" +echo " To see real-time logs, run in another terminal:" +echo " pnpm --filter @linsa/web exec wrangler tail" + +# 6. Test auth endpoint +echo "" +echo "6. Testing Auth Endpoint" +AUTH_URL="https://dev.linsa.io/api/auth/ok" +echo " Testing: $AUTH_URL" +RESPONSE=$(curl -s -o /dev/null -w "%{http_code}" "$AUTH_URL" 2>/dev/null || echo "failed") +if [ "$RESPONSE" = "200" ]; then + echo " ✓ Auth endpoint responding (HTTP $RESPONSE)" +else + echo " ⚠ Auth endpoint returned: $RESPONSE" +fi + +echo "" +echo "=== Summary ===" +if [ "$MISSING" -eq 1 ]; then + echo "⚠ Some secrets are missing. Set them and redeploy." +else + echo "✓ All secrets configured" + echo "" + echo "If emails still not working:" + echo " 1. Run 'pnpm --filter @linsa/web exec wrangler tail' in another terminal" + echo " 2. Try login again at https://dev.linsa.io/auth" + echo " 3. Check the logs for [auth] messages" +fi +""" +description = "Verify production deployment: secrets, Hyperdrive, endpoints." +dependencies = ["node", "pnpm"] +shortcuts = ["pc", "check"] + +[[tasks]] +name = "prod-logs" +command = """ +cd packages/web +echo "Starting live log tail for production worker..." +echo "Try the login flow in browser to see logs." +echo "Press Ctrl+C to stop." +echo "" +pnpm exec wrangler tail +""" +description = "Tail live logs from production Cloudflare worker." +dependencies = ["node", "pnpm"] +shortcuts = ["pl", "logs"] + +[[tasks]] +name = "test-pg" +command = """ +set -euo pipefail +cd packages/web +pnpm tsx tests/pg-check.ts +""" +description = "Test PostgreSQL connection with simple CRUD operations." 
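+# Thin wrapper around packages/web/tests/pg-check.ts; see that script for the connection it uses.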
+dependencies = ["node", "pnpm"] +shortcuts = ["tpg", "pg"] + +[[tasks]] +name = "migrate-safe" +interactive = true +command = """ +set -euo pipefail + +ROOT="$(pwd)" +WEB_DIR="$ROOT/packages/web" +ENV_FILE="$WEB_DIR/.env" + +echo "=== Safe Production Migration ===" +echo "" + +if [ ! -f "$ENV_FILE" ]; then + echo "Missing $ENV_FILE. Run 'f setup' first." + exit 1 +fi + +set -a +. "$ENV_FILE" +set +a + +PROD_URL="${PROD_DATABASE_URL:-}" + +if [ -z "$PROD_URL" ]; then + echo "PROD_DATABASE_URL not set in packages/web/.env" + echo "" + echo "Add your production database URL:" + echo " PROD_DATABASE_URL=postgresql://user:pass@host/db?sslmode=require" + exit 1 +fi + +cd "$WEB_DIR" + +echo "1. Checking production database..." +DATABASE_URL="$PROD_URL" pnpm tsx scripts/migrate-safe.ts check + +echo "" +echo "=== Migration Options ===" +echo "" +echo " a) Push Drizzle schema (app tables)" +echo " b) Fix auth tables (recreate with camelCase)" +echo " c) Both (recommended for fresh setup)" +echo " q) Quit" +echo "" +read -p "Choose option [a/b/c/q]: " CHOICE + +case "$CHOICE" in + a) + echo "" + echo "Pushing Drizzle schema to production..." + DATABASE_URL="$PROD_URL" pnpm drizzle-kit push --force + echo "Done" + ;; + b) + echo "" + echo "WARNING: This will DROP and recreate auth tables!" + echo "All existing users will be deleted!" + read -p "Type 'yes' to confirm: " CONFIRM + if [ "$CONFIRM" != "yes" ]; then + echo "Aborted." + exit 1 + fi + DATABASE_URL="$PROD_URL" pnpm tsx scripts/migrate-safe.ts auth + ;; + c) + echo "" + echo "WARNING: This will DROP auth tables and push Drizzle schema!" + read -p "Type 'yes' to confirm: " CONFIRM + if [ "$CONFIRM" != "yes" ]; then + echo "Aborted." + exit 1 + fi + DATABASE_URL="$PROD_URL" pnpm tsx scripts/migrate-safe.ts auth + echo "" + echo "Pushing Drizzle schema..." + DATABASE_URL="$PROD_URL" pnpm drizzle-kit push --force + echo "Done" + ;; + q|*) + echo "Aborted." + exit 0 + ;; +esac + +echo "" +echo "=== Migration Complete ===" +""" +description = "Safe interactive migration for production database." 
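+# Reads PROD_DATABASE_URL from packages/web/.env; dropping auth tables requires typing 'yes' to confirm.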
+dependencies = ["node", "pnpm"] +shortcuts = ["ms", "safe"] diff --git a/package.json b/package.json new file mode 100644 index 00000000..f69d2a0f --- /dev/null +++ b/package.json @@ -0,0 +1,43 @@ +{ + "name": "linsa", + "version": "1.0.0", + "private": true, + "type": "module", + "packageManager": "pnpm@10.11.1", + "scripts": { + "dev:worker": "pnpm --filter @linsa/worker run dev", + "deploy:worker": "pnpm --filter @linsa/worker run deploy", + "test:worker": "pnpm --filter @linsa/worker run test", + "dev:web": "pnpm --filter @linsa/web run dev", + "deploy:web": "pnpm --filter @linsa/web run deploy", + "test:web": "pnpm --filter @linsa/web run test", + "dev": "pnpm dev:worker", + "test": "pnpm -r test", + "lint": "pnpm -r lint && pnpm format:check", + "lint:fix": "pnpm -r lint:fix", + "format": "prettier --write '**/*.{ts,js,tsx,jsx,cjs,json,md}'", + "format:check": "prettier --check '**/*.{ts,js,cjs,json,md}'", + "deploy": "pnpm deploy:worker && pnpm deploy:web" + }, + "devDependencies": { + "@eslint/js": "^9.39.1", + "@types/pg": "^8.15.6", + "@typescript-eslint/eslint-plugin": "^8.46.4", + "@typescript-eslint/parser": "^8.46.4", + "eslint": "^9.39.1", + "globals": "^16.5.0", + "pg": "^8.16.3", + "postgres": "^3.4.7", + "tsx": "^4.21.0", + "typescript": "^5.5.2" + }, + "dependencies": { + "prettier": "^3.6.2" + }, + "prettier": { + "semi": false + }, + "workspaces": [ + "packages/*" + ] +} diff --git a/packages/web/.cta.json b/packages/web/.cta.json new file mode 100644 index 00000000..ddb8fda8 --- /dev/null +++ b/packages/web/.cta.json @@ -0,0 +1,12 @@ +{ + "projectName": "web", + "mode": "file-router", + "typescript": true, + "tailwind": true, + "packageManager": "npm", + "git": false, + "addOnOptions": {}, + "version": 1, + "framework": "react-cra", + "chosenAddOns": ["start", "cloudflare"] +} diff --git a/packages/web/.env.example b/packages/web/.env.example new file mode 100644 index 00000000..d7ba5b2a --- /dev/null +++ b/packages/web/.env.example @@ -0,0 +1,45 @@ +# Neon PostgreSQL (https://console.neon.tech) +# Format: postgresql://:@/?sslmode=require +DATABASE_URL=postgresql://user:password@ep-xxx.region.aws.neon.tech/neondb?sslmode=require +ELECTRIC_URL=http://localhost:3100 +BETTER_AUTH_SECRET=your-strong-secret-at-least-32-chars +APP_BASE_URL=http://localhost:5000 + +# Optional: Electric Cloud credentials (for production) +ELECTRIC_SOURCE_ID= +ELECTRIC_SOURCE_SECRET= + +# Optional: OpenRouter for AI chat responses (https://openrouter.ai/keys) +OPENROUTER_API_KEY= +OPENROUTER_MODEL=google/gemini-2.0-flash-001 + +# Optional: Flowglad billing (enable billing UI + metering) +FLOWGLAD_SECRET_KEY= +VITE_FLOWGLAD_ENABLED=false + +# Optional: Resend for production email OTP (https://resend.com/api-keys) +# In dev mode, OTP codes are logged to terminal instead +RESEND_API_KEY= +RESEND_FROM_EMAIL=noreply@yourdomain.com + +# Optional: Gemini for canvas image generation +GEMINI_API_KEY= + +# =========================================== +# PRODUCTION DEPLOYMENT (Cloudflare Workers) +# =========================================== +# Neon PostgreSQL DATABASE_URL format: +# postgresql://:@.neon.tech/?sslmode=require +# +# Set these as secrets in Cloudflare: +# wrangler secret put DATABASE_URL +# wrangler secret put BETTER_AUTH_SECRET +# wrangler secret put ELECTRIC_URL +# wrangler secret put ELECTRIC_SOURCE_ID +# wrangler secret put ELECTRIC_SOURCE_SECRET +# wrangler secret put OPENROUTER_API_KEY +# wrangler secret put RESEND_API_KEY +# +# Or set APP_BASE_URL/RESEND_FROM_EMAIL as variables: +# 
add them under "vars" in packages/web/wrangler.jsonc, e.g.:
+#   "vars": { "APP_BASE_URL": "https://your-domain.com", "RESEND_FROM_EMAIL": "noreply@your-domain.com" }
diff --git a/packages/web/.gitignore b/packages/web/.gitignore
new file mode 100644
index 00000000..055af72c
--- /dev/null
+++ b/packages/web/.gitignore
@@ -0,0 +1,17 @@
+node_modules
+.DS_Store
+dist
+dist-ssr
+*.local
+count.txt
+.env
+.nitro
+.tanstack
+.wrangler
+.output
+.vinxi
+todos.json
+
+.dev.vars*
+!.dev.vars.example
+!.env.example
diff --git a/packages/web/.vscode/settings.json b/packages/web/.vscode/settings.json
new file mode 100644
index 00000000..00b5278e
--- /dev/null
+++ b/packages/web/.vscode/settings.json
@@ -0,0 +1,11 @@
+{
+  "files.watcherExclude": {
+    "**/routeTree.gen.ts": true
+  },
+  "search.exclude": {
+    "**/routeTree.gen.ts": true
+  },
+  "files.readonlyInclude": {
+    "**/routeTree.gen.ts": true
+  }
+}
diff --git a/packages/web/docker-compose.yml b/packages/web/docker-compose.yml
new file mode 100644
index 00000000..33af7776
--- /dev/null
+++ b/packages/web/docker-compose.yml
@@ -0,0 +1,46 @@
+name: linsa
+
+services:
+  postgres:
+    image: postgres:17
+    container_name: linsa-postgres
+    command: "-c wal_level=logical"
+    volumes:
+      - db_data:/var/lib/postgresql/data
+    ports:
+      - "5433:5432"
+    environment:
+      - POSTGRES_USER=postgres
+      - POSTGRES_PASSWORD=password
+      - POSTGRES_DB=electric
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U postgres"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+
+  neon-proxy:
+    image: ghcr.io/timowilhelm/local-neon-http-proxy:main
+    container_name: linsa-neon-proxy
+    environment:
+      - PG_CONNECTION_STRING=postgres://postgres:password@postgres:5432/electric
+    ports:
+      - "4444:4444"
+    depends_on:
+      postgres:
+        condition: service_healthy
+
+  electric:
+    image: electricsql/electric:latest
+    container_name: linsa-electric
+    environment:
+      DATABASE_URL: postgresql://postgres:password@postgres:5432/electric
+      ELECTRIC_INSECURE: "true"
+    ports:
+      - "3100:3000"
+    depends_on:
+      postgres:
+        condition: service_healthy
+
+volumes:
+  db_data:
diff --git a/packages/web/drizzle.config.ts b/packages/web/drizzle.config.ts
new file mode 100644
index 00000000..0dc52996
--- /dev/null
+++ b/packages/web/drizzle.config.ts
@@ -0,0 +1,12 @@
+import "dotenv/config"
+import { defineConfig } from "drizzle-kit"
+
+export default defineConfig({
+  schema: "./src/db/schema.ts",
+  out: "./drizzle",
+  dialect: "postgresql",
+  casing: "snake_case",
+  dbCredentials: {
+    url: process.env.DATABASE_URL ?? 
"", + }, +}) diff --git a/packages/web/drizzle/0000_freezing_black_crow.sql b/packages/web/drizzle/0000_freezing_black_crow.sql new file mode 100644 index 00000000..83093ec6 --- /dev/null +++ b/packages/web/drizzle/0000_freezing_black_crow.sql @@ -0,0 +1,102 @@ +CREATE TABLE "accounts" ( + "id" text PRIMARY KEY NOT NULL, + "accountId" text NOT NULL, + "providerId" text NOT NULL, + "userId" text NOT NULL, + "accessToken" text, + "refreshToken" text, + "idToken" text, + "accessTokenExpiresAt" timestamp, + "refreshTokenExpiresAt" timestamp, + "scope" text, + "password" text, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL +); +--> statement-breakpoint +CREATE TABLE "canvas" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "owner_id" text NOT NULL, + "name" text DEFAULT 'Untitled Canvas' NOT NULL, + "width" integer DEFAULT 1024 NOT NULL, + "height" integer DEFAULT 1024 NOT NULL, + "default_model" text DEFAULT 'gemini-2.0-flash-exp-image-generation' NOT NULL, + "default_style" text DEFAULT 'default' NOT NULL, + "background_prompt" text, + "created_at" timestamp with time zone DEFAULT now() NOT NULL, + "updated_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE "canvas_images" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "canvas_id" uuid NOT NULL, + "name" text DEFAULT 'Untitled Image' NOT NULL, + "prompt" text DEFAULT '' NOT NULL, + "model_id" text DEFAULT 'gemini-2.0-flash-exp-image-generation' NOT NULL, + "model_used" text, + "style_id" text DEFAULT 'default' NOT NULL, + "width" integer DEFAULT 512 NOT NULL, + "height" integer DEFAULT 512 NOT NULL, + "position" jsonb NOT NULL, + "rotation" double precision DEFAULT 0 NOT NULL, + "content_base64" text, + "image_url" text, + "metadata" jsonb, + "branch_parent_id" uuid, + "created_at" timestamp with time zone DEFAULT now() NOT NULL, + "updated_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE "chat_messages" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "chat_messages_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1), + "thread_id" integer NOT NULL, + "role" varchar(32) NOT NULL, + "content" text NOT NULL, + "created_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE "chat_threads" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "chat_threads_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1), + "title" text NOT NULL, + "user_id" text NOT NULL, + "created_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE "sessions" ( + "id" text PRIMARY KEY NOT NULL, + "expiresAt" timestamp NOT NULL, + "token" text NOT NULL, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL, + "ipAddress" text, + "userAgent" text, + "userId" text NOT NULL, + CONSTRAINT "sessions_token_unique" UNIQUE("token") +); +--> statement-breakpoint +CREATE TABLE "users" ( + "id" text PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "email" text NOT NULL, + "emailVerified" boolean NOT NULL, + "image" text, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL, + CONSTRAINT "users_email_unique" UNIQUE("email") +); +--> statement-breakpoint +CREATE TABLE "verifications" ( + "id" text PRIMARY KEY NOT NULL, + "identifier" text NOT NULL, + "value" text NOT NULL, + "expiresAt" timestamp NOT NULL, + "createdAt" timestamp, + "updatedAt" 
timestamp +); +--> statement-breakpoint +ALTER TABLE "accounts" ADD CONSTRAINT "accounts_userId_users_id_fk" FOREIGN KEY ("userId") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "canvas" ADD CONSTRAINT "canvas_owner_id_users_id_fk" FOREIGN KEY ("owner_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "canvas_images" ADD CONSTRAINT "canvas_images_canvas_id_canvas_id_fk" FOREIGN KEY ("canvas_id") REFERENCES "public"."canvas"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "canvas_images" ADD CONSTRAINT "canvas_images_branch_parent_id_canvas_images_id_fk" FOREIGN KEY ("branch_parent_id") REFERENCES "public"."canvas_images"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "chat_messages" ADD CONSTRAINT "chat_messages_thread_id_chat_threads_id_fk" FOREIGN KEY ("thread_id") REFERENCES "public"."chat_threads"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "sessions" ADD CONSTRAINT "sessions_userId_users_id_fk" FOREIGN KEY ("userId") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action; \ No newline at end of file diff --git a/packages/web/drizzle/0001_loving_captain_midlands.sql b/packages/web/drizzle/0001_loving_captain_midlands.sql new file mode 100644 index 00000000..1b543e23 --- /dev/null +++ b/packages/web/drizzle/0001_loving_captain_midlands.sql @@ -0,0 +1,23 @@ +CREATE TABLE "context_items" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "context_items_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1), + "user_id" text NOT NULL, + "type" varchar(32) NOT NULL, + "url" text, + "name" text NOT NULL, + "content" text, + "refreshing" boolean DEFAULT false NOT NULL, + "parent_id" integer, + "created_at" timestamp with time zone DEFAULT now() NOT NULL, + "updated_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE "thread_context_items" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "thread_context_items_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1), + "thread_id" integer NOT NULL, + "context_item_id" integer NOT NULL, + "created_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +ALTER TABLE "context_items" ADD CONSTRAINT "context_items_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "thread_context_items" ADD CONSTRAINT "thread_context_items_thread_id_chat_threads_id_fk" FOREIGN KEY ("thread_id") REFERENCES "public"."chat_threads"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "thread_context_items" ADD CONSTRAINT "thread_context_items_context_item_id_context_items_id_fk" FOREIGN KEY ("context_item_id") REFERENCES "public"."context_items"("id") ON DELETE cascade ON UPDATE no action; \ No newline at end of file diff --git a/packages/web/drizzle/0002_uneven_the_renegades.sql b/packages/web/drizzle/0002_uneven_the_renegades.sql new file mode 100644 index 00000000..2143121a --- /dev/null +++ b/packages/web/drizzle/0002_uneven_the_renegades.sql @@ -0,0 +1,31 @@ +CREATE TABLE "blocks" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "blocks_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 
1), + "name" text NOT NULL, + "created_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE "browser_session_tabs" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "session_id" uuid NOT NULL, + "title" text DEFAULT '' NOT NULL, + "url" text NOT NULL, + "position" integer DEFAULT 0 NOT NULL, + "favicon_url" text, + "created_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE "browser_sessions" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "user_id" text NOT NULL, + "name" text NOT NULL, + "browser" varchar(32) DEFAULT 'safari' NOT NULL, + "tab_count" integer DEFAULT 0 NOT NULL, + "is_favorite" boolean DEFAULT false NOT NULL, + "captured_at" timestamp with time zone DEFAULT now() NOT NULL, + "created_at" timestamp with time zone DEFAULT now() NOT NULL +); +--> statement-breakpoint +ALTER TABLE "canvas" ALTER COLUMN "default_model" SET DEFAULT 'gemini-2.5-flash-image-preview';--> statement-breakpoint +ALTER TABLE "chat_threads" ALTER COLUMN "user_id" DROP NOT NULL;--> statement-breakpoint +ALTER TABLE "browser_session_tabs" ADD CONSTRAINT "browser_session_tabs_session_id_browser_sessions_id_fk" FOREIGN KEY ("session_id") REFERENCES "public"."browser_sessions"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "browser_sessions" ADD CONSTRAINT "browser_sessions_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action; \ No newline at end of file diff --git a/packages/web/drizzle/meta/0000_snapshot.json b/packages/web/drizzle/meta/0000_snapshot.json new file mode 100644 index 00000000..69680397 --- /dev/null +++ b/packages/web/drizzle/meta/0000_snapshot.json @@ -0,0 +1,684 @@ +{ + "id": "3e047a36-a388-45ed-b92a-d1a5f7bdddfa", + "prevId": "00000000-0000-0000-0000-000000000000", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.accounts": { + "name": "accounts", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "accountId": { + "name": "accountId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "providerId": { + "name": "providerId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "accessToken": { + "name": "accessToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "refreshToken": { + "name": "refreshToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "idToken": { + "name": "idToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "accessTokenExpiresAt": { + "name": "accessTokenExpiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "refreshTokenExpiresAt": { + "name": "refreshTokenExpiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "scope": { + "name": "scope", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "accounts_userId_users_id_fk": { + "name": 
"accounts_userId_users_id_fk", + "tableFrom": "accounts", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.canvas": { + "name": "canvas", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "owner_id": { + "name": "owner_id", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'Untitled Canvas'" + }, + "width": { + "name": "width", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1024 + }, + "height": { + "name": "height", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1024 + }, + "default_model": { + "name": "default_model", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'gemini-2.0-flash-exp-image-generation'" + }, + "default_style": { + "name": "default_style", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'default'" + }, + "background_prompt": { + "name": "background_prompt", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "canvas_owner_id_users_id_fk": { + "name": "canvas_owner_id_users_id_fk", + "tableFrom": "canvas", + "tableTo": "users", + "columnsFrom": [ + "owner_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.canvas_images": { + "name": "canvas_images", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "canvas_id": { + "name": "canvas_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'Untitled Image'" + }, + "prompt": { + "name": "prompt", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "''" + }, + "model_id": { + "name": "model_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'gemini-2.0-flash-exp-image-generation'" + }, + "model_used": { + "name": "model_used", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "style_id": { + "name": "style_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'default'" + }, + "width": { + "name": "width", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 512 + }, + "height": { + "name": "height", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 512 + }, + "position": { + "name": "position", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "rotation": { + "name": "rotation", + "type": "double precision", + "primaryKey": false, + "notNull": true, + "default": 0 + 
}, + "content_base64": { + "name": "content_base64", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "branch_parent_id": { + "name": "branch_parent_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "canvas_images_canvas_id_canvas_id_fk": { + "name": "canvas_images_canvas_id_canvas_id_fk", + "tableFrom": "canvas_images", + "tableTo": "canvas", + "columnsFrom": [ + "canvas_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "canvas_images_branch_parent_id_canvas_images_id_fk": { + "name": "canvas_images_branch_parent_id_canvas_images_id_fk", + "tableFrom": "canvas_images", + "tableTo": "canvas_images", + "columnsFrom": [ + "branch_parent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.chat_messages": { + "name": "chat_messages", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "chat_messages_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "thread_id": { + "name": "thread_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "role": { + "name": "role", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "chat_messages_thread_id_chat_threads_id_fk": { + "name": "chat_messages_thread_id_chat_threads_id_fk", + "tableFrom": "chat_messages", + "tableTo": "chat_threads", + "columnsFrom": [ + "thread_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.chat_threads": { + "name": "chat_threads", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "chat_threads_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "user_id": { + "name": "user_id", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + 
}, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.sessions": { + "name": "sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "token": { + "name": "token", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "ipAddress": { + "name": "ipAddress", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "userAgent": { + "name": "userAgent", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "userId": { + "name": "userId", + "type": "text", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "sessions_userId_users_id_fk": { + "name": "sessions_userId_users_id_fk", + "tableFrom": "sessions", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "sessions_token_unique": { + "name": "sessions_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "emailVerified": { + "name": "emailVerified", + "type": "boolean", + "primaryKey": false, + "notNull": true + }, + "image": { + "name": "image", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_email_unique": { + "name": "users_email_unique", + "nullsNotDistinct": false, + "columns": [ + "email" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.verifications": { + "name": "verifications", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "identifier": { + "name": "identifier", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + 
"isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/packages/web/drizzle/meta/0001_snapshot.json b/packages/web/drizzle/meta/0001_snapshot.json new file mode 100644 index 00000000..b4527fd7 --- /dev/null +++ b/packages/web/drizzle/meta/0001_snapshot.json @@ -0,0 +1,861 @@ +{ + "id": "49ea3b31-322c-4596-8274-73d9a0e7b6f2", + "prevId": "3e047a36-a388-45ed-b92a-d1a5f7bdddfa", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.accounts": { + "name": "accounts", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "accountId": { + "name": "accountId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "providerId": { + "name": "providerId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "accessToken": { + "name": "accessToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "refreshToken": { + "name": "refreshToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "idToken": { + "name": "idToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "accessTokenExpiresAt": { + "name": "accessTokenExpiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "refreshTokenExpiresAt": { + "name": "refreshTokenExpiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "scope": { + "name": "scope", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "accounts_userId_users_id_fk": { + "name": "accounts_userId_users_id_fk", + "tableFrom": "accounts", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.canvas": { + "name": "canvas", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "owner_id": { + "name": "owner_id", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'Untitled Canvas'" + }, + "width": { + "name": "width", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1024 + }, + "height": { + "name": "height", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1024 + }, + "default_model": { + "name": "default_model", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'gemini-2.0-flash-exp-image-generation'" + }, + "default_style": { + "name": "default_style", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'default'" + }, + "background_prompt": { + "name": "background_prompt", + "type": 
"text", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "canvas_owner_id_users_id_fk": { + "name": "canvas_owner_id_users_id_fk", + "tableFrom": "canvas", + "tableTo": "users", + "columnsFrom": [ + "owner_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.canvas_images": { + "name": "canvas_images", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "canvas_id": { + "name": "canvas_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'Untitled Image'" + }, + "prompt": { + "name": "prompt", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "''" + }, + "model_id": { + "name": "model_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'gemini-2.0-flash-exp-image-generation'" + }, + "model_used": { + "name": "model_used", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "style_id": { + "name": "style_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'default'" + }, + "width": { + "name": "width", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 512 + }, + "height": { + "name": "height", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 512 + }, + "position": { + "name": "position", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "rotation": { + "name": "rotation", + "type": "double precision", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "content_base64": { + "name": "content_base64", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "branch_parent_id": { + "name": "branch_parent_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "canvas_images_canvas_id_canvas_id_fk": { + "name": "canvas_images_canvas_id_canvas_id_fk", + "tableFrom": "canvas_images", + "tableTo": "canvas", + "columnsFrom": [ + "canvas_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "canvas_images_branch_parent_id_canvas_images_id_fk": { + "name": "canvas_images_branch_parent_id_canvas_images_id_fk", + "tableFrom": "canvas_images", + "tableTo": "canvas_images", + "columnsFrom": [ + "branch_parent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } 
+ }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.chat_messages": { + "name": "chat_messages", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "chat_messages_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "thread_id": { + "name": "thread_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "role": { + "name": "role", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "chat_messages_thread_id_chat_threads_id_fk": { + "name": "chat_messages_thread_id_chat_threads_id_fk", + "tableFrom": "chat_messages", + "tableTo": "chat_threads", + "columnsFrom": [ + "thread_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.chat_threads": { + "name": "chat_threads", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "chat_threads_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "user_id": { + "name": "user_id", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.context_items": { + "name": "context_items", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "context_items_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "user_id": { + "name": "user_id", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "type": { + "name": "type", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "refreshing": { + "name": "refreshing", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "parent_id": { + "name": "parent_id", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time 
zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "context_items_user_id_users_id_fk": { + "name": "context_items_user_id_users_id_fk", + "tableFrom": "context_items", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.sessions": { + "name": "sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "token": { + "name": "token", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "ipAddress": { + "name": "ipAddress", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "userAgent": { + "name": "userAgent", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "userId": { + "name": "userId", + "type": "text", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "sessions_userId_users_id_fk": { + "name": "sessions_userId_users_id_fk", + "tableFrom": "sessions", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "sessions_token_unique": { + "name": "sessions_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.thread_context_items": { + "name": "thread_context_items", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "thread_context_items_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "thread_id": { + "name": "thread_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "context_item_id": { + "name": "context_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "thread_context_items_thread_id_chat_threads_id_fk": { + "name": "thread_context_items_thread_id_chat_threads_id_fk", + "tableFrom": "thread_context_items", + "tableTo": "chat_threads", + "columnsFrom": [ + "thread_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "thread_context_items_context_item_id_context_items_id_fk": { + "name": "thread_context_items_context_item_id_context_items_id_fk", + "tableFrom": "thread_context_items", + "tableTo": "context_items", + "columnsFrom": [ + "context_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + 
"onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "emailVerified": { + "name": "emailVerified", + "type": "boolean", + "primaryKey": false, + "notNull": true + }, + "image": { + "name": "image", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_email_unique": { + "name": "users_email_unique", + "nullsNotDistinct": false, + "columns": [ + "email" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.verifications": { + "name": "verifications", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "identifier": { + "name": "identifier", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/packages/web/drizzle/meta/0002_snapshot.json b/packages/web/drizzle/meta/0002_snapshot.json new file mode 100644 index 00000000..18cda351 --- /dev/null +++ b/packages/web/drizzle/meta/0002_snapshot.json @@ -0,0 +1,1058 @@ +{ + "id": "c18816f4-ba1c-4533-8b15-805862502a8d", + "prevId": "49ea3b31-322c-4596-8274-73d9a0e7b6f2", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.accounts": { + "name": "accounts", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "accountId": { + "name": "accountId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "providerId": { + "name": "providerId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "accessToken": { + "name": "accessToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "refreshToken": { + "name": "refreshToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "idToken": { + "name": "idToken", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "accessTokenExpiresAt": { + "name": 
"accessTokenExpiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "refreshTokenExpiresAt": { + "name": "refreshTokenExpiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "scope": { + "name": "scope", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "accounts_userId_users_id_fk": { + "name": "accounts_userId_users_id_fk", + "tableFrom": "accounts", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.blocks": { + "name": "blocks", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "blocks_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.browser_session_tabs": { + "name": "browser_session_tabs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "session_id": { + "name": "session_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "''" + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "position": { + "name": "position", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "favicon_url": { + "name": "favicon_url", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "browser_session_tabs_session_id_browser_sessions_id_fk": { + "name": "browser_session_tabs_session_id_browser_sessions_id_fk", + "tableFrom": "browser_session_tabs", + "tableTo": "browser_sessions", + "columnsFrom": [ + "session_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.browser_sessions": { + "name": "browser_sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "user_id": { + "name": "user_id", + "type": "text", + "primaryKey": 
false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "browser": { + "name": "browser", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true, + "default": "'safari'" + }, + "tab_count": { + "name": "tab_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "is_favorite": { + "name": "is_favorite", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "captured_at": { + "name": "captured_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "browser_sessions_user_id_users_id_fk": { + "name": "browser_sessions_user_id_users_id_fk", + "tableFrom": "browser_sessions", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.canvas": { + "name": "canvas", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "owner_id": { + "name": "owner_id", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'Untitled Canvas'" + }, + "width": { + "name": "width", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1024 + }, + "height": { + "name": "height", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1024 + }, + "default_model": { + "name": "default_model", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'gemini-2.5-flash-image-preview'" + }, + "default_style": { + "name": "default_style", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'default'" + }, + "background_prompt": { + "name": "background_prompt", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "canvas_owner_id_users_id_fk": { + "name": "canvas_owner_id_users_id_fk", + "tableFrom": "canvas", + "tableTo": "users", + "columnsFrom": [ + "owner_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.canvas_images": { + "name": "canvas_images", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "canvas_id": { + "name": "canvas_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'Untitled Image'" + }, + "prompt": { + "name": "prompt", + "type": "text", + 
"primaryKey": false, + "notNull": true, + "default": "''" + }, + "model_id": { + "name": "model_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'gemini-2.0-flash-exp-image-generation'" + }, + "model_used": { + "name": "model_used", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "style_id": { + "name": "style_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'default'" + }, + "width": { + "name": "width", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 512 + }, + "height": { + "name": "height", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 512 + }, + "position": { + "name": "position", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "rotation": { + "name": "rotation", + "type": "double precision", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "content_base64": { + "name": "content_base64", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "image_url": { + "name": "image_url", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "branch_parent_id": { + "name": "branch_parent_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "canvas_images_canvas_id_canvas_id_fk": { + "name": "canvas_images_canvas_id_canvas_id_fk", + "tableFrom": "canvas_images", + "tableTo": "canvas", + "columnsFrom": [ + "canvas_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "canvas_images_branch_parent_id_canvas_images_id_fk": { + "name": "canvas_images_branch_parent_id_canvas_images_id_fk", + "tableFrom": "canvas_images", + "tableTo": "canvas_images", + "columnsFrom": [ + "branch_parent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.chat_messages": { + "name": "chat_messages", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "chat_messages_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "thread_id": { + "name": "thread_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "role": { + "name": "role", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "chat_messages_thread_id_chat_threads_id_fk": { + "name": "chat_messages_thread_id_chat_threads_id_fk", + "tableFrom": "chat_messages", + "tableTo": "chat_threads", + "columnsFrom": [ + "thread_id" + ], + "columnsTo": [ + "id" + ], + 
"onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.chat_threads": { + "name": "chat_threads", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "chat_threads_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "user_id": { + "name": "user_id", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.context_items": { + "name": "context_items", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "context_items_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "user_id": { + "name": "user_id", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "type": { + "name": "type", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "url": { + "name": "url", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "content": { + "name": "content", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "refreshing": { + "name": "refreshing", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "parent_id": { + "name": "parent_id", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "context_items_user_id_users_id_fk": { + "name": "context_items_user_id_users_id_fk", + "tableFrom": "context_items", + "tableTo": "users", + "columnsFrom": [ + "user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.sessions": { + "name": "sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "token": { + "name": "token", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "ipAddress": 
{ + "name": "ipAddress", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "userAgent": { + "name": "userAgent", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "userId": { + "name": "userId", + "type": "text", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "sessions_userId_users_id_fk": { + "name": "sessions_userId_users_id_fk", + "tableFrom": "sessions", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "sessions_token_unique": { + "name": "sessions_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.thread_context_items": { + "name": "thread_context_items", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "identity": { + "type": "always", + "name": "thread_context_items_id_seq", + "schema": "public", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": "1", + "cycle": false + } + }, + "thread_id": { + "name": "thread_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "context_item_id": { + "name": "context_item_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp with time zone", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "thread_context_items_thread_id_chat_threads_id_fk": { + "name": "thread_context_items_thread_id_chat_threads_id_fk", + "tableFrom": "thread_context_items", + "tableTo": "chat_threads", + "columnsFrom": [ + "thread_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "thread_context_items_context_item_id_context_items_id_fk": { + "name": "thread_context_items_context_item_id_context_items_id_fk", + "tableFrom": "thread_context_items", + "tableTo": "context_items", + "columnsFrom": [ + "context_item_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "emailVerified": { + "name": "emailVerified", + "type": "boolean", + "primaryKey": false, + "notNull": true + }, + "image": { + "name": "image", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_email_unique": { + "name": "users_email_unique", + "nullsNotDistinct": false, + "columns": [ + "email" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.verifications": { + 
"name": "verifications", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true + }, + "identifier": { + "name": "identifier", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "value": { + "name": "value", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "updatedAt": { + "name": "updatedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/packages/web/drizzle/meta/_journal.json b/packages/web/drizzle/meta/_journal.json new file mode 100644 index 00000000..7f528a42 --- /dev/null +++ b/packages/web/drizzle/meta/_journal.json @@ -0,0 +1,27 @@ +{ + "version": "7", + "dialect": "postgresql", + "entries": [ + { + "idx": 0, + "version": "7", + "when": 1764686923088, + "tag": "0000_freezing_black_crow", + "breakpoints": true + }, + { + "idx": 1, + "version": "7", + "when": 1764866473191, + "tag": "0001_loving_captain_midlands", + "breakpoints": true + }, + { + "idx": 2, + "version": "7", + "when": 1765916542205, + "tag": "0002_uneven_the_renegades", + "breakpoints": true + } + ] +} \ No newline at end of file diff --git a/packages/web/env.d.ts b/packages/web/env.d.ts new file mode 100644 index 00000000..3652a9a1 --- /dev/null +++ b/packages/web/env.d.ts @@ -0,0 +1,20 @@ +/** + * Environment bindings for the web worker + * This extends the auto-generated worker-configuration.d.ts + */ + +import type { WorkerRpc } from "../worker/src/rpc" + +declare module "cloudflare:workers" { + interface Env { + // Service binding to the worker RPC + WORKER_RPC: Service + } +} + +// For compatibility with TanStack Start +declare global { + interface CloudflareEnv extends Env {} +} + +export {} diff --git a/packages/web/package.json b/packages/web/package.json new file mode 100644 index 00000000..e438489a --- /dev/null +++ b/packages/web/package.json @@ -0,0 +1,71 @@ +{ + "name": "@linsa/web", + "version": "0.0.0", + "private": true, + "type": "module", + "scripts": { + "dev": "vite dev --port 5613 --strictPort", + "build": "vite build", + "serve": "vite preview", + "test": "vitest run", + "deploy": "npm run build && wrangler deploy", + "preview": "npm run build && vite preview", + "cf-typegen": "wrangler types", + "lint": "eslint src", + "lint:fix": "eslint src --fix", + "seed": "tsx scripts/seed.ts", + "migrate": "drizzle-kit migrate", + "migrate:generate": "drizzle-kit generate" + }, + "dependencies": { + "@ai-sdk/openai": "^2.0.79", + "@ai-sdk/react": "^2.0.109", + "@cloudflare/vite-plugin": "^1.17.0", + "@electric-sql/client": "^1.2.0", + "@flowglad/react": "0.15.0", + "@flowglad/server": "0.15.0", + "@openrouter/ai-sdk-provider": "^1.4.1", + "@tailwindcss/vite": "^4.1.17", + "@tanstack/electric-db-collection": "^0.2.12", + "@tanstack/react-db": "^0.1.55", + "@tanstack/react-devtools": "^0.8.2", + "@tanstack/react-router": "^1.140.0", + "@tanstack/react-router-devtools": "^1.140.0", + 
"@tanstack/react-router-ssr-query": "^1.140.0", + "@tanstack/react-start": "^1.140.0", + "@tanstack/router-plugin": "^1.140.0", + "ai": "^5.0.108", + "better-auth": "^1.4.5", + "drizzle-orm": "^0.45.0", + "drizzle-zod": "^0.8.3", + "framer-motion": "^12.23.25", + "hls.js": "^1.6.15", + "lucide-react": "^0.556.0", + "postgres": "^3.4.7", + "react": "^19.2.1", + "react-dom": "^19.2.1", + "react-markdown": "^10.1.0", + "remark-gfm": "^4.0.1", + "resend": "^6.5.2", + "tailwindcss": "^4.1.17", + "vite-tsconfig-paths": "^5.1.4", + "zod": "^4.1.13" + }, + "devDependencies": { + "@testing-library/dom": "^10.4.1", + "@testing-library/react": "^16.3.0", + "@types/node": "^24.10.1", + "@types/react": "^19.2.7", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^5.1.1", + "dotenv": "^17.2.3", + "drizzle-kit": "^0.31.8", + "jsdom": "^27.2.0", + "tsx": "^4.21.0", + "typescript": "^5.9.3", + "vite": "^7.2.6", + "vitest": "^4.0.15", + "web-vitals": "^5.1.0", + "wrangler": "^4.53.0" + } +} diff --git a/packages/web/public/favicon.ico b/packages/web/public/favicon.ico new file mode 100644 index 00000000..a11777cc Binary files /dev/null and b/packages/web/public/favicon.ico differ diff --git a/packages/web/public/logo192.png b/packages/web/public/logo192.png new file mode 100644 index 00000000..fc44b0a3 Binary files /dev/null and b/packages/web/public/logo192.png differ diff --git a/packages/web/public/logo512.png b/packages/web/public/logo512.png new file mode 100644 index 00000000..a4e47a65 Binary files /dev/null and b/packages/web/public/logo512.png differ diff --git a/packages/web/public/manifest.json b/packages/web/public/manifest.json new file mode 100644 index 00000000..078ef501 --- /dev/null +++ b/packages/web/public/manifest.json @@ -0,0 +1,25 @@ +{ + "short_name": "TanStack App", + "name": "Create TanStack App Sample", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "logo192.png", + "type": "image/png", + "sizes": "192x192" + }, + { + "src": "logo512.png", + "type": "image/png", + "sizes": "512x512" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff" +} diff --git a/packages/web/public/robots.txt b/packages/web/public/robots.txt new file mode 100644 index 00000000..d5f32788 --- /dev/null +++ b/packages/web/public/robots.txt @@ -0,0 +1,6 @@ +# robots.txt for Linsa +User-agent: * +Allow: / +Disallow: /api/ + +Sitemap: https://linsa.io/sitemap.xml diff --git a/packages/web/public/sitemap.xml b/packages/web/public/sitemap.xml new file mode 100644 index 00000000..ee6b0363 --- /dev/null +++ b/packages/web/public/sitemap.xml @@ -0,0 +1,9 @@ + + + + https://linsa.io/ + 2025-12-13 + daily + 1.0 + + diff --git a/packages/web/public/tanstack-circle-logo.png b/packages/web/public/tanstack-circle-logo.png new file mode 100644 index 00000000..9db3e67b Binary files /dev/null and b/packages/web/public/tanstack-circle-logo.png differ diff --git a/packages/web/public/tanstack-word-logo-white.svg b/packages/web/public/tanstack-word-logo-white.svg new file mode 100644 index 00000000..b6ec5086 --- /dev/null +++ b/packages/web/public/tanstack-word-logo-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/web/readme.md b/packages/web/readme.md new file mode 100644 index 00000000..a2d0c97f --- /dev/null +++ b/packages/web/readme.md @@ -0,0 +1,283 @@ +Welcome to your new TanStack app! 
+ +# Getting Started + +To run this application: + +```bash +npm install +npm run start +``` + +# Building For Production + +To build this application for production: + +```bash +npm run build +``` + +## Testing + +This project uses [Vitest](https://vitest.dev/) for testing. You can run the tests with: + +```bash +npm run test +``` + +## Styling + +This project uses [Tailwind CSS](https://tailwindcss.com/) for styling. + +## Routing + +This project uses [TanStack Router](https://tanstack.com/router). The initial setup is a file-based router, which means that the routes are managed as files in `src/routes`. + +### Adding A Route + +To add a new route to your application, just add a new file in the `./src/routes` directory. + +TanStack will automatically generate the content of the route file for you. + +Now that you have two routes you can use a `Link` component to navigate between them. + +### Adding Links + +To use SPA (Single Page Application) navigation you will need to import the `Link` component from `@tanstack/react-router`. + +```tsx +import { Link } from '@tanstack/react-router'; +``` + +Then anywhere in your JSX you can use it like so: + +```tsx +<Link to="/about">About</Link> +``` + +This will create a link that will navigate to the `/about` route. + +More information on the `Link` component can be found in the [Link documentation](https://tanstack.com/router/v1/docs/framework/react/api/router/linkComponent). + +### Using A Layout + +In the File Based Routing setup the layout is located in `src/routes/__root.tsx`. Anything you add to the root route will appear in all the routes. The route content will appear in the JSX where you use the `<Outlet />` component. + +Here is an example layout that includes a header: + +```tsx +import { Outlet, createRootRoute } from '@tanstack/react-router'; +import { TanStackRouterDevtools } from '@tanstack/react-router-devtools'; + +import { Link } from '@tanstack/react-router'; + +export const Route = createRootRoute({ + component: () => ( + <> + <header> + <nav> + <Link to="/">Home</Link> + <Link to="/about">About</Link> + </nav> + </header> + <Outlet /> + <TanStackRouterDevtools /> + </> + ), +}); +``` + +The `<TanStackRouterDevtools />` component is not required so you can remove it if you don't want it in your layout. + +More information on layouts can be found in the [Layouts documentation](https://tanstack.com/router/latest/docs/framework/react/guide/routing-concepts#layouts). + +## Data Fetching + +There are multiple ways to fetch data in your application. You can use TanStack Query to fetch data from a server. But you can also use the `loader` functionality built into TanStack Router to load the data for a route before it's rendered. + +For example: + +```tsx +const peopleRoute = createRoute({ + getParentRoute: () => rootRoute, + path: '/people', + loader: async () => { + const response = await fetch('https://swapi.dev/api/people'); + return response.json() as Promise<{ + results: { + name: string; + }[]; + }>; + }, + component: () => { + const data = peopleRoute.useLoaderData(); + return ( + <ul>
+ {data.results.map((person) => ( + <li key={person.name}>{person.name}</li> + ))} + </ul>
+ ); + }, +}); +``` + +Loaders simplify your data fetching logic dramatically. Check out more information in the [Loader documentation](https://tanstack.com/router/latest/docs/framework/react/guide/data-loading#loader-parameters). + +### React-Query + +React-Query is an excellent addition or alternative to route loading and integrating it into your application is a breeze. + +First add your dependencies: + +```bash +npm install @tanstack/react-query @tanstack/react-query-devtools +``` + +Next we'll need to create a query client and provider. We recommend putting those in `main.tsx`. + +```tsx +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; + +// ... + +const queryClient = new QueryClient(); + +// ... + +if (!rootElement.innerHTML) { + const root = ReactDOM.createRoot(rootElement); + + root.render( + <QueryClientProvider client={queryClient}> + <App /> + </QueryClientProvider>, + ); +} +``` + +You can also add TanStack Query Devtools to the root route (optional). + +```tsx +import { ReactQueryDevtools } from '@tanstack/react-query-devtools'; + +const rootRoute = createRootRoute({ + component: () => ( + <> + <Outlet /> + <ReactQueryDevtools /> + <TanStackRouterDevtools /> + </> + ), +}); +``` + +Now you can use `useQuery` to fetch your data. + +```tsx +import { useQuery } from '@tanstack/react-query'; + +import './App.css'; + +function App() { + const { data } = useQuery({ + queryKey: ['people'], + queryFn: () => + fetch('https://swapi.dev/api/people') + .then((res) => res.json()) + .then((data) => data.results as { name: string }[]), + initialData: [], + }); + + return ( + <div>
+ <ul> + {data.map((person) => ( + <li key={person.name}>{person.name}</li> + ))} + </ul> + </div>
+ ); +} + +export default App; +``` + +You can find out everything you need to know on how to use React-Query in the [React-Query documentation](https://tanstack.com/query/latest/docs/framework/react/overview). + +## State Management + +Another common requirement for React applications is state management. There are many options for state management in React. TanStack Store provides a great starting point for your project. + +First you need to add TanStack Store as a dependency: + +```bash +npm install @tanstack/store +``` + +Now let's create a simple counter in the `src/App.tsx` file as a demonstration. + +```tsx +import { useStore } from '@tanstack/react-store'; +import { Store } from '@tanstack/store'; +import './App.css'; + +const countStore = new Store(0); + +function App() { + const count = useStore(countStore); + return ( +
+ <div> + <button onClick={() => countStore.setState((n) => n + 1)}> + Increment - {count} + </button> + </div>
+ ); +} + +export default App; +``` + +One of the many nice features of TanStack Store is the ability to derive state from other state. That derived state will update when the base state updates. + +Let's check this out by doubling the count using derived state. + +```tsx +import { useStore } from '@tanstack/react-store'; +import { Store, Derived } from '@tanstack/store'; +import './App.css'; + +const countStore = new Store(0); + +const doubledStore = new Derived({ + fn: () => countStore.state * 2, + deps: [countStore], +}); +doubledStore.mount(); + +function App() { + const count = useStore(countStore); + const doubledCount = useStore(doubledStore); + + return ( +
+ <div> + <button onClick={() => countStore.setState((n) => n + 1)}> + Increment - {count} + </button> + <div>Doubled - {doubledCount}</div> + </div>
+ ); +} + +export default App; +``` + +We use the `Derived` class to create a new store that is derived from another store. The `Derived` class has a `mount` method that will start the derived store updating. + +Once we've created the derived store we can use it in the `App` component just like we would any other store using the `useStore` hook. + +You can find out everything you need to know on how to use TanStack Store in the [TanStack Store documentation](https://tanstack.com/store/latest). + +# Demo files + +Files prefixed with `demo` can be safely deleted. They are there to provide a starting point for you to play around with the features you've installed. + +# Learn More + +You can learn more about all of the offerings from TanStack in the [TanStack documentation](https://tanstack.com). diff --git a/packages/web/scripts/db-connect.ts b/packages/web/scripts/db-connect.ts new file mode 100644 index 00000000..b974a883 --- /dev/null +++ b/packages/web/scripts/db-connect.ts @@ -0,0 +1,70 @@ +/** + * Test PlanetScale Postgres connection + * + * Run: pnpm tsx scripts/db-connect.ts + */ + +import "dotenv/config" +import postgres from "postgres" + +const CONNECTION_STRING = process.env.DATABASE_URL + +if (!CONNECTION_STRING) { + console.error("❌ DATABASE_URL is required in .env") + process.exit(1) +} + +const sql = postgres(CONNECTION_STRING, { + ssl: "require", + max: 1, + idle_timeout: 20, + connect_timeout: 10, +}) + +async function testConnection() { + console.log("🔌 Connecting to PlanetScale Postgres...") + + try { + // Test basic connection + const [result] = await sql`SELECT NOW() as time, current_database() as db` + console.log("✅ Connected!") + console.log(` Database: ${result.db}`) + console.log(` Server time: ${result.time}`) + + // List all databases + const databases = await sql` + SELECT datname FROM pg_database WHERE datistemplate = false ORDER BY datname + ` + console.log(`\n📁 Databases:`) + for (const d of databases) { + console.log(` - ${d.datname}`) + } + + // List tables in current db + const tables = await sql` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + ORDER BY table_name + ` + + if (tables.length > 0) { + console.log(`\n📋 Tables (${tables.length}):`) + for (const t of tables) { + console.log(` - ${t.table_name}`) + } + } else { + console.log("\n📋 No tables found in public schema") + } + + // Show version + const [version] = await sql`SELECT version()` + console.log(`\n🐘 ${version.version}`) + } catch (err) { + console.error("❌ Connection failed:", err) + } finally { + await sql.end() + } +} + +testConnection() diff --git a/packages/web/scripts/db-query.ts b/packages/web/scripts/db-query.ts new file mode 100644 index 00000000..f2b06b95 --- /dev/null +++ b/packages/web/scripts/db-query.ts @@ -0,0 +1,250 @@ +/** + * Production Database Query Tool + * Allows CRUD operations on the production database + * + * Usage: + * DATABASE_URL="..." 
pnpm tsx scripts/db-query.ts + * + * Commands (interactive): + * tables - List all tables + * users - List all users + * threads - List chat threads + * sql - Run raw SQL + * insert-user - Create a user + * delete-user - Delete a user + * help - Show commands + * exit - Exit + */ + +import "dotenv/config" +import postgres from "postgres" +import * as readline from "readline" + +const CONNECTION_STRING = process.env.DATABASE_URL + +if (!CONNECTION_STRING) { + console.error("❌ DATABASE_URL is required") + process.exit(1) +} + +const sql = postgres(CONNECTION_STRING, { + ssl: "require", + max: 1, +}) + +const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, +}) + +function prompt(question: string): Promise { + return new Promise((resolve) => { + rl.question(question, resolve) + }) +} + +async function listTables() { + const tables = await sql` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + ORDER BY table_name + ` + console.log("\n📋 Tables:") + if (tables.length === 0) { + console.log(" (no tables found)") + } else { + for (const t of tables) { + const count = await sql` + SELECT COUNT(*) as count FROM ${sql(t.table_name)} + ` + console.log(` - ${t.table_name} (${count[0].count} rows)`) + } + } + console.log() +} + +async function listUsers() { + try { + const users = await sql`SELECT id, name, email, "createdAt" FROM users ORDER BY "createdAt" DESC LIMIT 20` + console.log("\n👥 Users:") + if (users.length === 0) { + console.log(" (no users)") + } else { + for (const u of users) { + console.log(` - ${u.id}: ${u.name} <${u.email}> (${u.createdAt})`) + } + } + console.log() + } catch (e) { + console.log(" ❌ users table not found or error:", (e as Error).message) + } +} + +async function listThreads() { + try { + const threads = await sql` + SELECT id, title, user_id, created_at + FROM chat_threads + ORDER BY created_at DESC + LIMIT 20 + ` + console.log("\n💬 Chat Threads:") + if (threads.length === 0) { + console.log(" (no threads)") + } else { + for (const t of threads) { + console.log(` - #${t.id}: "${t.title}" (user: ${t.user_id})`) + } + } + console.log() + } catch (e) { + console.log(" ❌ chat_threads table not found or error:", (e as Error).message) + } +} + +async function runSQL(query: string) { + try { + const result = await sql.unsafe(query) + console.log("\n✅ Result:") + console.log(result) + console.log() + } catch (e) { + console.log("❌ Error:", (e as Error).message) + } +} + +async function insertUser(email: string, name: string) { + try { + const id = `user_${Date.now()}` + await sql` + INSERT INTO users (id, name, email, "emailVerified", "createdAt", "updatedAt") + VALUES (${id}, ${name}, ${email}, false, NOW(), NOW()) + ` + console.log(`✅ Created user: ${id}`) + } catch (e) { + console.log("❌ Error:", (e as Error).message) + } +} + +async function deleteUser(id: string) { + try { + const result = await sql`DELETE FROM users WHERE id = ${id}` + console.log(`✅ Deleted ${result.count} user(s)`) + } catch (e) { + console.log("❌ Error:", (e as Error).message) + } +} + +function showHelp() { + console.log(` +📖 Commands: + tables - List all tables with row counts + users - List users (max 20) + threads - List chat threads (max 20) + sql - Run raw SQL query + insert-user - Create a new user + delete-user - Delete a user by ID + drop-all - Drop all tables (dangerous!) + help - Show this help + exit - Exit the tool +`) +} + +async function dropAll() { + const confirm = await prompt("⚠️ This will DROP ALL TABLES. 
Type 'DROP' to confirm: ") + if (confirm !== "DROP") { + console.log("Aborted.") + return + } + + const tables = [ + "thread_context_items", + "context_items", + "canvas_images", + "canvas", + "chat_messages", + "chat_threads", + "verifications", + "accounts", + "sessions", + "users", + ] + + for (const table of tables) { + try { + await sql`DROP TABLE IF EXISTS ${sql(table)} CASCADE` + console.log(` ✓ Dropped ${table}`) + } catch (e) { + console.log(` ✗ ${table}: ${(e as Error).message}`) + } + } + console.log("\n✓ All tables dropped") +} + +async function main() { + console.log("🔌 Connected to production database") + console.log('Type "help" for commands, "exit" to quit.\n') + + // Check initial connection + try { + const [result] = await sql`SELECT current_database() as db` + console.log(`Database: ${result.db}\n`) + } catch (e) { + console.error("❌ Connection failed:", (e as Error).message) + process.exit(1) + } + + while (true) { + const input = await prompt("db> ") + const [cmd, ...args] = input.trim().split(/\s+/) + + switch (cmd.toLowerCase()) { + case "tables": + await listTables() + break + case "users": + await listUsers() + break + case "threads": + await listThreads() + break + case "sql": + await runSQL(args.join(" ")) + break + case "insert-user": + if (args.length < 2) { + console.log("Usage: insert-user ") + } else { + await insertUser(args[0], args.slice(1).join(" ")) + } + break + case "delete-user": + if (args.length < 1) { + console.log("Usage: delete-user ") + } else { + await deleteUser(args[0]) + } + break + case "drop-all": + await dropAll() + break + case "help": + showHelp() + break + case "exit": + case "quit": + case "q": + console.log("Bye!") + await sql.end() + rl.close() + process.exit(0) + case "": + break + default: + console.log(`Unknown command: ${cmd}. Type "help" for commands.`) + } + } +} + +main().catch(console.error) diff --git a/packages/web/scripts/migrate-safe.ts b/packages/web/scripts/migrate-safe.ts new file mode 100644 index 00000000..338eaae7 --- /dev/null +++ b/packages/web/scripts/migrate-safe.ts @@ -0,0 +1,149 @@ +/** + * Safe production migration script + * Usage: DATABASE_URL="..." 
pnpm tsx scripts/migrate-safe.ts [option] + * Options: check | auth | drizzle | both + */ +import postgres from "postgres" + +const DATABASE_URL = process.env.DATABASE_URL +if (!DATABASE_URL) { + console.error("❌ DATABASE_URL is required") + process.exit(1) +} + +const sql = postgres(DATABASE_URL) + +async function checkConnection() { + try { + await sql`SELECT 1` + console.log("✓ Connected to database") + return true + } catch (e) { + console.error("✗ Connection failed:", (e as Error).message) + return false + } +} + +async function listTables() { + const tables = await sql` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + ORDER BY table_name + ` + if (tables.length === 0) { + console.log(" (no tables)") + } else { + tables.forEach((t) => console.log(" -", t.table_name)) + } +} + +async function checkAuthTables() { + const cols = await sql` + SELECT column_name + FROM information_schema.columns + WHERE table_name = 'verifications' + ORDER BY ordinal_position + ` + if (cols.length === 0) { + console.log(" verifications: NOT EXISTS (will be created)") + return false + } + const colNames = cols.map((c) => c.column_name) + if (colNames.includes("expiresAt")) { + console.log(" verifications: ✓ Correct (camelCase)") + return true + } else { + console.log(" verifications: ⚠ Wrong columns:", colNames.join(", ")) + return false + } +} + +async function createAuthTables() { + console.log("Dropping existing auth tables...") + await sql`DROP TABLE IF EXISTS verifications CASCADE` + await sql`DROP TABLE IF EXISTS accounts CASCADE` + await sql`DROP TABLE IF EXISTS sessions CASCADE` + await sql`DROP TABLE IF EXISTS users CASCADE` + + console.log("Creating auth tables with camelCase columns...") + await sql.unsafe(` + CREATE TABLE users ( + id text PRIMARY KEY, + name text NOT NULL, + email text NOT NULL UNIQUE, + "emailVerified" boolean NOT NULL DEFAULT false, + image text, + "createdAt" timestamp NOT NULL DEFAULT now(), + "updatedAt" timestamp NOT NULL DEFAULT now() + ); + CREATE TABLE sessions ( + id text PRIMARY KEY, + "expiresAt" timestamp NOT NULL, + token text NOT NULL UNIQUE, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL, + "ipAddress" text, + "userAgent" text, + "userId" text NOT NULL REFERENCES users(id) ON DELETE CASCADE + ); + CREATE TABLE accounts ( + id text PRIMARY KEY, + "accountId" text NOT NULL, + "providerId" text NOT NULL, + "userId" text NOT NULL REFERENCES users(id) ON DELETE CASCADE, + "accessToken" text, + "refreshToken" text, + "idToken" text, + "accessTokenExpiresAt" timestamp, + "refreshTokenExpiresAt" timestamp, + scope text, + password text, + "createdAt" timestamp NOT NULL, + "updatedAt" timestamp NOT NULL + ); + CREATE TABLE verifications ( + id text PRIMARY KEY, + identifier text NOT NULL, + value text NOT NULL, + "expiresAt" timestamp NOT NULL, + "createdAt" timestamp NOT NULL DEFAULT now(), + "updatedAt" timestamp NOT NULL DEFAULT now() + ); + `) + console.log("✓ Auth tables created") +} + +async function main() { + const option = process.argv[2] || "check" + + if (!(await checkConnection())) { + await sql.end() + process.exit(1) + } + + if (option === "check") { + console.log("\nCurrent tables:") + await listTables() + console.log("\nAuth tables status:") + await checkAuthTables() + } else if (option === "auth") { + await createAuthTables() + } else if (option === "drizzle") { + console.log("Run: DATABASE_URL=\"...\" pnpm drizzle-kit push --force") + } else if (option === "both") { + await createAuthTables() + 
console.log("\nNow run: DATABASE_URL=\"...\" pnpm drizzle-kit push --force") + } else { + console.log("Unknown option:", option) + console.log("Options: check | auth | drizzle | both") + } + + await sql.end() +} + +main().catch((e) => { + console.error(e) + sql.end() + process.exit(1) +}) diff --git a/packages/web/scripts/push-schema.ts b/packages/web/scripts/push-schema.ts new file mode 100644 index 00000000..b5f18247 --- /dev/null +++ b/packages/web/scripts/push-schema.ts @@ -0,0 +1,222 @@ +/** + * Push schema directly to PlanetScale Postgres + * Bypasses drizzle-kit permission issues + * + * Run: DATABASE_URL="..." pnpm tsx scripts/push-schema.ts + * + * NOTE: PlanetScale API tokens may not have CREATE permissions. + * If you get "permission denied for schema public", you need to: + * 1. Go to PlanetScale dashboard + * 2. Create a new password with "Admin" role + * 3. Use that connection string instead + * OR run the SQL manually in PlanetScale's web console + */ + +import "dotenv/config" +import postgres from "postgres" + +const databaseUrl = process.env.DATABASE_URL + +if (!databaseUrl) { + throw new Error("DATABASE_URL is required") +} + +// Allow disabling SSL for local/dev databases while keeping require for prod. +const parsed = new URL(databaseUrl) +const hostname = parsed.hostname +const explicitSsl = process.env.DATABASE_SSL?.toLowerCase() +const isLocalHost = + hostname === "localhost" || + hostname === "127.0.0.1" || + hostname.endsWith(".local") || + hostname.endsWith(".localtest.me") + +const ssl = + explicitSsl === "disable" + ? false + : explicitSsl === "require" + ? "require" + : isLocalHost + ? false + : "require" + +const sql = postgres(databaseUrl, { + ssl, + max: 1, +}) + +async function pushSchema() { + console.log("🚀 Pushing schema to PlanetScale Postgres...") + + // Check if we have CREATE permissions + const [user] = await sql`SELECT current_user` + console.log(` Connected as: ${user.current_user}`) + + try { + // Better-auth tables (camelCase columns) + await sql` + CREATE TABLE IF NOT EXISTS "users" ( + "id" text PRIMARY KEY, + "name" text NOT NULL, + "email" text NOT NULL UNIQUE, + "emailVerified" boolean NOT NULL DEFAULT false, + "image" text, + "createdAt" timestamptz NOT NULL DEFAULT now(), + "updatedAt" timestamptz NOT NULL DEFAULT now() + ) + ` + console.log("✅ Created users table") + + await sql` + CREATE TABLE IF NOT EXISTS "sessions" ( + "id" text PRIMARY KEY, + "expiresAt" timestamptz NOT NULL, + "token" text NOT NULL UNIQUE, + "createdAt" timestamptz NOT NULL, + "updatedAt" timestamptz NOT NULL, + "ipAddress" text, + "userAgent" text, + "userId" text NOT NULL REFERENCES "users"("id") ON DELETE cascade + ) + ` + console.log("✅ Created sessions table") + + await sql` + CREATE TABLE IF NOT EXISTS "accounts" ( + "id" text PRIMARY KEY, + "accountId" text NOT NULL, + "providerId" text NOT NULL, + "userId" text NOT NULL REFERENCES "users"("id") ON DELETE cascade, + "accessToken" text, + "refreshToken" text, + "idToken" text, + "accessTokenExpiresAt" timestamptz, + "refreshTokenExpiresAt" timestamptz, + "scope" text, + "password" text, + "createdAt" timestamptz NOT NULL, + "updatedAt" timestamptz NOT NULL + ) + ` + console.log("✅ Created accounts table") + + await sql` + CREATE TABLE IF NOT EXISTS "verifications" ( + "id" text PRIMARY KEY, + "identifier" text NOT NULL, + "value" text NOT NULL, + "expiresAt" timestamptz NOT NULL, + "createdAt" timestamptz DEFAULT now(), + "updatedAt" timestamptz DEFAULT now() + ) + ` + console.log("✅ Created verifications 
table") + + // App tables (snake_case for Electric sync) + await sql` + CREATE TABLE IF NOT EXISTS "chat_threads" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "title" text NOT NULL, + "user_id" text NOT NULL, + "created_at" timestamptz NOT NULL DEFAULT now() + ) + ` + console.log("✅ Created chat_threads table") + + await sql` + CREATE TABLE IF NOT EXISTS "chat_messages" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "thread_id" integer NOT NULL REFERENCES "chat_threads"("id") ON DELETE cascade, + "role" varchar(32) NOT NULL, + "content" text NOT NULL, + "created_at" timestamptz NOT NULL DEFAULT now() + ) + ` + console.log("✅ Created chat_messages table") + + await sql` + CREATE TABLE IF NOT EXISTS "canvas" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid(), + "owner_id" text NOT NULL REFERENCES "users"("id") ON DELETE cascade, + "name" text NOT NULL DEFAULT 'Untitled Canvas', + "width" integer NOT NULL DEFAULT 1024, + "height" integer NOT NULL DEFAULT 1024, + "default_model" text NOT NULL DEFAULT 'gemini-2.0-flash-exp-image-generation', + "default_style" text NOT NULL DEFAULT 'default', + "background_prompt" text, + "created_at" timestamptz NOT NULL DEFAULT now(), + "updated_at" timestamptz NOT NULL DEFAULT now() + ) + ` + console.log("✅ Created canvas table") + + await sql` + CREATE TABLE IF NOT EXISTS "canvas_images" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid(), + "canvas_id" uuid NOT NULL REFERENCES "canvas"("id") ON DELETE cascade, + "name" text NOT NULL DEFAULT 'Untitled Image', + "prompt" text NOT NULL DEFAULT '', + "model_id" text NOT NULL DEFAULT 'gemini-2.0-flash-exp-image-generation', + "model_used" text, + "style_id" text NOT NULL DEFAULT 'default', + "width" integer NOT NULL DEFAULT 512, + "height" integer NOT NULL DEFAULT 512, + "position" jsonb NOT NULL DEFAULT '{"x": 0, "y": 0}', + "rotation" double precision NOT NULL DEFAULT 0, + "content_base64" text, + "image_url" text, + "metadata" jsonb, + "branch_parent_id" uuid, + "created_at" timestamptz NOT NULL DEFAULT now(), + "updated_at" timestamptz NOT NULL DEFAULT now() + ) + ` + console.log("✅ Created canvas_images table") + + await sql` + CREATE TABLE IF NOT EXISTS "context_items" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "user_id" text NOT NULL REFERENCES "users"("id") ON DELETE cascade, + "type" varchar(32) NOT NULL, + "url" text, + "name" text NOT NULL, + "content" text, + "refreshing" boolean NOT NULL DEFAULT false, + "parent_id" integer, + "created_at" timestamptz NOT NULL DEFAULT now(), + "updated_at" timestamptz NOT NULL DEFAULT now() + ) + ` + console.log("✅ Created context_items table") + + await sql` + CREATE TABLE IF NOT EXISTS "thread_context_items" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "thread_id" integer NOT NULL REFERENCES "chat_threads"("id") ON DELETE cascade, + "context_item_id" integer NOT NULL REFERENCES "context_items"("id") ON DELETE cascade, + "created_at" timestamptz NOT NULL DEFAULT now() + ) + ` + console.log("✅ Created thread_context_items table") + + console.log("\n🎉 All tables created successfully!") + + // List tables + const tables = await sql` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + ORDER BY table_name + ` + console.log("\n📋 Tables in database:") + for (const t of tables) { + console.log(` - ${t.table_name}`) + } + } catch (err) { + console.error("❌ Error:", err) + } finally { + await sql.end() + } +} + +pushSchema() diff --git a/packages/web/scripts/schema.sql 
b/packages/web/scripts/schema.sql new file mode 100644 index 00000000..85f70e2c --- /dev/null +++ b/packages/web/scripts/schema.sql @@ -0,0 +1,118 @@ +-- PlanetScale Postgres Schema +-- Run this in PlanetScale's web console if API token doesn't have CREATE permissions + +-- Better-auth tables (camelCase columns) +CREATE TABLE IF NOT EXISTS "users" ( + "id" text PRIMARY KEY, + "name" text NOT NULL, + "email" text NOT NULL UNIQUE, + "emailVerified" boolean NOT NULL DEFAULT false, + "image" text, + "createdAt" timestamptz NOT NULL DEFAULT now(), + "updatedAt" timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS "sessions" ( + "id" text PRIMARY KEY, + "expiresAt" timestamptz NOT NULL, + "token" text NOT NULL UNIQUE, + "createdAt" timestamptz NOT NULL, + "updatedAt" timestamptz NOT NULL, + "ipAddress" text, + "userAgent" text, + "userId" text NOT NULL REFERENCES "users"("id") ON DELETE cascade +); + +CREATE TABLE IF NOT EXISTS "accounts" ( + "id" text PRIMARY KEY, + "accountId" text NOT NULL, + "providerId" text NOT NULL, + "userId" text NOT NULL REFERENCES "users"("id") ON DELETE cascade, + "accessToken" text, + "refreshToken" text, + "idToken" text, + "accessTokenExpiresAt" timestamptz, + "refreshTokenExpiresAt" timestamptz, + "scope" text, + "password" text, + "createdAt" timestamptz NOT NULL, + "updatedAt" timestamptz NOT NULL +); + +CREATE TABLE IF NOT EXISTS "verifications" ( + "id" text PRIMARY KEY, + "identifier" text NOT NULL, + "value" text NOT NULL, + "expiresAt" timestamptz NOT NULL, + "createdAt" timestamptz DEFAULT now(), + "updatedAt" timestamptz DEFAULT now() +); + +-- App tables (snake_case for Electric sync) +CREATE TABLE IF NOT EXISTS "chat_threads" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "title" text NOT NULL, + "user_id" text NOT NULL, + "created_at" timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS "chat_messages" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "thread_id" integer NOT NULL REFERENCES "chat_threads"("id") ON DELETE cascade, + "role" varchar(32) NOT NULL, + "content" text NOT NULL, + "created_at" timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS "canvas" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid(), + "owner_id" text NOT NULL REFERENCES "users"("id") ON DELETE cascade, + "name" text NOT NULL DEFAULT 'Untitled Canvas', + "width" integer NOT NULL DEFAULT 1024, + "height" integer NOT NULL DEFAULT 1024, + "default_model" text NOT NULL DEFAULT 'gemini-2.0-flash-exp-image-generation', + "default_style" text NOT NULL DEFAULT 'default', + "background_prompt" text, + "created_at" timestamptz NOT NULL DEFAULT now(), + "updated_at" timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS "canvas_images" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid(), + "canvas_id" uuid NOT NULL REFERENCES "canvas"("id") ON DELETE cascade, + "name" text NOT NULL DEFAULT 'Untitled Image', + "prompt" text NOT NULL DEFAULT '', + "model_id" text NOT NULL DEFAULT 'gemini-2.0-flash-exp-image-generation', + "model_used" text, + "style_id" text NOT NULL DEFAULT 'default', + "width" integer NOT NULL DEFAULT 512, + "height" integer NOT NULL DEFAULT 512, + "position" jsonb NOT NULL DEFAULT '{"x": 0, "y": 0}', + "rotation" double precision NOT NULL DEFAULT 0, + "content_base64" text, + "image_url" text, + "metadata" jsonb, + "branch_parent_id" uuid, + "created_at" timestamptz NOT NULL DEFAULT now(), + "updated_at" timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS 
"context_items" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "user_id" text NOT NULL REFERENCES "users"("id") ON DELETE cascade, + "type" varchar(32) NOT NULL, + "url" text, + "name" text NOT NULL, + "content" text, + "refreshing" boolean NOT NULL DEFAULT false, + "parent_id" integer, + "created_at" timestamptz NOT NULL DEFAULT now(), + "updated_at" timestamptz NOT NULL DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS "thread_context_items" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "thread_id" integer NOT NULL REFERENCES "chat_threads"("id") ON DELETE cascade, + "context_item_id" integer NOT NULL REFERENCES "context_items"("id") ON DELETE cascade, + "created_at" timestamptz NOT NULL DEFAULT now() +); diff --git a/packages/web/scripts/seed.ts b/packages/web/scripts/seed.ts new file mode 100644 index 00000000..6c236f23 --- /dev/null +++ b/packages/web/scripts/seed.ts @@ -0,0 +1,350 @@ +import "dotenv/config" +import crypto from "node:crypto" +import { sql, eq } from "drizzle-orm" +import { getDb, getAuthDb } from "../src/db/connection" +import { + accounts, + chat_messages, + chat_threads, + sessions, + users, + verifications, +} from "../src/db/schema" + +const databaseUrl = process.env.DATABASE_URL + +if (!databaseUrl) { + throw new Error("DATABASE_URL is required in packages/web/.env") +} + +const appDb = getDb(databaseUrl) +const authDb = getAuthDb(databaseUrl) + +async function ensureTables() { + await authDb.execute(sql` + CREATE TABLE IF NOT EXISTS "users" ( + "id" text PRIMARY KEY, + "name" text NOT NULL, + "email" text NOT NULL UNIQUE, + "emailVerified" boolean NOT NULL DEFAULT false, + "image" text, + "createdAt" timestamptz NOT NULL DEFAULT now(), + "updatedAt" timestamptz NOT NULL DEFAULT now() + ); + `) + + await authDb.execute(sql` + CREATE TABLE IF NOT EXISTS "sessions" ( + "id" text PRIMARY KEY, + "expiresAt" timestamptz NOT NULL, + "token" text NOT NULL UNIQUE, + "createdAt" timestamptz NOT NULL, + "updatedAt" timestamptz NOT NULL, + "ipAddress" text, + "userAgent" text, + "userId" text NOT NULL REFERENCES "users"("id") ON DELETE cascade + ); + `) + + await authDb.execute(sql` + CREATE TABLE IF NOT EXISTS "accounts" ( + "id" text PRIMARY KEY, + "accountId" text NOT NULL, + "providerId" text NOT NULL, + "userId" text NOT NULL REFERENCES "users"("id") ON DELETE cascade, + "accessToken" text, + "refreshToken" text, + "idToken" text, + "accessTokenExpiresAt" timestamptz, + "refreshTokenExpiresAt" timestamptz, + "scope" text, + "password" text, + "createdAt" timestamptz NOT NULL, + "updatedAt" timestamptz NOT NULL + ); + `) + + await authDb.execute(sql` + CREATE TABLE IF NOT EXISTS "verifications" ( + "id" text PRIMARY KEY, + "identifier" text NOT NULL, + "value" text NOT NULL, + "expiresAt" timestamptz NOT NULL, + "createdAt" timestamptz DEFAULT now(), + "updatedAt" timestamptz DEFAULT now() + ); + `) + + // Backfill camelCase columns when an older snake_case seed created the tables. + // Add missing legacy snake_case columns first so COALESCE references are safe. 
+ await authDb.execute(sql` + ALTER TABLE "users" + ADD COLUMN IF NOT EXISTS "email_verified" boolean, + ADD COLUMN IF NOT EXISTS "created_at" timestamptz, + ADD COLUMN IF NOT EXISTS "updated_at" timestamptz + `) + await authDb.execute(sql` + ALTER TABLE "sessions" + ADD COLUMN IF NOT EXISTS "expires_at" timestamptz, + ADD COLUMN IF NOT EXISTS "created_at" timestamptz, + ADD COLUMN IF NOT EXISTS "updated_at" timestamptz, + ADD COLUMN IF NOT EXISTS "ip_address" text, + ADD COLUMN IF NOT EXISTS "user_agent" text, + ADD COLUMN IF NOT EXISTS "user_id" text + `) + await authDb.execute(sql` + ALTER TABLE "accounts" + ADD COLUMN IF NOT EXISTS "account_id" text, + ADD COLUMN IF NOT EXISTS "provider_id" text, + ADD COLUMN IF NOT EXISTS "user_id" text, + ADD COLUMN IF NOT EXISTS "access_token" text, + ADD COLUMN IF NOT EXISTS "refresh_token" text, + ADD COLUMN IF NOT EXISTS "id_token" text, + ADD COLUMN IF NOT EXISTS "access_token_expires_at" timestamptz, + ADD COLUMN IF NOT EXISTS "refresh_token_expires_at" timestamptz, + ADD COLUMN IF NOT EXISTS "created_at" timestamptz, + ADD COLUMN IF NOT EXISTS "updated_at" timestamptz + `) + await authDb.execute(sql` + ALTER TABLE "verifications" + ADD COLUMN IF NOT EXISTS "expires_at" timestamptz, + ADD COLUMN IF NOT EXISTS "created_at" timestamptz, + ADD COLUMN IF NOT EXISTS "updated_at" timestamptz + `) + + await authDb.execute(sql` + ALTER TABLE "users" + ADD COLUMN IF NOT EXISTS "emailVerified" boolean DEFAULT false, + ADD COLUMN IF NOT EXISTS "createdAt" timestamptz DEFAULT now(), + ADD COLUMN IF NOT EXISTS "updatedAt" timestamptz DEFAULT now() + `) + await authDb.execute( + sql`UPDATE "users" SET "emailVerified" = COALESCE("emailVerified", "email_verified")`, + ) + await authDb.execute( + sql`UPDATE "users" SET "createdAt" = COALESCE("createdAt", "created_at")`, + ) + await authDb.execute( + sql`UPDATE "users" SET "updatedAt" = COALESCE("updatedAt", "updated_at")`, + ) + + await authDb.execute(sql` + ALTER TABLE "sessions" + ADD COLUMN IF NOT EXISTS "expiresAt" timestamptz, + ADD COLUMN IF NOT EXISTS "token" text, + ADD COLUMN IF NOT EXISTS "createdAt" timestamptz, + ADD COLUMN IF NOT EXISTS "updatedAt" timestamptz, + ADD COLUMN IF NOT EXISTS "ipAddress" text, + ADD COLUMN IF NOT EXISTS "userAgent" text, + ADD COLUMN IF NOT EXISTS "userId" text + `) + await authDb.execute( + sql`UPDATE "sessions" SET "expiresAt" = COALESCE("expiresAt", "expires_at")`, + ) + await authDb.execute( + sql`UPDATE "sessions" SET "createdAt" = COALESCE("createdAt", "created_at")`, + ) + await authDb.execute( + sql`UPDATE "sessions" SET "updatedAt" = COALESCE("updatedAt", "updated_at")`, + ) + await authDb.execute( + sql`UPDATE "sessions" SET "ipAddress" = COALESCE("ipAddress", "ip_address")`, + ) + await authDb.execute( + sql`UPDATE "sessions" SET "userAgent" = COALESCE("userAgent", "user_agent")`, + ) + await authDb.execute( + sql`UPDATE "sessions" SET "userId" = COALESCE("userId", "user_id")`, + ) + + await authDb.execute(sql` + ALTER TABLE "accounts" + ADD COLUMN IF NOT EXISTS "accountId" text, + ADD COLUMN IF NOT EXISTS "providerId" text, + ADD COLUMN IF NOT EXISTS "userId" text, + ADD COLUMN IF NOT EXISTS "accessToken" text, + ADD COLUMN IF NOT EXISTS "refreshToken" text, + ADD COLUMN IF NOT EXISTS "idToken" text, + ADD COLUMN IF NOT EXISTS "accessTokenExpiresAt" timestamptz, + ADD COLUMN IF NOT EXISTS "refreshTokenExpiresAt" timestamptz, + ADD COLUMN IF NOT EXISTS "createdAt" timestamptz, + ADD COLUMN IF NOT EXISTS "updatedAt" timestamptz + `) + await authDb.execute( + 
sql`UPDATE "accounts" SET "accountId" = COALESCE("accountId", "account_id")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "providerId" = COALESCE("providerId", "provider_id")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "userId" = COALESCE("userId", "user_id")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "accessToken" = COALESCE("accessToken", "access_token")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "refreshToken" = COALESCE("refreshToken", "refresh_token")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "idToken" = COALESCE("idToken", "id_token")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "accessTokenExpiresAt" = COALESCE("accessTokenExpiresAt", "access_token_expires_at")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "refreshTokenExpiresAt" = COALESCE("refreshTokenExpiresAt", "refresh_token_expires_at")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "createdAt" = COALESCE("createdAt", "created_at")`, + ) + await authDb.execute( + sql`UPDATE "accounts" SET "updatedAt" = COALESCE("updatedAt", "updated_at")`, + ) + + await authDb.execute(sql` + ALTER TABLE "verifications" + ADD COLUMN IF NOT EXISTS "expiresAt" timestamptz, + ADD COLUMN IF NOT EXISTS "createdAt" timestamptz, + ADD COLUMN IF NOT EXISTS "updatedAt" timestamptz + `) + await authDb.execute( + sql`UPDATE "verifications" SET "expiresAt" = COALESCE("expiresAt", "expires_at")`, + ) + await authDb.execute( + sql`UPDATE "verifications" SET "createdAt" = COALESCE("createdAt", "created_at")`, + ) + await authDb.execute( + sql`UPDATE "verifications" SET "updatedAt" = COALESCE("updatedAt", "updated_at")`, + ) + + await appDb.execute(sql` + CREATE TABLE IF NOT EXISTS "chat_threads" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "title" text NOT NULL, + "user_id" text NOT NULL, + "created_at" timestamptz NOT NULL DEFAULT now() + ); + `) + + await appDb.execute(sql` + CREATE TABLE IF NOT EXISTS "chat_messages" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "thread_id" integer NOT NULL REFERENCES "chat_threads"("id") ON DELETE cascade, + "role" varchar(32) NOT NULL, + "content" text NOT NULL, + "created_at" timestamptz NOT NULL DEFAULT now() + ); + `) + + await appDb.execute(sql` + CREATE TABLE IF NOT EXISTS "context_items" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "user_id" text NOT NULL REFERENCES "users"("id") ON DELETE cascade, + "type" varchar(32) NOT NULL, + "url" text, + "name" text NOT NULL, + "content" text, + "refreshing" boolean NOT NULL DEFAULT false, + "parent_id" integer, + "created_at" timestamptz NOT NULL DEFAULT now(), + "updated_at" timestamptz NOT NULL DEFAULT now() + ); + `) + + await appDb.execute(sql` + CREATE TABLE IF NOT EXISTS "thread_context_items" ( + "id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + "thread_id" integer NOT NULL REFERENCES "chat_threads"("id") ON DELETE cascade, + "context_item_id" integer NOT NULL REFERENCES "context_items"("id") ON DELETE cascade, + "created_at" timestamptz NOT NULL DEFAULT now() + ); + `) +} + +async function seed() { + await ensureTables() + + const demoUserId = "demo-user" + const demoEmail = "demo@ai.chat" + + await authDb + .insert(users) + .values({ + id: demoUserId, + name: "Demo User", + email: demoEmail, + emailVerified: true, + image: null, + createdAt: new Date(), + updatedAt: new Date(), + }) + .onConflictDoNothing({ target: users.id }) + + // Clear any orphaned auth rows for the demo user to keep data tidy + await 
authDb.delete(sessions).where(eq(sessions.userId, demoUserId)) + await authDb.delete(accounts).where(eq(accounts.userId, demoUserId)) + await authDb.delete(verifications).where(eq(verifications.identifier, demoEmail)) + + // Find or create a chat thread for the demo user + const [existingThread] = await appDb + .select() + .from(chat_threads) + .where(eq(chat_threads.user_id, demoUserId)) + .limit(1) + + const [thread] = + existingThread && existingThread.id + ? [existingThread] + : await appDb + .insert(chat_threads) + .values({ + title: "Getting started with AI chat", + user_id: demoUserId, + }) + .returning() + + const threadId = thread.id + + await appDb + .delete(chat_messages) + .where(eq(chat_messages.thread_id, threadId)) + + const starterMessages = [ + { + role: "user", + content: "How do I get reliable AI chat responses from this app?", + }, + { + role: "assistant", + content: + "Each thread keeps your message history. You can seed demos like this one, or stream responses from your AI provider. Try adding more messages to this thread.", + }, + { + role: "user", + content: "Can I hook this up to my own model API?", + }, + { + role: "assistant", + content: + "Yes. Point your server-side handler at your model endpoint and persist messages into the database. Electric can sync them live to the client.", + }, + ] + + await appDb.insert(chat_messages).values( + starterMessages.map((msg) => ({ + thread_id: threadId, + role: msg.role, + content: msg.content, + created_at: new Date(), + })), + ) +} + +seed() + .then(() => { + console.log("Seed complete: demo user and chat thread ready.") + }) + .catch((err) => { + console.error(err) + process.exit(1) + }) diff --git a/packages/web/src/components/BillingProvider.tsx b/packages/web/src/components/BillingProvider.tsx new file mode 100644 index 00000000..305f8a4f --- /dev/null +++ b/packages/web/src/components/BillingProvider.tsx @@ -0,0 +1,28 @@ +import { FlowgladProvider } from "@flowglad/react" +import { authClient } from "@/lib/auth-client" + +type BillingProviderProps = { + children: React.ReactNode +} + +export function BillingProvider({ children }: BillingProviderProps) { + const flowgladEnabled = import.meta.env.VITE_FLOWGLAD_ENABLED === "true" + + // Skip billing entirely when Flowglad isn't configured + if (!flowgladEnabled) { + return <>{children} + } + + const { data: session, isPending } = authClient.useSession() + + // Don't load billing until we know auth state + if (isPending) { + return <>{children} + } + + return ( + + {children} + + ) +} diff --git a/packages/web/src/components/BlockLayout.tsx b/packages/web/src/components/BlockLayout.tsx new file mode 100644 index 00000000..d6a77aab --- /dev/null +++ b/packages/web/src/components/BlockLayout.tsx @@ -0,0 +1,721 @@ +import { useMemo, type ReactNode } from "react" +import { Link } from "@tanstack/react-router" +import { useState } from "react" +import { + ArrowRight, + ChevronRight, + FileText, + Globe, + MessageCircle, + Zap, + Loader2, + Link2, + ChevronDown, + Search, + ShieldCheck, + Sparkles, + Plus, +} from "lucide-react" + +import ContextPanel from "./Context-panel" + +type BlockLayoutProps = { + activeTab: "blocks" | "marketplace" + toolbar?: ReactNode + subnav?: ReactNode + children: ReactNode +} + +type MarketplaceCard = { + title: string + author: string + price: string + tone: string + accent: string + badge?: string +} + +export default function BlockLayout({ + activeTab, + subnav, + children, +}: BlockLayoutProps) { + return ( +
+ +
+
+
+
+ + {activeTab === "blocks" ? : } +
+ + {subnav ?
{subnav}
: null} + +
{children}
+
+
+
+ ) +} + +function BlockNav({ activeTab }: { activeTab: "blocks" | "marketplace" }) { + const tabs = [ + { id: "blocks", label: "My Blocks", to: "/blocks" }, + { id: "marketplace", label: "Marketplace", to: "/marketplace" }, + ] as const + + return ( +
+ {tabs.map((tab) => { + const isActive = activeTab === tab.id + return ( + + + {tab.label} + + {isActive ? ( + + + + + ) : null} + + ) + })} +
+ ) +} + +function PublishButton() { + return ( + + ) +} + +export function MarketplaceSearch() { + return ( +
+ + +
+ ) +} + +export function MyBlocksView() { + const owned: any[] = useMemo( + () => [ + { name: "Stripe Integration", badge: "Action" }, + { name: "Notion", badge: "Action" }, + { name: "X API", badge: "Action" }, + ], + [], + ) + + const custom: any[] = useMemo( + () => [ + { name: "Gmail", badge: "Action" }, + { name: "Documentation Builder", badge: "Action" }, + { name: "Electron Docs", badge: "Action" }, + { name: "Open Image Editor Ideas", badge: "Action" }, + ], + [], + ) + + return ( + +
+
+ + + +
+ +
+
+ ) +} + +function BlockListGroup({ title, items }: { title: string; items: any[] }) { + return ( +
+
+ {title} + +
+
+ {items.map((item) => ( +
+
+
+ {item.name} +
+ {item.badge ? ( + + {item.badge} + + ) : null} +
+ ))} +
+
+ ) +} + +function CreateBlockPanel() { + const [blockType, setBlockType] = useState< + "text" | "web" | "thread" | "action" + >("web") + const [options, setOptions] = useState({ + update: true, + deepScan: true, + summarise: false, + sections: true, + updateInterval: "1 hour", + deepScanLevel: "5 levels", + }) + + const blockTypes = [ + { id: "text", label: "Text", icon: FileText }, + { id: "web", label: "Web", icon: Globe }, + { id: "thread", label: "Thread", icon: MessageCircle }, + { id: "action", label: "Action", icon: Zap }, + ] as const + + const scanning = [ + { + name: "nikiv.dev", + tokens: "2,284", + children: [ + { name: "/intro", tokens: "508" }, + { name: "/code", tokens: "508" }, + { name: "/focus", tokens: "508" }, + ], + }, + { + name: "Open Image Editor Ideas", + tokens: "5,582", + children: [ + { name: "/intro", tokens: "508" }, + { name: "/code", tokens: "508" }, + { name: "/focus", tokens: "508" }, + ], + }, + ] + + const initialSelection = useMemo(() => { + const map: Record = {} + scanning.forEach((item) => { + map[item.name] = true + item.children?.forEach((child) => { + map[`${item.name}/${child.name}`] = true + }) + }) + return map + }, [scanning]) + const [selectedPaths, setSelectedPaths] = useState>( + () => initialSelection, + ) + + const togglePath = (path: string) => + setSelectedPaths((prev) => ({ ...prev, [path]: !prev[path] })) + + const [expandedPaths, setExpandedPaths] = useState>( + () => { + const map: Record = {} + scanning.forEach((item) => { + if (item.children?.length) map[item.name] = true + }) + return map + }, + ) + + const toggleExpand = (path: string) => + setExpandedPaths((prev) => ({ ...prev, [path]: !prev[path] })) + + return ( +
+
+

Create block

+
+ +
+ {blockTypes.map((type) => { + const isActive = blockType === type.id + const Icon = type.icon + return ( + + ) + })} +
+ +
+ +
+ +
+
+ +
+ + setOptions((prev) => ({ ...prev, update: !prev.update })) + } + select={{ + value: options.updateInterval, + onChange: (value) => + setOptions((prev) => ({ ...prev, updateInterval: value })), + options: ["30 min", "1 hour", "3 hours", "1 day"], + }} + /> + + setOptions((prev) => ({ ...prev, summarise: !prev.summarise })) + } + /> + + + setOptions((prev) => ({ ...prev, deepScan: !prev.deepScan })) + } + select={{ + value: options.deepScanLevel, + onChange: (value) => + setOptions((prev) => ({ ...prev, deepScanLevel: value })), + options: ["3 levels", "5 levels", "7 levels"], + }} + /> + + setOptions((prev) => ({ ...prev, sections: !prev.sections })) + } + /> +
+ +
+
+
+ + Scanning... +
+
+

+ 40 pages +

+

+ 10 tokens +

+
+
+
+ {scanning.map((item) => ( +
+
+
+ {item.children ? ( + + ) : ( + + )} + +
+ + {item.tokens} + +
+ {item.children && expandedPaths[item.name] ? ( +
+ {item.children.map((child) => ( +
+ + + {child.tokens} + +
+ ))} +
+ ) : null} +
+ ))} +
+
+
+ ) +} + +function OptionRow({ + label, + checked, + onChange, + select, +}: { + label: string + checked: boolean + onChange: () => void + select?: { + value: string + onChange: (value: string) => void + options: string[] + } +}) { + const muted = !checked + return ( +
+ + {select ? ( + + ) : ( +
+ )} +
+ ) +} + +function GradientCheckbox({ checked }: { checked: boolean }) { + return ( + + {checked ? ( + + + + ) : null} + + ) +} + +function SoftSelect({ + value, + onChange, + options, + disabled, +}: { + value: string + onChange: (value: string) => void + options: string[] + disabled?: boolean +}) { + return ( +
+ + +
+ ) +} + +function CreateCTA() { + const disabled = false + return ( +
+ +
+ ) +} + +export function MarketplaceView() { + const sections: { title: string; items: MarketplaceCard[] }[] = useMemo( + () => [ + { + title: "Featured", + items: [ + { + title: "Stripe Integration", + author: "Stripe", + price: "Free", + tone: "bg-gradient-to-r from-indigo-400 via-blue-500 to-purple-500", + accent: "border-indigo-300/40", + }, + { + title: "X API", + author: "X", + price: "$19.99", + tone: "bg-gradient-to-r from-slate-900 via-neutral-800 to-slate-950", + accent: "border-slate-500/40", + }, + { + title: "Notion", + author: "Notion", + price: "$11.99", + tone: "bg-gradient-to-r from-amber-200 via-amber-100 to-white", + accent: "border-amber-200/50", + }, + ], + }, + { + title: "Trending", + items: [ + { + title: "Dev Mode MCP", + author: "Figma", + price: "Free", + tone: "bg-gradient-to-r from-green-400 via-emerald-500 to-green-600", + accent: "border-emerald-200/50", + }, + { + title: "Gmail API Tools", + author: "hunter2", + price: "$9.99", + tone: "bg-gradient-to-r from-red-400 via-orange-400 to-yellow-400", + accent: "border-orange-300/60", + }, + { + title: "VS Code", + author: "nikiv", + price: "Free", + tone: "bg-gradient-to-r from-slate-800 via-slate-700 to-slate-900", + accent: "border-slate-500/30", + }, + ], + }, + { + title: "Recently published", + items: [ + { + title: "Spotify API", + author: "greg3", + price: "$6.99", + tone: "bg-gradient-to-r from-emerald-400 via-green-500 to-emerald-600", + accent: "border-emerald-200/50", + }, + { + title: "VS Code", + author: "nikiv", + price: "Free", + tone: "bg-gradient-to-r from-slate-800 via-slate-700 to-slate-900", + accent: "border-slate-500/30", + }, + { + title: "Dev Mode MCP", + author: "Figma", + price: "$4.99", + tone: "bg-gradient-to-r from-lime-400 via-green-500 to-emerald-600", + accent: "border-lime-200/50", + }, + ], + }, + ], + [], + ) + + return ( + }> +
+ {sections.map((section) => ( +
+
+

+ {section.title} +

+ + +
+
+ {section.items.map((item) => ( + + ))} +
+
+ ))} +
+
+ ) +} + +function MarketplaceFilters() { + return ( +
+
+ + + + +
+
+ + +
+
+ ) +} + +function FilterPill({ text, active }: { text: string; active?: boolean }) { + return ( + + ) +} + +function MarketplaceCardView({ card }: { card: MarketplaceCard }) { + return ( +
+
+
+
+ {card.title} +
+
+ by {card.author} +
+
+ + {card.price} + +
+
+
+ ) +} diff --git a/packages/web/src/components/Context-panel.tsx b/packages/web/src/components/Context-panel.tsx new file mode 100644 index 00000000..f2a7d87d --- /dev/null +++ b/packages/web/src/components/Context-panel.tsx @@ -0,0 +1,556 @@ +import { + useState, + useRef, + useEffect, + useCallback, + type ReactNode, + type RefObject, + type MouseEvent as ReactMouseEvent, +} from "react" +// import { useMutation } from "@tanstack/react-db" +import { + Brain, + ChevronDown, + ChevronRight, + File, + Globe, + Ellipsis, + LogIn, + MessageCircle, + Plus, + Trash2, + type LucideIcon, + PanelRight, + Settings, +} from "lucide-react" +import type { ChatThread } from "@/db/schema" + +interface UserProfile { + name?: string | null + email: string + image?: string | null +} + +interface ContextPanelProps { + chats: ChatThread[] + activeChatId?: string | null + isAuthenticated?: boolean + profile?: UserProfile | null | undefined +} + +interface CollapsiblePanelProps { + title: string + icon: LucideIcon + isOpen: boolean + onToggle: () => void + headerActions?: ReactNode + children: ReactNode + height?: string + isDragging?: boolean +} + +function CollapsiblePanel({ + title, + icon: Icon, + isOpen, + onToggle, + headerActions, + children, + height, + isDragging = false, +}: CollapsiblePanelProps) { + const isFlexHeight = height === "flex-1" + + return ( +
+
+
+ + {title} +
+
+ {headerActions} +
+ + +
+
+
+ +
+ {children} +
+
+ ) +} + +interface AddWebsiteModalProps { + isOpen: boolean + onClose: () => void + buttonRef: RefObject +} + +function AddWebsiteModal({ isOpen, onClose, buttonRef }: AddWebsiteModalProps) { + const [url, setUrl] = useState("") + const [isLoading, setIsLoading] = useState(false) + const [error, setError] = useState(null) + const [position, setPosition] = useState({ top: 0, left: 0 }) + + useEffect(() => { + if (isOpen && buttonRef.current) { + const rect = buttonRef.current.getBoundingClientRect() + setPosition({ + top: rect.top - 30, + left: rect.right + 12, + }) + } + }, [isOpen, buttonRef]) + + const handleAdd = async () => { + if (!url.trim()) return + + setIsLoading(true) + setError(null) + + try { + // Normalize URL - add https:// if no protocol + let normalizedUrl = url.trim() + if ( + !normalizedUrl.startsWith("http://") && + !normalizedUrl.startsWith("https://") + ) { + normalizedUrl = `https://${normalizedUrl}` + } + + const response = await fetch("/api/context-items", { + method: "POST", + credentials: "include", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + action: "addUrl", + url: normalizedUrl, + }), + }) + + if (!response.ok) { + const data = (await response.json()) as { error?: string } + throw new Error(data.error || "Failed to add URL") + } + + setUrl("") + onClose() + } catch (err) { + setError(err instanceof Error ? err.message : "Failed to add URL") + } finally { + setIsLoading(false) + } + } + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === "Enter" && !isLoading) { + handleAdd() + } + } + + if (!isOpen) return null + + return ( +
+
e.stopPropagation()} + > +
+

Add website

+
+ +
+ setUrl(e.target.value)} + onKeyDown={handleKeyDown} + placeholder="example.com" + disabled={isLoading} + className="flex-1 bg-[#0f1117]/40 rounded-lg px-4 py-2 text-white text-sm placeholder:text-neutral-500 focus:outline-none disabled:opacity-50" + style={{ boxShadow: "1px 0.5px 10px 0 rgba(0,0,0,0.4) inset" }} + /> + +
+ + {error &&

{error}

} + +

+ URL content will be fetched and made available as context for your + chats. +

+
+
+ ) +} + +export default function ContextPanel({ + chats, + activeChatId = null, + isAuthenticated = false, + profile = null, +}: ContextPanelProps) { + // const { remove } = useMutation() + const [openSections, setOpenSections] = useState({ + files: false, + web: false, + }) + const [isContextOpen, setIsContextOpen] = useState(true) + const [isThreadsOpen, setIsThreadsOpen] = useState(true) + const [threadsHeight, setThreadsHeight] = useState(350) + const [isDragging, setIsDragging] = useState(false) + const [deletingChatId, setDeletingChatId] = useState(null) + const [isAddWebsiteModalOpen, setIsAddWebsiteModalOpen] = useState(false) + const containerRef = useRef(null) + const addLinkButtonRef = useRef(null) + // For authenticated users, show email initial or first letter of name + // For guests, show "G" + const profileInitial = profile?.name?.slice(0, 1) ?? profile?.email?.slice(0, 1)?.toUpperCase() ?? "G" + const profileImage = profile?.image ?? null + + const contextItems = [ + { + id: "files", + label: "Files", + icon: File, + count: 0, + hasChevron: true, + }, + { + id: "web", + label: "Web", + icon: Globe, + count: 0, + hasChevron: true, + }, + ] + + const toggleSection = (id: string) => { + setOpenSections((prev) => ({ + ...prev, + [id]: !prev[id as keyof typeof prev], + })) + } + + const handleMouseMove = useCallback( + (e: MouseEvent) => { + if (!containerRef.current) return + + const container = containerRef.current + const containerRect = container.getBoundingClientRect() + const newHeight = e.clientY - containerRect.top - 50 + + const collapseThreshold = 80 + const minHeight = 150 + const maxHeight = containerRect.height - 250 + + if (newHeight < collapseThreshold) { + setIsThreadsOpen(false) + } else if (newHeight >= minHeight && newHeight <= maxHeight) { + if (!isThreadsOpen) { + setIsThreadsOpen(true) + } + setThreadsHeight(newHeight) + } else if (newHeight >= collapseThreshold && newHeight < minHeight) { + if (!isThreadsOpen) { + setIsThreadsOpen(true) + } + setThreadsHeight(minHeight) + } + }, + [isThreadsOpen], + ) + + const handleMouseUp = useCallback(() => { + setIsDragging(false) + }, []) + + const handleMouseDown = (e: React.MouseEvent) => { + e.preventDefault() + setIsDragging(true) + } + + useEffect(() => { + if (isDragging) { + window.addEventListener("mousemove", handleMouseMove) + window.addEventListener("mouseup", handleMouseUp) + } + + return () => { + window.removeEventListener("mousemove", handleMouseMove) + window.removeEventListener("mouseup", handleMouseUp) + } + }, [isDragging, handleMouseMove, handleMouseUp]) + + const handleDeleteChat = async ( + event: ReactMouseEvent, + chatId: string, + ) => { + event.preventDefault() + event.stopPropagation() + if (deletingChatId) return + + try { + setDeletingChatId(chatId) + // await remove.chat.with({ id: chatId }) + } catch (error) { + console.error("[contextPanel] failed to delete chat", { chatId, error }) + } finally { + setDeletingChatId(null) + } + } + + // Profile display (commented out for now) + // const profileUsername = profile?.name ?? null + // const profileInitial = profileUsername?.[0]?.toUpperCase() ?? "?" + + const toggleAllPanels = () => { + const shouldOpen = !isThreadsOpen && !isContextOpen + setIsThreadsOpen(shouldOpen) + setIsContextOpen(shouldOpen) + } + + return ( +
+
+ +
+ + + + +
+
+ +
+ setIsThreadsOpen(!isThreadsOpen)} + height={`${threadsHeight}px`} + headerActions={ + + + New + + } + > +

RECENT

+ {chats.length === 0 ? ( +

+ Start a conversation to see it here. +

+ ) : ( +
+ {chats.map((chat) => { + const isActive = chat.id.toString() === activeChatId + const displayTitle = chat.title?.trim() ?? "Untitled chat" + const isDeleting = deletingChatId === chat.id.toString() + + return ( +
+ + + {displayTitle} + + +
+ ) + })} +
+ )} +
+
+ + {(isThreadsOpen || isContextOpen) && ( +
+ +
+ )} + + setIsContextOpen(!isContextOpen)} + height="flex-1" + > +
+ 0 tokens + 1M +
+ +
+ {contextItems.map((item) => { + const Icon = item.icon + const isOpen = openSections[item.id as keyof typeof openSections] + + return ( + + ) + })} + + +
+
+ + setIsAddWebsiteModalOpen(false)} + buttonRef={addLinkButtonRef} + /> +
+ ) +} diff --git a/packages/web/src/components/Header.tsx b/packages/web/src/components/Header.tsx new file mode 100644 index 00000000..0d157da3 --- /dev/null +++ b/packages/web/src/components/Header.tsx @@ -0,0 +1,283 @@ +import { Link } from "@tanstack/react-router" + +import { useState } from "react" +import { + ChevronDown, + ChevronRight, + Home, + LogIn, + LogOut, + Menu, + Network, + Palette, + SquareFunction, + StickyNote, + User, + X, +} from "lucide-react" +import { authClient } from "@/lib/auth-client" + +export default function Header() { + const [isOpen, setIsOpen] = useState(false) + const [groupedExpanded, setGroupedExpanded] = useState< + Record + >({}) + const { data: session } = authClient.useSession() + + const handleSignOut = async () => { + await authClient.signOut() + window.location.href = "/" + } + + return ( + <> +
+
+ +

+ + TanStack Logo + +

+
+ + {session?.user ? ( + + ) : ( + + + Sign in + + )} +
+ + + + ) +} diff --git a/packages/web/src/components/Settings-panel.tsx b/packages/web/src/components/Settings-panel.tsx new file mode 100644 index 00000000..33a94792 --- /dev/null +++ b/packages/web/src/components/Settings-panel.tsx @@ -0,0 +1,112 @@ +import { useMemo } from "react" +import { + ArrowLeft, + SlidersHorizontal, + UserRound, + type LucideIcon, + CreditCard, +} from "lucide-react" + +type SettingsSection = "preferences" | "profile" | "billing" + +interface UserProfile { + name?: string | null + email: string + image?: string | null +} + +interface SettingsPanelProps { + activeSection: SettingsSection + onSelect: (section: SettingsSection) => void + profile?: UserProfile | null | undefined +} + +type NavItem = { + id: SettingsSection + label: string + icon: LucideIcon +} + +const navItems: NavItem[] = [ + { id: "preferences", label: "Preferences", icon: SlidersHorizontal }, + { id: "profile", label: "Profile", icon: UserRound }, + { id: "billing", label: "Manage Billing", icon: CreditCard }, +] + +function Avatar({ profile }: { profile?: UserProfile | null }) { + const initial = useMemo(() => { + if (!profile) return "G" + return ( + profile.name?.slice(0, 1) ?? + profile.email?.slice(0, 1)?.toUpperCase() ?? + "G" + ) + }, [profile]) + + if (profile?.image) { + return ( + {profile.name + ) + } + + return ( +
+ {initial} +
+ ) +} + +export default function SettingsPanel({ + activeSection, + onSelect, + profile, +}: SettingsPanelProps) { + return ( + + ) +} diff --git a/packages/web/src/components/ShaderBackground.tsx b/packages/web/src/components/ShaderBackground.tsx new file mode 100644 index 00000000..bc98cc4f --- /dev/null +++ b/packages/web/src/components/ShaderBackground.tsx @@ -0,0 +1,493 @@ +import { useEffect, useRef, useState } from "react" + +const BLIT_SHADER = ` +@group(0) @binding(0) var inputTex: texture_2d; +@group(0) @binding(1) var inputSampler: sampler; + +struct VertexOutput { + @builtin(position) position: vec4f, + @location(0) uv: vec2f, +} + +@vertex +fn vs(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput { + var pos = array( + vec2f(-1.0, -1.0), + vec2f(1.0, -1.0), + vec2f(-1.0, 1.0), + vec2f(-1.0, 1.0), + vec2f(1.0, -1.0), + vec2f(1.0, 1.0), + ); + var uv = array( + vec2f(0.0, 1.0), + vec2f(1.0, 1.0), + vec2f(0.0, 0.0), + vec2f(0.0, 0.0), + vec2f(1.0, 1.0), + vec2f(1.0, 0.0), + ); + var output: VertexOutput; + output.position = vec4f(pos[vertexIndex], 0.0, 1.0); + output.uv = uv[vertexIndex]; + return output; +} + +@fragment +fn fs(input: VertexOutput) -> @location(0) vec4f { + return textureSample(inputTex, inputSampler, input.uv); +} +` + +const SHADER_CODE = ` +struct Time { + elapsed: f32, + delta: f32, + frame: u32, + _pad: u32, +} + +struct Custom { + twist: f32, + viz: f32, +} + +@group(0) @binding(0) var time: Time; +@group(0) @binding(1) var custom: Custom; +@group(0) @binding(2) var screen: texture_storage_2d; + +fn w(T: f32) -> vec3f { + let Q = vec3f(0.5, 0.5, 0.5); + let P = vec3f(0.5, 0.5, 0.5); + let J = vec3f(1.0, 1.0, 1.0); + let H = vec3f(0.263, 0.416, 0.557); + return Q + P * cos(6.28318 * (J * T + H)); +} + +fn v(z: vec3f) -> vec3f { + var x = z + vec3f(12.34, 56.78, 90.12); + var a = fract(x * vec3f(0.1031, 0.1030, 0.0973)); + a = a + dot(a, a.yzx + 19.19); + return fract(vec3f(a.x + a.y, a.y + a.z, a.z + a.x) * a.zxy); +} + +fn m(s: f32) -> mat2x2 { + let n: f32 = sin(s); + let r: f32 = cos(s); + return mat2x2(r, -n, n, r); +} + +fn t(U: vec3, S: f32) -> f32 { + return length(U) - S; +} + +fn u(R: vec3) -> f32 { + var d = R; + let G = custom.twist * 0.1; + d = vec3f(d.xy * m(d.z * 0.05 * sin(G * 0.5)), d.z); + let l = 8.0; + let k = vec3(floor(d / l)); + let i = v(vec3f(f32(k.x), f32(k.y), f32(k.z)) + 1337.0); + let K = 1.0; + if (i.x >= K) { + return 0.9; + } + var h = (d / l); + h = fract(h) - 0.5; + let A = (pow(sin(4.0 * time.elapsed), 4.0) + 1.0) / 2.0; + let B = custom.viz * 0.4; + let C = (i.yzx - vec3f(0.5)) * mix(0.1, 0.3 + B, A); + let D = (vec3f(h) + C); + let E = mix(0.05, 0.12, i.z) + (custom.viz * 0.15); + let F = t(D, E); + return F * l; +} + +@compute @workgroup_size(16, 16) +fn main(@builtin(global_invocation_id) e: vec3u) { + let c = textureDimensions(screen); + if (e.x >= c.x || e.y >= c.y) { + return; + } + let I = vec2f(f32(e.x) + .5, f32(c.y - e.y) - .5); + var f = (I * 2.0 - vec2f(f32(c.x), f32(c.y))) / f32(c.y); + let y = custom.twist; + f = f * m(y * 0.1); + let L = 8.0; + let M = 0.6 - (custom.viz * 0.2); + let N = vec3f(0, 0, -3 + time.elapsed * L); + let O = normalize(vec3f(f * M, 1.0)); + var g = 0.0; + var b = vec3(0); + for (var q: i32 = 0; q < 80; q++) { + var p = N + O * g; + var j = u(p); + let o = w(p.z * 0.04 + time.elapsed * 0.2); + let V = 0.008 + (custom.viz * 0.01); + let W = 8.0; + b += o * V * exp(-j * W); + if (j < 0.001) { + b += o * 2.0; + break; + } + g += j * 0.7 * (1.0 - custom.viz); + if (g > 150.0) { + 
break; + } + } + b = b / (b + 1.0); + b = pow(b, vec3f(1.0 / 2.2)); + let X = length(f); + b *= 1.0 - X * 0.5; + textureStore(screen, e.xy, vec4f(b, 1.)); +} +` + +type WebGPUState = { + device: GPUDevice + context: GPUCanvasContext + format: GPUTextureFormat + computePipeline: GPUComputePipeline + computeBindGroup: GPUBindGroup + blitPipeline: GPURenderPipeline + blitBindGroup: GPUBindGroup + timeBuffer: GPUBuffer + customBuffer: GPUBuffer + screenTexture: GPUTexture + width: number + height: number +} + +export function ShaderBackground() { + const canvasRef = useRef(null) + const stateRef = useRef(null) + const frameRef = useRef(0) + const startTimeRef = useRef(0) + const [supported, setSupported] = useState(true) + + useEffect(() => { + const canvas = canvasRef.current + if (!canvas) return + + let animationId: number + let disposed = false + + const init = async () => { + // Check WebGPU support + if (!navigator.gpu) { + setSupported(false) + return + } + + const adapter = await navigator.gpu.requestAdapter() + if (!adapter) { + setSupported(false) + return + } + + const device = await adapter.requestDevice() + if (disposed) return + + const context = canvas.getContext("webgpu") + if (!context) { + setSupported(false) + return + } + + const format = navigator.gpu.getPreferredCanvasFormat() + const dpr = Math.min(window.devicePixelRatio, 2) + const width = Math.floor(canvas.clientWidth * dpr) + const height = Math.floor(canvas.clientHeight * dpr) + + canvas.width = width + canvas.height = height + + context.configure({ + device, + format, + alphaMode: "premultiplied", + }) + + // Create shader modules + const computeModule = device.createShaderModule({ + code: SHADER_CODE, + }) + const blitModule = device.createShaderModule({ + code: BLIT_SHADER, + }) + + // Create buffers + const timeBuffer = device.createBuffer({ + size: 16, + usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, + }) + + const customBuffer = device.createBuffer({ + size: 8, + usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, + }) + + // Create screen texture (for compute output) + const screenTexture = device.createTexture({ + size: [width, height], + format: "rgba8unorm", + usage: GPUTextureUsage.STORAGE_BINDING | GPUTextureUsage.TEXTURE_BINDING, + }) + + // Create compute bind group layout and pipeline + const computeBindGroupLayout = device.createBindGroupLayout({ + entries: [ + { + binding: 0, + visibility: GPUShaderStage.COMPUTE, + buffer: { type: "uniform" }, + }, + { + binding: 1, + visibility: GPUShaderStage.COMPUTE, + buffer: { type: "uniform" }, + }, + { + binding: 2, + visibility: GPUShaderStage.COMPUTE, + storageTexture: { access: "write-only", format: "rgba8unorm" }, + }, + ], + }) + + const computePipeline = device.createComputePipeline({ + layout: device.createPipelineLayout({ + bindGroupLayouts: [computeBindGroupLayout], + }), + compute: { + module: computeModule, + entryPoint: "main", + }, + }) + + const computeBindGroup = device.createBindGroup({ + layout: computeBindGroupLayout, + entries: [ + { binding: 0, resource: { buffer: timeBuffer } }, + { binding: 1, resource: { buffer: customBuffer } }, + { binding: 2, resource: screenTexture.createView() }, + ], + }) + + // Create blit pipeline for rendering to canvas + const sampler = device.createSampler({ + magFilter: "linear", + minFilter: "linear", + }) + + const blitBindGroupLayout = device.createBindGroupLayout({ + entries: [ + { + binding: 0, + visibility: GPUShaderStage.FRAGMENT, + texture: { sampleType: "float" }, + }, + { + binding: 
1, + visibility: GPUShaderStage.FRAGMENT, + sampler: { type: "filtering" }, + }, + ], + }) + + const blitPipeline = device.createRenderPipeline({ + layout: device.createPipelineLayout({ + bindGroupLayouts: [blitBindGroupLayout], + }), + vertex: { + module: blitModule, + entryPoint: "vs", + }, + fragment: { + module: blitModule, + entryPoint: "fs", + targets: [{ format }], + }, + primitive: { + topology: "triangle-list", + }, + }) + + const blitBindGroup = device.createBindGroup({ + layout: blitBindGroupLayout, + entries: [ + { binding: 0, resource: screenTexture.createView() }, + { binding: 1, resource: sampler }, + ], + }) + + stateRef.current = { + device, + context, + format, + computePipeline, + computeBindGroup, + blitPipeline, + blitBindGroup, + timeBuffer, + customBuffer, + screenTexture, + width, + height, + } + + startTimeRef.current = performance.now() + + // Start render loop + const render = () => { + if (disposed || !stateRef.current) return + + const state = stateRef.current + const elapsed = (performance.now() - startTimeRef.current) / 1000 + frameRef.current++ + + // Update time uniform + const timeData = new ArrayBuffer(16) + const timeView = new DataView(timeData) + timeView.setFloat32(0, elapsed, true) + timeView.setFloat32(4, 0.016, true) + timeView.setUint32(8, frameRef.current, true) + timeView.setUint32(12, 0, true) + state.device.queue.writeBuffer(state.timeBuffer, 0, timeData) + + // Update custom uniform (animated values) + const twist = Math.sin(elapsed * 0.3) * 2 + const viz = 0.3 + Math.sin(elapsed * 0.5) * 0.2 + const customData = new Float32Array([twist, viz]) + state.device.queue.writeBuffer(state.customBuffer, 0, customData) + + // Create command encoder + const encoder = state.device.createCommandEncoder() + + // Run compute shader + const computePass = encoder.beginComputePass() + computePass.setPipeline(state.computePipeline) + computePass.setBindGroup(0, state.computeBindGroup) + computePass.dispatchWorkgroups( + Math.ceil(state.width / 16), + Math.ceil(state.height / 16) + ) + computePass.end() + + // Blit to canvas using render pass + const canvasTexture = state.context.getCurrentTexture() + const renderPass = encoder.beginRenderPass({ + colorAttachments: [ + { + view: canvasTexture.createView(), + clearValue: { r: 0, g: 0, b: 0, a: 1 }, + loadOp: "clear", + storeOp: "store", + }, + ], + }) + renderPass.setPipeline(state.blitPipeline) + renderPass.setBindGroup(0, state.blitBindGroup) + renderPass.draw(6) + renderPass.end() + + state.device.queue.submit([encoder.finish()]) + + animationId = requestAnimationFrame(render) + } + + render() + } + + init().catch((err) => { + console.error("WebGPU init error:", err) + setSupported(false) + }) + + // Handle resize + const handleResize = () => { + if (!stateRef.current || !canvas) return + + const state = stateRef.current + const dpr = Math.min(window.devicePixelRatio, 2) + const width = Math.floor(canvas.clientWidth * dpr) + const height = Math.floor(canvas.clientHeight * dpr) + + if (width === state.width && height === state.height) return + if (width === 0 || height === 0) return + + canvas.width = width + canvas.height = height + + // Recreate screen texture + state.screenTexture.destroy() + const screenTexture = state.device.createTexture({ + size: [width, height], + format: "rgba8unorm", + usage: GPUTextureUsage.STORAGE_BINDING | GPUTextureUsage.TEXTURE_BINDING, + }) + + // Recreate compute bind group + const computeBindGroupLayout = state.computePipeline.getBindGroupLayout(0) + const computeBindGroup = 
state.device.createBindGroup({ + layout: computeBindGroupLayout, + entries: [ + { binding: 0, resource: { buffer: state.timeBuffer } }, + { binding: 1, resource: { buffer: state.customBuffer } }, + { binding: 2, resource: screenTexture.createView() }, + ], + }) + + // Recreate blit bind group + const sampler = state.device.createSampler({ + magFilter: "linear", + minFilter: "linear", + }) + const blitBindGroupLayout = state.blitPipeline.getBindGroupLayout(0) + const blitBindGroup = state.device.createBindGroup({ + layout: blitBindGroupLayout, + entries: [ + { binding: 0, resource: screenTexture.createView() }, + { binding: 1, resource: sampler }, + ], + }) + + stateRef.current = { + ...state, + screenTexture, + computeBindGroup, + blitBindGroup, + width, + height, + } + } + + window.addEventListener("resize", handleResize) + + return () => { + disposed = true + if (animationId) cancelAnimationFrame(animationId) + window.removeEventListener("resize", handleResize) + if (stateRef.current) { + stateRef.current.screenTexture.destroy() + stateRef.current.timeBuffer.destroy() + stateRef.current.customBuffer.destroy() + } + } + }, []) + + if (!supported) { + // Fallback gradient background + return ( +
+ ) + } + + return ( + + ) +} diff --git a/packages/web/src/components/VideoPlayer.tsx b/packages/web/src/components/VideoPlayer.tsx new file mode 100644 index 00000000..b5b59e98 --- /dev/null +++ b/packages/web/src/components/VideoPlayer.tsx @@ -0,0 +1,259 @@ +import { useEffect, useRef, useState } from "react" +import Hls from "hls.js" + +interface VideoPlayerProps { + src: string + autoPlay?: boolean + muted?: boolean +} + +export function VideoPlayer({ + src, + autoPlay = true, + muted = false, +}: VideoPlayerProps) { + const videoRef = useRef(null) + const hlsRef = useRef(null) + const [isPlaying, setIsPlaying] = useState(autoPlay) + const [isMuted, setIsMuted] = useState(muted) + const [volume, setVolume] = useState(1) + const [isFullscreen, setIsFullscreen] = useState(false) + const [showControls, setShowControls] = useState(true) + const [error, setError] = useState(null) + const hideControlsTimeoutRef = useRef | null>(null) + + useEffect(() => { + const video = videoRef.current + if (!video || !src) return + + // Check if native HLS is supported (Safari) + if (video.canPlayType("application/vnd.apple.mpegurl")) { + video.src = src + if (autoPlay) video.play().catch(() => setIsPlaying(false)) + return + } + + // Use HLS.js for other browsers + if (Hls.isSupported()) { + const hls = new Hls({ + enableWorker: true, + lowLatencyMode: true, + liveSyncDurationCount: 3, + liveMaxLatencyDurationCount: 6, + }) + + hls.loadSource(src) + hls.attachMedia(video) + + hls.on(Hls.Events.MANIFEST_PARSED, () => { + if (autoPlay) video.play().catch(() => setIsPlaying(false)) + }) + + hls.on(Hls.Events.ERROR, (_, data) => { + if (data.fatal) { + switch (data.type) { + case Hls.ErrorTypes.NETWORK_ERROR: + setError("Network error - retrying...") + hls.startLoad() + break + case Hls.ErrorTypes.MEDIA_ERROR: + setError("Media error - recovering...") + hls.recoverMediaError() + break + default: + setError("Stream error") + hls.destroy() + break + } + } + }) + + hlsRef.current = hls + + return () => { + hls.destroy() + hlsRef.current = null + } + } else { + setError("HLS playback not supported in this browser") + } + }, [src, autoPlay]) + + const handlePlayPause = () => { + const video = videoRef.current + if (!video) return + + if (video.paused) { + video.play().then(() => setIsPlaying(true)) + } else { + video.pause() + setIsPlaying(false) + } + } + + const handleMute = () => { + const video = videoRef.current + if (!video) return + + video.muted = !video.muted + setIsMuted(video.muted) + } + + const handleVolumeChange = (e: React.ChangeEvent) => { + const video = videoRef.current + if (!video) return + + const newVolume = parseFloat(e.target.value) + video.volume = newVolume + setVolume(newVolume) + if (newVolume === 0) { + setIsMuted(true) + video.muted = true + } else if (isMuted) { + setIsMuted(false) + video.muted = false + } + } + + const handleFullscreen = async () => { + const video = videoRef.current + if (!video) return + + if (document.fullscreenElement) { + await document.exitFullscreen() + setIsFullscreen(false) + } else { + await video.requestFullscreen() + setIsFullscreen(true) + } + } + + const handleMouseMove = () => { + setShowControls(true) + if (hideControlsTimeoutRef.current) { + clearTimeout(hideControlsTimeoutRef.current) + } + hideControlsTimeoutRef.current = setTimeout(() => { + if (isPlaying) setShowControls(false) + }, 3000) + } + + if (error) { + return ( +
+

{error}

+
+ ) + } + + return ( +
isPlaying && setShowControls(false)} + > +