commit 40ef943d24095d8d978565013c9acee7f632a573 Author: Joey Eamigh <55670930+JoeyEamigh@users.noreply.github.com> Date: Wed Feb 11 03:33:58 2026 -0500 initial setup commit diff --git a/.prettierrc.js b/.prettierrc.js new file mode 100644 index 0000000..9f3acb2 --- /dev/null +++ b/.prettierrc.js @@ -0,0 +1,19 @@ +/** @type {import('prettier').Config & import('prettier-plugin-tailwindcss').PluginOptions} */ +export default { + bracketSpacing: true, + bracketSameLine: true, + singleQuote: true, + trailingComma: 'all', + arrowParens: 'avoid', + semi: true, + plugins: ['prettier-plugin-organize-imports', 'prettier-plugin-tailwindcss'], + tailwindStylesheet: './app/globals.css', + overrides: [ + { + files: ['*.ts', '*.js', '*.tsx', '*.jsx', '*.cjs', '*.mjs'], + options: { + printWidth: 120, + }, + }, + ], +}; diff --git a/Assignment.md b/Assignment.md new file mode 100644 index 0000000..106e3f8 --- /dev/null +++ b/Assignment.md @@ -0,0 +1,21 @@ +# Assignment Details + +Create an interactive dashboard or analysis tool that anyone can access through the internet. Your dashboard/tool needs to be business relevant and create business insights. It needs to load data in real-time, process them, and present results. Be creative. + +> **HINT:** You can also add a static database that holds historical data and then augment these data with real-time / contemporary data. + +## Main Objectives + +1. Apply all you have learnt in the course to create an actual data science / AI product (service) using state-of-the-art technologies. +2. Experiment with SOTA tools like Google's Antigravity to build and deploy AI powered tools for business insights. +3. Discover and Explore new tools, services, and platforms that enable advanced analytics that you can deploy and share. + +## Four Deliverables + +1. Fully working, publicly accessible / deployed dashboard (and/or analytics tool) including its public URL +2. 
Full code base including documentation for installation (ideally as a repo on GitHub) that allows me to replicate your dashboard (tool). +3. Real-time analysis of data (with optional static historic database) +4. A one page summary that states: + - Your dashboard's (tool's) purpose and functionality + - A clearly defined target audience: Who is it, how will they use it, and why would they use it over alternative solutions. + - A sales pitch: To whom does your dashboard/tool generate value and how would you monetize your development? diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..a04f550 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,123 @@ +# Bonus Assignment 4 — Energy & AI Dashboard + +Your task is to complete the local development portion of this assignment. Look at `Assignment.md` for the assignment brief and `SPEC.md` for the full technical specification. + +## Rules (NON-NEGOTIABLE) + +- You will NOT create a github repository, push code, or deploy ANYTHING. Your task is the local development portion. +- You will NOT publish the code anywhere. +- You will NOT make this code public in any way. + +I am very serious about this. I DO NOT want this to go on my public github. I will create a new github account for it later, and I will push the code there myself. You are only responsible for the local development portion of the assignment. You will NOT deploy anything. You will NOT make this public in any way. You will NOT create a github repository or push code. + +## Browser Automation + +Use `agent-browser` for web automation. Run `agent-browser --help` for all commands. + +Core workflow: + +1. `agent-browser connect 9222` - Connect to chromium +2. `agent-browser open ` - Navigate to page +3. `agent-browser snapshot -i` - Get interactive elements with refs (@e1, @e2) +4. `agent-browser click @e1` / `fill @e2 "text"` - Interact using refs +5. 
Re-snapshot after page changes + +--- + +## You Are the Orchestrator + +You are the **lead architect and project manager**. You do NOT write code yourself. You break work into phases, spin up teams, assign roles, verify output, and keep the project moving. Your job is to think clearly, delegate precisely, and catch problems early. + +### Your Core Principles + +1. **Protect your context window.** You will be orchestrating for hours. Every file you read, every long output you process, eats into your ability to think clearly later. Delegate all file reading, code writing, searching, and debugging to teammates and subagents. You should primarily be reading short status messages, verifying summaries, and making decisions. + +2. **Never trust, always verify.** Teammates will tell you their work is done when it isn't. They will say "tests pass" when they haven't run tests. They will say "everything compiles" when there are type errors. **Always send a different teammate to verify claims.** The agent who wrote the code must NOT be the one who verifies it. + +3. **Break work into the smallest digestible chunks.** A task like "build the map page" is too big. "Create the energy-map.tsx component with Google Maps APIProvider, dark theme styling, and three hardcoded test markers" is the right size. Small tasks are easier to verify, easier to parallelize, and produce better results. + +4. **Parallelize aggressively.** If two pieces of work don't depend on each other, they should be happening simultaneously on different teams. The foundation phase has many parallelizable tracks (Docker setup, Next.js scaffold, seed data curation). Identify and exploit these. + +5. **Use the task list as the source of truth.** Every piece of work gets a task. Every task gets assigned. Every completion gets verified by someone other than the assignee. The task list is how you maintain awareness without filling your context. + +### Team Structure & Roles + +Spin up teams organized by concern. 
Each teammate wears a specific **hat** — a role that defines what they're responsible for and how they should approach their work. When spawning a teammate, include their hat description in their prompt so they understand their role. + +Recommended hats: + +| Hat | Responsibility | Key Behavior | +|-----|---------------|-------------| +| **Builder** | Writes code. Implements features. | Writes clean, typed code. Reports what they built and any decisions they made. Must NOT self-verify — a Reviewer does that. | +| **Reviewer** | Reads and verifies code written by others. | Reads the actual files, checks for type errors, checks for missing imports, verifies the code matches the spec. Reports discrepancies honestly. Runs builds/lints/type-checks. | +| **Researcher** | Investigates APIs, docs, and libraries. | Fetches documentation, reads API responses, figures out data schemas. Returns structured findings — not prose. | +| **Tester** | Runs the app, checks behavior, tests in browser. | Uses `agent-browser` to verify the app works visually. Checks that data loads, maps render, charts display. Takes snapshots and reports what they see. | + +You may combine or adapt hats depending on the phase. A small team might have one Builder and one Reviewer. A larger phase might have multiple Builders working in parallel with a shared Reviewer. + +### Verification Protocol + +After any teammate claims a task is complete: + +1. **Send a Reviewer** (different teammate) to read the actual files and check the work. The Reviewer should: + - Read every file the Builder claims to have created/modified + - Run `bunx tsc --noEmit` to check for type errors + - Run the linter (`bun run lint` or equivalent) + - Check that the code matches the spec + - Report any issues back to you + +2. **If the Reviewer finds issues**, send them back to the Builder with specific feedback. Do NOT accept "I fixed it" without re-verification. + +3. 
**For UI work**, send a Tester with `agent-browser` to verify visual output. Screenshots/snapshots don't lie. + +4. **For data work**, have the Reviewer run a quick query or check the API response shape. Don't trust "the API returns the right data" without evidence. + +### Context Hygiene + +Your context is your most precious resource. Follow these rules: + +- **Do NOT read source files yourself.** Send a subagent or teammate to read them and summarize. +- **Do NOT run builds or tests yourself.** Delegate to a Tester or Reviewer. +- **Do NOT debug issues yourself.** Describe the problem to a teammate and let them investigate. +- **Keep your messages to teammates concise.** Include: what to do, what files are involved, what the expected outcome is, and what to report back. +- **Use the task list** to track state instead of trying to remember what's been done. +- **When a phase is complete**, briefly summarize what was accomplished and what the next phase needs before moving on. This is your checkpoint — a few sentences, not a full report. + +### How to Structure a Phase + +For each implementation phase from SPEC.md: + +1. **Plan the phase.** Read the spec section (via subagent). Identify the specific tasks. Identify dependencies and what can be parallelized. +2. **Create tasks.** Use TaskCreate for every discrete unit of work. Set up blockedBy relationships for dependencies. +3. **Spin up a team.** Create a team for the phase. Spawn teammates with clear hat assignments. +4. **Assign and monitor.** Assign tasks to teammates. Let them work. Read their status messages. +5. **Verify.** When a Builder says they're done, send a Reviewer. When a Reviewer says it's clean, send a Tester if applicable. +6. **Close the phase.** Mark all tasks complete. Summarize the phase outcome. Shut down the team. Move to the next phase. + +### Error Recovery + +Things will go wrong. When they do: + +- **Build fails**: Send a Reviewer to read the error output and identify the issue. 
Then send a Builder to fix it. Then re-verify. +- **Teammate produces wrong output**: Don't retry with the same teammate. Spawn a new one with clearer instructions, or reassign to a different teammate with context about what went wrong. +- **Scope creep**: If a teammate starts adding things not in the spec, redirect them. The spec is the source of truth. +- **Stuck teammate**: If a teammate isn't making progress after a reasonable attempt, get a status update, then either give them more specific guidance or reassign the task. + +### Phase Overview (from SPEC.md) + +1. **Foundation** — Scaffold, configs, Docker, Prisma schema, seed data +2. **Data Layer** — API clients, Zod schemas, TypedSQL queries, server actions, ingestion routes +3. **Dashboard UI** — Layout, home page, Google Maps, markers, overlays, interactions +4. **Charts & Analysis** — Recharts, trends, demand, generation mix, annotations +5. **Polish** — Responsive, loading states, disclaimers, documentation + +Each phase should be its own team. Shut down each team when the phase is verified complete before moving to the next. This keeps context clean and prevents cross-phase confusion. + +### Tech Reminders + +- **Python**: Always use `uv` +- **JS/TS**: Always use `bun` (not npm, not yarn, not pnpm) +- **PostgreSQL**: Port 5433 (not 5432 — another project is on 5432) +- **Existing configs**: `.prettierrc.js`, `eslint.config.js`, and `tsconfig.json` are already in the project root. They need their dependencies installed and integrated with the Next.js project. Do NOT overwrite them. +- **Google Maps API key**: Already in `.env`. Do NOT regenerate or modify. +- **Versions**: Next.js 16, Tailwind 4, PostgreSQL 18, PostGIS 3.5, Prisma 7.x. Check that everything is current — there have been JS CVEs recently, so no outdated packages. 
diff --git a/SPEC.md b/SPEC.md new file mode 100644 index 0000000..893602e --- /dev/null +++ b/SPEC.md @@ -0,0 +1,373 @@ +# Energy & AI: The Datacenter Power Crunch + +An interactive dashboard visualizing how AI datacenter buildout is driving regional electricity demand and energy prices across the United States. + +## Why This Matters + +The AI boom isn't just a software story — it's an energy story. Training a single frontier model can consume as much electricity as a small town uses in a year. And we're not talking about one model: every major tech company is racing to build out GPU clusters at unprecedented scale. + +This is creating a tectonic shift in US energy markets: + +- **Dominion Energy (Virginia/PJM)** has seen datacenter load applications surge 10x since 2020. Northern Virginia alone hosts ~70% of the world's internet traffic and is adding gigawatts of new AI load. +- **ERCOT (Texas)** is fielding datacenter interconnection requests totaling more than the entire current grid capacity of some US states. +- **Natural gas** prices are being pushed up because gas-fired power plants are the marginal generator in most US regions — when demand spikes, gas sets the price. +- **Grid reliability** is under threat: regions that were comfortably supplied five years ago are now facing capacity shortfalls, triggering emergency demand response events. + +The people making billion-dollar decisions about this — energy investors, utility planners, datacenter operators, commodity traders — need real-time visibility into where demand is concentrating, how prices are responding, and which regions are approaching their limits. That's what this dashboard provides. + +## Business Case + +**The problem**: Energy market data is scattered across dozens of sources (EIA, ISOs, FRED, commodity exchanges) with inconsistent formats, no geospatial context, and no connection to the AI infrastructure buildout driving the changes. 
Analysts spend hours stitching together spreadsheets to answer basic questions like "how have electricity prices changed in regions with heavy datacenter buildout?" + +**The solution**: A single, real-time dashboard that overlays datacenter locations on energy market data, making the AI-energy nexus immediately visible and explorable. + +**Target audience**: Energy investors evaluating utility stocks and commodity positions. Utility analysts planning generation and transmission investments. Datacenter site selectors choosing where to build next. Business strategists assessing the infrastructure costs underlying AI. + +**Monetization angle**: Freemium model. Free tier provides the dashboard with real-time data. Premium tier adds predictive analytics (price forecasting, capacity constraint alerts), custom region comparisons, CSV/API data export, and email alerting for price spike events. Enterprise tier provides embeddable widgets for trading desks and analyst reports. + +## Tech Stack + +| Layer | Technology | Version | Why | +|-------|-----------|---------|-----| +| Framework | Next.js (App Router) | 16 | Turbopack default, `"use cache"` directive, React 19.2 — the most capable React framework available | +| Styling | Tailwind CSS | 4 | CSS-first config, zero-runtime, pairs perfectly with shadcn/ui | +| Components | shadcn/ui | latest | Copy-paste components, not a dependency — full control, great defaults, built-in chart components (Recharts wrappers) | +| Maps | @vis.gl/react-google-maps | latest | Google's own recommended React library for Maps. Declarative, hooks-based, AdvancedMarker support | +| Database | PostgreSQL + PostGIS | 18 + 3.5 | PostGIS is the gold standard for geospatial queries — ST_DWithin, ST_Distance, polygon containment, all in SQL | +| ORM | Prisma (with TypedSQL) | 7.x | TypedSQL lets us write `.sql` files for PostGIS queries and get fully typed TypeScript wrappers. 
Best of both worlds: Prisma for CRUD, raw SQL for geo | +| Runtime / PM | Bun | latest | Fastest JS runtime, built-in TS support, great package manager | +| Charts | Recharts (via shadcn/ui) | latest | shadcn/ui's chart components wrap Recharts with consistent theming — no extra config | +| Containerization | Docker Compose | - | One command to spin up PostGIS. Reproducible dev environment | + +## Type Safety Strategy (E2E) + +External APIs like EIA and FRED return untyped JSON. A single bad response shape can cascade into runtime crashes or — worse — silently wrong data on a dashboard people might make investment decisions from. Every data boundary is typed and validated: + +``` +External APIs → Zod schemas → Server (validated, typed) +Database → Prisma generated types → Server +PostGIS queries → TypedSQL (.sql files) → Typed wrappers +Server → Client: Next.js Server Actions + Server Components (typed props) +Forms → Zod + react-hook-form (validated inputs) +``` + +No `any`. No unvalidated API responses. If the EIA changes their response format, we get a Zod parse error at ingestion time, not a broken chart at render time. + +## Data Sources + +We use two categories of data: real-time feeds from government APIs (free, reliable, well-documented) and curated seed data for the geospatial layer. + +### Real-Time (API-driven) + +| Source | Data | Granularity | Update Frequency | Why This Source | +|--------|------|-------------|-----------------|-----------------| +| **EIA API** | Regional electricity prices, demand, generation mix | Hourly, by balancing authority | Hourly | The definitive US energy data source. Free, 9k req/hr, decades of history. Covers all ISO/RTO regions. | +| **EIA API** | Natural gas (Henry Hub), WTI crude, coal spot prices | Daily/weekly | Daily | Gas prices directly drive electricity prices (gas is the marginal fuel). Oil/coal provide macro context. 
| +| **FRED API** | Historical commodity price time series | Daily | Daily | Clean, reliable time series going back to the 1940s. Perfect for long-run trend analysis. | + +### Static / Seed Data + +| Source | Data | Why | +|--------|------|-----| +| **DataCenterMap / manual curation** | Datacenter locations (lat/lng, operator, capacity MW) | The core geospatial layer. PostGIS Point geometries enable spatial queries (nearby DCs, DCs in region, clustering). | +| **EIA / ISO boundaries** | Grid region polygons (PJM, ERCOT, CAISO, etc.) | PostGIS MultiPolygon geometries enable the price heatmap overlay and spatial joins between DCs and regions. | +| **AI milestones** | Timeline of major AI announcements | Chart annotations that tell the story — "here's when ChatGPT launched, here's when prices started climbing." Turns data into narrative. | + +## Database Schema (PostgreSQL + PostGIS) + +``` +┌─────────────────────────┐ ┌──────────────────────────┐ +│ datacenters │ │ grid_regions │ +├─────────────────────────┤ ├──────────────────────────┤ +│ id UUID PK │ │ id UUID PK │ +│ name TEXT │ │ name TEXT │ +│ operator TEXT │ │ code TEXT │ (e.g. 
"PJM", "ERCOT") +│ location GEOGRAPHY │◄───►│ boundary GEOGRAPHY │ (MultiPolygon) +│ (Point, 4326) │ │ iso TEXT │ +│ capacity_mw FLOAT │ │ created_at TIMESTAMPTZ│ +│ status TEXT │ └──────────────────────────┘ +│ year_opened INT │ +│ region_id UUID FK │──────┘ +│ created_at TIMESTAMPTZ│ +└─────────────────────────┘ + +┌─────────────────────────┐ ┌──────────────────────────┐ +│ electricity_prices │ │ commodity_prices │ +├─────────────────────────┤ ├──────────────────────────┤ +│ id UUID PK │ │ id UUID PK │ +│ region_id UUID FK │ │ commodity TEXT │ (natural_gas, wti_crude, coal) +│ price_mwh FLOAT │ │ price FLOAT │ +│ demand_mw FLOAT │ │ unit TEXT │ +│ timestamp TIMESTAMPTZ│ │ timestamp TIMESTAMPTZ│ +│ source TEXT │ │ source TEXT │ +└─────────────────────────┘ └──────────────────────────┘ + +┌─────────────────────────┐ +│ generation_mix │ +├─────────────────────────┤ +│ id UUID PK │ +│ region_id UUID FK │ +│ fuel_type TEXT │ (gas, nuclear, wind, solar, coal, hydro) +│ generation_mw FLOAT │ +│ timestamp TIMESTAMPTZ│ +└─────────────────────────┘ +``` + +## TypedSQL Queries (prisma/sql/) + +Examples of PostGIS queries that get typed wrappers: + +**`findDatacentersInRegion.sql`** +```sql +-- @param {String} $1:regionCode +SELECT + d.id, d.name, d.operator, d.capacity_mw, d.status, d.year_opened, + ST_AsGeoJSON(d.location)::TEXT as location_geojson +FROM datacenters d +JOIN grid_regions r ON d.region_id = r.id +WHERE r.code = $1 +ORDER BY d.capacity_mw DESC +``` + +**`findNearbyDatacenters.sql`** +```sql +-- @param {Float} $1:lat +-- @param {Float} $2:lng +-- @param {Float} $3:radiusKm +SELECT + d.id, d.name, d.operator, d.capacity_mw, + ST_AsGeoJSON(d.location)::TEXT as location_geojson, + ST_Distance(d.location, ST_MakePoint($2, $1)::geography) / 1000 as distance_km +FROM datacenters d +WHERE ST_DWithin(d.location, ST_MakePoint($2, $1)::geography, $3 * 1000) +ORDER BY distance_km +``` + +**`getRegionPriceHeatmap.sql`** +```sql +SELECT + r.code, r.name, + 
ST_AsGeoJSON(r.boundary)::TEXT as boundary_geojson, + AVG(ep.price_mwh) as avg_price, + MAX(ep.price_mwh) as max_price, + AVG(ep.demand_mw) as avg_demand, + COUNT(DISTINCT d.id)::INT as datacenter_count, + COALESCE(SUM(d.capacity_mw), 0) as total_dc_capacity_mw +FROM grid_regions r +LEFT JOIN electricity_prices ep ON ep.region_id = r.id + AND ep.timestamp > NOW() - INTERVAL '24 hours' +LEFT JOIN datacenters d ON d.region_id = r.id +GROUP BY r.id, r.code, r.name, r.boundary +``` + +## Real-Time Candy + +These features make the dashboard feel alive — like a trading floor terminal, not a static report. + +### Live Price Ticker Tape +A scrolling horizontal banner across the top of every page showing current regional electricity prices and commodity spot prices — styled like a financial news ticker. Green/red coloring for price direction. Always visible, always updating. + +### Animated Number Transitions +Hero metrics (avg electricity price, gas spot, total DC capacity) use smooth count-up/count-down animations when data updates. Numbers don't just appear — they *roll* to the new value. Uses `framer-motion` `animate` with spring physics. + +### Pulsing Map Markers +Datacenter markers on the Google Map emit a soft radial pulse animation when their region's electricity price exceeds its 30-day average. The faster the pulse, the bigger the price deviation. A calm map means stable prices; a map full of pulsing dots means something interesting is happening. + +### GPU Cost Calculator (Live) +A sticky widget: "Running 1,000 H100 GPUs right now costs **$X,XXX/hr** in Virginia vs **$Y,YYY/hr** in Texas." Updates with live regional prices. Users can adjust GPU count with a slider. Makes the abstract price data immediately tangible — *this is what it actually costs to train AI right now*. + +### Grid Stress Gauges +Radial gauge components per region showing `current demand / peak capacity` as a percentage. Styled like a speedometer — green zone, yellow zone, red zone. 
When a region creeps past 85% capacity utilization, the gauge glows red. Immediate visual signal for grid stress. + +### Price Spike Toasts +`sonner` toast notifications that pop up when any region's price crosses a configurable threshold (e.g., >$100/MWh) or hits a new 30-day high. Persistent in the bottom-right corner. Gives that "breaking news" feeling. + +### Auto-Refresh with Countdown +A subtle countdown timer in the nav: "Next refresh in 47s". Data auto-refreshes on a configurable interval (default 60s). Uses Next.js `router.refresh()` to re-run server components without a full page reload. The countdown itself is a client component with `requestAnimationFrame` for smooth ticking. + +### Ambient Region Glow +On the map, grid region polygons don't just use static fill colors — they have a subtle CSS animation that "breathes" (opacity oscillation). Higher-priced regions breathe faster and brighter. The map looks alive at a glance. + +## Pages & Views + +### 1. Dashboard Home (`/`) +- **Hero metrics**: Live national avg electricity price, natural gas spot, total DC capacity +- **Price change sparklines**: 24h/7d/30d trends for key indicators +- **Recent alerts**: Notable price spikes or demand records +- **Quick map preview**: Thumbnail of the full map with DC hotspots + +### 2. Interactive Map (`/map`) +- **Google Maps** with custom styling (dark theme) +- **Datacenter markers**: Clustered markers sized by capacity (MW), colored by operator +- **Regional overlays**: Grid region polygons colored by current electricity price (heatmap) +- **Click interactions**: Click a region to see detail panel (prices, demand, generation mix, DC list) +- **Click a datacenter**: See operator, capacity, year opened, regional context +- **Filters**: By operator, capacity range, region, time period + +### 3. 
Price Trends (`/trends`) +- **Multi-line charts**: Regional electricity prices over time (selectable regions) +- **Commodity overlay**: Natural gas / crude oil prices on secondary axis +- **AI milestone annotations**: Vertical markers for ChatGPT launch, major cluster announcements +- **Correlation view**: Scatter plot of DC capacity vs regional price +- **Time range selector**: 1M, 3M, 6M, 1Y, ALL + +### 4. Demand Analysis (`/demand`) +- **Regional demand growth**: Bar/line charts showing demand trends by ISO region +- **Peak demand tracking**: Historical peak demand records +- **Forecast overlay**: EIA demand forecasts where available +- **DC impact estimation**: Estimated datacenter load as percentage of regional demand + +### 5. Generation Mix (`/generation`) +- **Stacked area charts**: Generation by fuel type per region over time +- **Renewable vs fossil split**: How DC-heavy regions compare +- **Carbon intensity proxy**: Generation mix as indicator of grid cleanliness + +## Project Structure + +``` +bonus4/ +├── docker-compose.yml +├── .env # API keys (EIA, FRED, Google Maps) +├── .prettierrc.js # (existing) +├── tsconfig.json # (existing) +├── eslint.config.js # (existing) +├── next.config.ts +├── package.json +├── prisma/ +│ ├── schema.prisma +│ ├── migrations/ +│ ├── seed.ts # Datacenter locations, region boundaries, AI milestones +│ └── sql/ # TypedSQL queries +│ ├── findDatacentersInRegion.sql +│ ├── findNearbyDatacenters.sql +│ ├── getRegionPriceHeatmap.sql +│ ├── getLatestPrices.sql +│ ├── getPriceTrends.sql +│ ├── getDemandByRegion.sql +│ └── getGenerationMix.sql +├── src/ +│ ├── app/ +│ │ ├── layout.tsx +│ │ ├── page.tsx # Dashboard home +│ │ ├── map/ +│ │ │ └── page.tsx +│ │ ├── trends/ +│ │ │ └── page.tsx +│ │ ├── demand/ +│ │ │ └── page.tsx +│ │ ├── generation/ +│ │ │ └── page.tsx +│ │ └── api/ +│ │ └── ingest/ # Data ingestion endpoints (cron-triggered) +│ │ ├── electricity/route.ts +│ │ ├── commodities/route.ts +│ │ └── generation/route.ts +│ 
├── components/ +│ │ ├── ui/ # shadcn/ui components +│ │ ├── map/ +│ │ │ ├── energy-map.tsx # Main Google Maps component +│ │ │ ├── datacenter-marker.tsx +│ │ │ ├── region-overlay.tsx +│ │ │ └── map-controls.tsx +│ │ ├── charts/ +│ │ │ ├── price-chart.tsx +│ │ │ ├── demand-chart.tsx +│ │ │ ├── generation-chart.tsx +│ │ │ └── sparkline.tsx +│ │ ├── dashboard/ +│ │ │ ├── metric-card.tsx +│ │ │ └── alerts-feed.tsx +│ │ └── layout/ +│ │ ├── nav.tsx +│ │ └── footer.tsx +│ ├── lib/ +│ │ ├── db.ts # Prisma client singleton +│ │ ├── api/ +│ │ │ ├── eia.ts # EIA API client + Zod schemas +│ │ │ └── fred.ts # FRED API client + Zod schemas +│ │ ├── schemas/ # Shared Zod schemas +│ │ │ ├── electricity.ts +│ │ │ ├── commodities.ts +│ │ │ └── geo.ts +│ │ └── utils.ts +│ ├── actions/ # Server Actions (typed server→client boundary) +│ │ ├── prices.ts +│ │ ├── datacenters.ts +│ │ ├── demand.ts +│ │ └── generation.ts +│ └── types/ +│ └── index.ts # Shared type definitions +├── scripts/ +│ └── seed-datacenters.ts # One-time seed script for DC location data +├── Assignment.md +├── CLAUDE.md +└── SPEC.md +``` + +## Docker Compose + +```yaml +services: + db: + image: postgis/postgis:18-3.5 + ports: + - "5433:5432" + environment: + POSTGRES_DB: energy_dashboard + POSTGRES_USER: energy + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + volumes: + - pgdata:/var/lib/postgresql/data + +volumes: + pgdata: +``` + +Port **5433** externally (5432 is occupied by another project). + +## Data Ingestion Strategy + +The dashboard needs to feel live without hammering external APIs. The strategy is: fetch once, cache in Postgres, serve from cache, refresh on a schedule. + +1. **On-demand + cache**: Server actions check Postgres first. If the latest cached data is within the TTL window (e.g., 30 min for electricity, 6 hours for commodities), serve from cache. Otherwise, fetch fresh from EIA/FRED, validate with Zod, upsert into Postgres, and return. +2. 
**API route ingestion**: `/api/ingest/*` routes provide a manual trigger and a cron-compatible endpoint for bulk data pulls. Useful for backfilling historical data and for production scheduled ingestion. +3. **Seed data**: Datacenter locations and grid region boundaries are relatively static. Loaded via `prisma db seed` from curated JSON/GeoJSON files. +4. **Rate limit awareness**: EIA allows ~9k req/hr (generous), FRED allows 120/min. With caching, we'll typically make <100 EIA requests/hour even under heavy use. The real bottleneck is EIA's 5,000-row-per-query limit — pagination handled in the API client. + +## Implementation Phases + +### Phase 1: Foundation +- [ ] Scaffold Next.js 16 project +- [ ] Copy into bonus4 dir, integrate existing prettier/eslint/tsconfig +- [ ] Install and configure shadcn/ui + Tailwind 4 +- [ ] Docker Compose for PostgreSQL 18 + PostGIS +- [ ] Prisma schema + initial migration (with PostGIS extension) +- [ ] Seed datacenter locations + grid region boundaries + +### Phase 2: Data Layer +- [ ] EIA API client with Zod validation +- [ ] FRED API client with Zod validation +- [ ] TypedSQL queries for all geospatial operations +- [ ] Server actions for data access (typed server→client boundary) +- [ ] Ingestion API routes + +### Phase 3: Dashboard UI +- [ ] App layout (nav, sidebar, footer) +- [ ] Dashboard home with metric cards + sparklines +- [ ] Google Maps integration with datacenter markers +- [ ] Region polygon overlays with price heatmap coloring +- [ ] Click interactions (region detail panel, DC detail panel) + +### Phase 4: Charts & Analysis +- [ ] Price trend charts (Recharts via shadcn/ui) +- [ ] Demand analysis views +- [ ] Generation mix charts +- [ ] AI milestone annotations +- [ ] Correlation views + +### Phase 5: Polish +- [ ] Responsive design +- [ ] Loading states + error boundaries +- [ ] Disclaimers (educational/informational purposes) +- [ ] One-page summary document +- [ ] README with installation docs diff --git 
a/eslint.config.js b/eslint.config.js new file mode 100644 index 0000000..4cac9d5 --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,195 @@ +import nextPlugin from '@next/eslint-plugin-next'; +import pluginQuery from '@tanstack/eslint-plugin-query'; +import parser from '@typescript-eslint/parser'; +import pluginReact from 'eslint-plugin-react'; +import reactCompiler from 'eslint-plugin-react-compiler'; +import pluginReactHooks from 'eslint-plugin-react-hooks'; +import globals from 'globals'; +import eslint from '@eslint/js'; +import importPlugin from 'eslint-plugin-import'; +import prettierPlugin from 'eslint-plugin-prettier/recommended'; +import tseslint from 'typescript-eslint'; + +/** @type {import("eslint").Linter.Config} */ +export default [ + ...eslint.configs.recommended, + ...tseslint.configs.recommendedTypeChecked, + prettierPlugin, + { + plugins: { + import: importPlugin, + }, + rules: { + 'import/no-default-export': 'warn', + }, + }, + { + languageOptions: { + parserOptions: { + projectService: true, + // EXPERIMENTAL_useProjectService: true, + // project: ['./tsconfig.json', './apps/*/tsconfig.json', './packages/*/tsconfig.json'], + }, + }, + rules: { + // custom rules + 'arrow-body-style': 'off', + 'prefer-arrow-callback': 'off', + 'no-debugger': 'warn', + 'prefer-const': 'warn', + 'prettier/prettier': ['warn', {}, { usePrettierrc: true }], + // typescript-eslint rules + '@typescript-eslint/interface-name-prefix': 'off', + '@typescript-eslint/explicit-function-return-type': 'off', + '@typescript-eslint/explicit-module-boundary-types': 'off', + '@typescript-eslint/no-empty-function': 'off', + '@typescript-eslint/no-explicit-any': 'warn', + '@typescript-eslint/no-unnecessary-type-assertion': 'error', + '@typescript-eslint/consistent-type-assertions': ['warn', { assertionStyle: 'as' }], + '@typescript-eslint/no-unsafe-type-assertion': 'error', + '@typescript-eslint/no-unused-vars': [ + 'warn', + { + argsIgnorePattern: '^_', + varsIgnorePattern: '^_', + 
destructuredArrayIgnorePattern: '^_', + caughtErrorsIgnorePattern: '^_', + }, + ], + '@typescript-eslint/no-misused-promises': 'error', + '@typescript-eslint/no-floating-promises': 'error', + '@typescript-eslint/require-await': 'warn', + // it's dying in CI, so warn for now + // '@typescript-eslint/no-redundant-type-constituents': 'warn', + // reenable later - start with warn + // '@typescript-eslint/no-unsafe-return': 'off', + // '@typescript-eslint/no-unsafe-assignment': 'off', + // '@typescript-eslint/no-unsafe-member-access': 'off', + // '@typescript-eslint/no-unsafe-call': 'off', + // '@typescript-eslint/no-unsafe-argument': 'off', + // '@typescript-eslint/restrict-template-expressions': 'off', + }, + }, + { + files: ['**/*.spec.ts', '**/*.test.ts', '**/*.spec.tsx', '**/*.test.tsx'], + rules: { + '@typescript-eslint/no-unsafe-return': 'off', + '@typescript-eslint/no-unsafe-assignment': 'off', + '@typescript-eslint/no-unsafe-member-access': 'off', + '@typescript-eslint/no-unsafe-call': 'off', + '@typescript-eslint/no-unsafe-argument': 'off', + '@typescript-eslint/restrict-template-expressions': 'off', + '@typescript-eslint/no-unnecessary-type-assertion': 'off', + '@typescript-eslint/consistent-type-assertions': 'off', + '@typescript-eslint/no-unsafe-type-assertion': 'off', + '@typescript-eslint/require-await': 'off', + }, + }, + { + files: ['**/*.js', '**/*.cjs', '**/*.mjs'], + rules: { + ...tseslint.configs.disableTypeChecked, + }, + }, + { + ignores: [ + 'dist/**', + 'node_modules', + '.next', + '.prettierrc.*js', + 'postcss.config.*js', + 'tailwind.config.*js', + 'eslint.config.*js', + 'next-sitemap.config.*js', + 'next.config.*js', + 'packages/config/eslint/**.js', + ], + }, + ...pluginQuery.configs['flat/recommended'], + { + files: ['**/*.{js,jsx,ts,tsx}'], + plugins: { '@next/next': nextPlugin }, + rules: { ...nextPlugin.configs['core-web-vitals'].rules }, + }, + { + ...pluginReact.configs.flat.recommended, + languageOptions: { + 
...pluginReact.configs.flat.recommended.languageOptions, + globals: { + ...globals.serviceworker, + ...globals.browser, + ...globals.node, + }, + }, + }, + { + plugins: { 'react-hooks': pluginReactHooks }, + settings: { react: { version: 'detect' } }, + rules: { + ...pluginReactHooks.configs.recommended.rules, + // React scope no longer necessary with new JSX transform. + 'react/react-in-jsx-scope': 'off', + }, + }, + reactCompiler.configs.recommended, + { + rules: { 'react-compiler/react-compiler': 'error' }, + }, + { + languageOptions: { + parser, + parserOptions: { ecmaVersion: 'latest', sourceType: 'module', ecmaFeatures: { jsx: true } }, + }, + }, + // overrides + { + files: ['**/*.{js,jsx,ts,tsx}'], + rules: { + 'no-restricted-syntax': [ + 'warn', + { selector: "CallExpression[callee.name='unwrap']", message: 'Handle errors instead of unwrapping!' }, + ], + 'meridian/no-direct-cache-tag': 'error', + 'meridian/no-private-env-unless-server-only': 'error', + '@typescript-eslint/no-misused-promises': [ + 'error', + { checksVoidReturn: { arguments: false, attributes: false } }, + ], + '@typescript-eslint/no-floating-promises': ['error', { ignoreIIFE: true, ignoreVoid: true }], + 'react/no-unescaped-entities': 'off', + }, + }, + { + files: ['app/**/{page,layout,not-found,default,loading,global-error,error}.tsx'], + rules: { + 'import/no-default-export': 'off', + '@typescript-eslint/require-await': 'off', + }, + }, + { + files: ['app/**/routeType.ts'], + rules: { + '@typescript-eslint/no-unused-vars': [ + 'warn', + { + argsIgnorePattern: '^_', + varsIgnorePattern: '^_|^Route', + destructuredArrayIgnorePattern: '^_', + caughtErrorsIgnorePattern: '^_', + }, + ], + }, + }, + { + files: ['lib/cache.ts'], + rules: { + 'meridian/no-direct-cache-tag': 'off', + }, + }, + + { + files: ['next.config.ts'], + rules: { '@typescript-eslint/no-unsafe-type-assertion': 'off', 'import/no-default-export': 'off' }, + }, + { ignores: ['next-env.d.ts', 'gen/**/*'] }, +]; diff --git 
a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..35e1a5c --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,36 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "compilerOptions": { + "composite": true, + "incremental": true, + "moduleDetection": "force", + "declaration": true, + "declarationMap": true, + "esModuleInterop": true, + "experimentalDecorators": true, + "noUncheckedIndexedAccess": true, + "emitDecoratorMetadata": true, + "downlevelIteration": false, + "noImplicitAny": true, + "strict": true, + "strictNullChecks": true, + "allowSyntheticDefaultImports": true, + "useDefineForClassFields": false, + "importHelpers": true, + "resolveJsonModule": true, + "newLine": "LF", + "skipLibCheck": true, + "noResolve": false, + "preserveConstEnums": true, + "sourceMap": true, + "isolatedModules": true, + "lib": ["ESNext", "DOM", "DOM.Iterable"], + "module": "nodenext", + "moduleResolution": "nodenext", + "target": "esnext", + "plugins": [{ "name": "next" }], + "allowJs": true, + "jsx": "preserve", + "noEmit": true + } +}