phase 7: full US coverage — grid regions, datacenters, power plants, backfill, chart perf
- Add 7 new grid regions (BPA, DUKE, SOCO, TVA, FPC, WAPA, NWMT) to cover entire continental US
- Expand datacenters from 108 to 292 facilities across 39 operators
- Add EIA power plant pipeline: download script, 3,546 plants >= 50 MW with diamond map markers
- Rewrite backfill script for 10-year data (2015-07-01) with quarterly/monthly chunking, 3-region parallelism, resumability
- Add materialized views (daily/weekly) with server-side granularity selection for chart performance
- Fix map UX: z-index tooltips, disable POI clicks, move legend via MapControl
This commit is contained in:
parent
3251e30a2e
commit
8f99f6535e
File diff suppressed because it is too large
Load Diff
@ -340,6 +340,244 @@
|
||||
"code": "SPP",
|
||||
"iso": "SPP"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[-124.73, 49.00],
|
||||
[-116.05, 49.00],
|
||||
[-116.05, 46.00],
|
||||
[-117.04, 44.30],
|
||||
[-117.04, 42.00],
|
||||
[-120.00, 42.00],
|
||||
[-124.41, 42.00],
|
||||
[-124.56, 42.80],
|
||||
[-124.07, 44.60],
|
||||
[-123.94, 46.18],
|
||||
[-124.10, 46.86],
|
||||
[-124.73, 48.40],
|
||||
[-124.73, 49.00]
|
||||
]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"name": "Bonneville Power Administration",
|
||||
"code": "BPA",
|
||||
"iso": "BPA"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[-116.05, 49.00],
|
||||
[-104.05, 49.00],
|
||||
[-104.05, 45.94],
|
||||
[-104.05, 45.00],
|
||||
[-111.05, 45.00],
|
||||
[-116.05, 46.00],
|
||||
[-116.05, 49.00]
|
||||
]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"name": "NorthWestern Energy Montana",
|
||||
"code": "NWMT",
|
||||
"iso": "NWMT"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[-120.00, 42.00],
|
||||
[-117.04, 42.00],
|
||||
[-117.04, 44.30],
|
||||
[-116.05, 46.00],
|
||||
[-111.05, 45.00],
|
||||
[-104.05, 45.00],
|
||||
[-104.05, 43.00],
|
||||
[-104.05, 41.00],
|
||||
[-104.05, 38.00],
|
||||
[-103.00, 37.00],
|
||||
[-103.00, 36.50],
|
||||
[-100.00, 34.56],
|
||||
[-103.04, 32.00],
|
||||
[-106.65, 31.75],
|
||||
[-109.05, 31.33],
|
||||
[-111.07, 31.33],
|
||||
[-114.63, 32.72],
|
||||
[-114.63, 34.87],
|
||||
[-116.09, 35.98],
|
||||
[-117.63, 37.43],
|
||||
[-120.00, 39.00],
|
||||
[-120.00, 42.00]
|
||||
]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"name": "Western Area Power Administration",
|
||||
"code": "WAPA",
|
||||
"iso": "WAPA"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[-88.07, 37.50],
|
||||
[-87.69, 37.79],
|
||||
[-87.10, 37.79],
|
||||
[-86.52, 36.64],
|
||||
[-85.98, 36.63],
|
||||
[-84.86, 36.63],
|
||||
[-84.22, 36.60],
|
||||
[-82.30, 36.60],
|
||||
[-81.65, 36.60],
|
||||
[-81.65, 35.17],
|
||||
[-82.78, 35.07],
|
||||
[-84.32, 35.00],
|
||||
[-85.61, 34.98],
|
||||
[-88.20, 35.00],
|
||||
[-89.70, 36.25],
|
||||
[-89.10, 36.95],
|
||||
[-88.47, 37.07],
|
||||
[-88.07, 37.50]
|
||||
]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"name": "Tennessee Valley Authority",
|
||||
"code": "TVA",
|
||||
"iso": "TVA"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[-84.32, 35.00],
|
||||
[-82.78, 35.07],
|
||||
[-81.65, 35.17],
|
||||
[-81.65, 36.60],
|
||||
[-82.30, 36.60],
|
||||
[-84.22, 36.60],
|
||||
[-84.86, 36.63],
|
||||
[-84.43, 38.45],
|
||||
[-83.65, 38.63],
|
||||
[-82.60, 38.17],
|
||||
[-81.95, 37.54],
|
||||
[-81.23, 37.27],
|
||||
[-80.52, 37.48],
|
||||
[-80.30, 37.10],
|
||||
[-79.51, 36.54],
|
||||
[-78.45, 35.69],
|
||||
[-77.75, 36.00],
|
||||
[-75.87, 36.55],
|
||||
[-75.87, 35.19],
|
||||
[-76.52, 34.62],
|
||||
[-77.68, 33.95],
|
||||
[-78.90, 33.65],
|
||||
[-79.45, 33.16],
|
||||
[-80.85, 32.11],
|
||||
[-81.15, 32.11],
|
||||
[-82.25, 33.31],
|
||||
[-83.35, 34.49],
|
||||
[-84.32, 35.00]
|
||||
]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"name": "Duke Energy Carolinas",
|
||||
"code": "DUKE",
|
||||
"iso": "DUKE"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[-88.20, 35.00],
|
||||
[-85.61, 34.98],
|
||||
[-84.32, 35.00],
|
||||
[-83.35, 34.49],
|
||||
[-82.25, 33.31],
|
||||
[-81.15, 32.11],
|
||||
[-81.15, 31.00],
|
||||
[-84.86, 30.70],
|
||||
[-87.60, 30.25],
|
||||
[-88.40, 30.23],
|
||||
[-89.67, 34.96],
|
||||
[-89.70, 36.25],
|
||||
[-88.20, 35.00]
|
||||
]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"name": "Southern Company",
|
||||
"code": "SOCO",
|
||||
"iso": "SOCO"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "MultiPolygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
[-87.60, 30.25],
|
||||
[-84.86, 30.70],
|
||||
[-81.15, 31.00],
|
||||
[-81.15, 32.11],
|
||||
[-80.85, 32.11],
|
||||
[-80.45, 31.62],
|
||||
[-81.26, 30.75],
|
||||
[-81.52, 29.49],
|
||||
[-80.52, 28.00],
|
||||
[-80.22, 26.30],
|
||||
[-80.84, 25.15],
|
||||
[-81.81, 24.55],
|
||||
[-82.63, 27.52],
|
||||
[-82.85, 27.83],
|
||||
[-84.34, 29.96],
|
||||
[-85.39, 29.68],
|
||||
[-86.52, 30.38],
|
||||
[-87.60, 30.25]
|
||||
]
|
||||
]
|
||||
]
|
||||
},
|
||||
"properties": {
|
||||
"name": "Florida Power",
|
||||
"code": "FPC",
|
||||
"iso": "FPC"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
74471
data/power-plants.geojson
Normal file
74471
data/power-plants.geojson
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,66 @@
|
||||
-- Daily aggregation of electricity prices + demand.
-- Pre-aggregated rollups so long date-range chart queries read a few hundred
-- rows instead of scanning raw hourly data.
CREATE MATERIALIZED VIEW electricity_prices_daily AS
SELECT
  region_id,
  date_trunc('day', timestamp) AS day,
  AVG(price_mwh) AS avg_price,
  MAX(price_mwh) AS max_price,
  MIN(price_mwh) AS min_price,
  AVG(demand_mw) AS avg_demand,
  MAX(demand_mw) AS peak_demand
FROM electricity_prices
GROUP BY region_id, date_trunc('day', timestamp);

-- Unique index doubles as the requirement for REFRESH MATERIALIZED VIEW CONCURRENTLY.
CREATE UNIQUE INDEX electricity_prices_daily_region_day
ON electricity_prices_daily (region_id, day);

-- Weekly aggregation of electricity prices + demand
CREATE MATERIALIZED VIEW electricity_prices_weekly AS
SELECT
  region_id,
  date_trunc('week', timestamp) AS week,
  AVG(price_mwh) AS avg_price,
  MAX(price_mwh) AS max_price,
  MIN(price_mwh) AS min_price,
  AVG(demand_mw) AS avg_demand,
  MAX(demand_mw) AS peak_demand
FROM electricity_prices
GROUP BY region_id, date_trunc('week', timestamp);

CREATE UNIQUE INDEX electricity_prices_weekly_region_week
ON electricity_prices_weekly (region_id, week);

-- Daily aggregation of generation mix (one row per region + fuel type + day)
CREATE MATERIALIZED VIEW generation_mix_daily AS
SELECT
  region_id,
  fuel_type,
  date_trunc('day', timestamp) AS day,
  AVG(generation_mw) AS avg_generation,
  MAX(generation_mw) AS peak_generation
FROM generation_mix
GROUP BY region_id, fuel_type, date_trunc('day', timestamp);

CREATE UNIQUE INDEX generation_mix_daily_region_fuel_day
ON generation_mix_daily (region_id, fuel_type, day);

-- Weekly aggregation of generation mix
CREATE MATERIALIZED VIEW generation_mix_weekly AS
SELECT
  region_id,
  fuel_type,
  date_trunc('week', timestamp) AS week,
  AVG(generation_mw) AS avg_generation,
  MAX(generation_mw) AS peak_generation
FROM generation_mix
GROUP BY region_id, fuel_type, date_trunc('week', timestamp);

CREATE UNIQUE INDEX generation_mix_weekly_region_fuel_week
ON generation_mix_weekly (region_id, fuel_type, week);

-- BRIN index for time-series range scans on large tables.
-- BRIN stays tiny for append-mostly time-ordered data, unlike a btree.
CREATE INDEX electricity_prices_timestamp_brin
ON electricity_prices USING brin (timestamp);

CREATE INDEX generation_mix_timestamp_brin
ON generation_mix USING brin (timestamp);
|
||||
@ -76,3 +76,17 @@ model GenerationMix {
|
||||
@@index([regionId, timestamp])
|
||||
@@map("generation_mix")
|
||||
}
|
||||
|
||||
/// EIA power plant, seeded from data/power-plants.geojson and upserted by
/// plant_code so reseeding refreshes rows instead of duplicating them.
model PowerPlant {
  id String @id @default(uuid()) @db.Uuid
  // EIA plant code — the natural key used in ON CONFLICT upserts
  plantCode Int @unique @map("plant_code")
  name String
  operator String
  // PostGIS point; Prisma has no native geography type, so reads/writes go
  // through raw SQL (ST_MakePoint / ST_AsGeoJSON)
  location Unsupported("geography(Point, 4326)")
  capacityMw Float @map("capacity_mw")
  fuelType String @map("fuel_type")
  state String
  createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz

  @@map("power_plants")
}
|
||||
|
||||
@ -55,6 +55,26 @@ const DatacenterCollectionSchema = z.object({
|
||||
features: z.array(DatacenterFeatureSchema),
|
||||
});
|
||||
|
||||
// Attribute names as they appear in the raw EIA/ArcGIS export (hence the
// non-camelCase keys; Utility_Na is presumably a truncated "Utility_Name" —
// NOTE(review): confirm against the download script's source fields).
const PowerPlantPropertiesSchema = z.object({
  Plant_Name: z.string(),
  Plant_Code: z.number(),
  Utility_Na: z.string(),
  State: z.string(),
  PrimSource: z.string(),
  Total_MW: z.number(),
});

// One plant: a point geometry plus the raw EIA attributes above.
const PowerPlantFeatureSchema = z.object({
  type: z.literal('Feature'),
  geometry: PointGeometrySchema,
  properties: PowerPlantPropertiesSchema,
});

// Top-level GeoJSON FeatureCollection shape for data/power-plants.geojson.
const PowerPlantCollectionSchema = z.object({
  type: z.literal('FeatureCollection'),
  features: z.array(PowerPlantFeatureSchema),
});
|
||||
|
||||
const AIMilestoneSchema = z.object({
|
||||
date: z.string(),
|
||||
title: z.string(),
|
||||
@ -176,6 +196,69 @@ async function seedDatacenters() {
|
||||
console.log(` Total: ${inserted.toString()} inserted, ${skipped.toString()} skipped`);
|
||||
}
|
||||
|
||||
/** Normalize ArcGIS PrimSource strings to consistent capitalized fuel types. */
|
||||
function normalizeFuelType(primSource: string): string {
|
||||
const lower = primSource.toLowerCase().trim();
|
||||
if (lower === 'natural gas') return 'Natural Gas';
|
||||
if (lower === 'coal') return 'Coal';
|
||||
if (lower === 'nuclear') return 'Nuclear';
|
||||
if (lower.includes('hydro') || lower === 'hydroelectric conventional' || lower === 'pumped storage')
|
||||
return 'Hydroelectric';
|
||||
if (lower === 'wind') return 'Wind';
|
||||
if (lower === 'solar') return 'Solar';
|
||||
if (lower.includes('petroleum') || lower === 'petroleum') return 'Petroleum';
|
||||
if (lower.includes('biomass') || lower === 'wood' || lower === 'wood and wood derived fuels') return 'Biomass';
|
||||
if (lower === 'geothermal') return 'Geothermal';
|
||||
return 'Other';
|
||||
}
|
||||
|
||||
/**
 * Seed the power_plants table from data/power-plants.geojson.
 *
 * Validates the file against PowerPlantCollectionSchema, then upserts each
 * feature keyed on plant_code, so re-running refreshes rows instead of
 * duplicating them. Features with missing/zero coordinates are counted and
 * skipped.
 */
async function seedPowerPlants() {
  console.log('Seeding power plants...');

  const geojson = readAndParse('data/power-plants.geojson', PowerPlantCollectionSchema);

  let upserted = 0;
  let skipped = 0;

  for (const feature of geojson.features) {
    const props = feature.properties;
    const [lng, lat] = feature.geometry.coordinates;

    // Skip features with invalid coordinates
    // NOTE(review): `!lng` / `!lat` already reject 0/NaN/undefined, so the
    // explicit `=== 0` checks are redundant (harmless for US data, where no
    // valid plant sits at 0° anyway).
    if (!lng || !lat || lat === 0 || lng === 0) {
      skipped++;
      continue;
    }

    const id = randomUUID();
    const fuelType = normalizeFuelType(props.PrimSource);

    // Raw SQL because Prisma cannot write the PostGIS geography column.
    // ST_MakePoint takes (lng, lat) order; SRID 4326 = WGS84.
    await prisma.$executeRawUnsafe(
      `INSERT INTO power_plants (id, plant_code, name, operator, location, capacity_mw, fuel_type, state, created_at)
       VALUES ($1::uuid, $2, $3, $4, ST_SetSRID(ST_MakePoint($5, $6), 4326)::geography, $7, $8, $9, NOW())
       ON CONFLICT (plant_code) DO UPDATE SET
         name = EXCLUDED.name,
         operator = EXCLUDED.operator,
         location = EXCLUDED.location,
         capacity_mw = EXCLUDED.capacity_mw,
         fuel_type = EXCLUDED.fuel_type,
         state = EXCLUDED.state`,
      id,
      props.Plant_Code,
      props.Plant_Name,
      props.Utility_Na,
      lng,
      lat,
      props.Total_MW,
      fuelType,
      props.State,
    );
    upserted++;
  }

  console.log(` Total: ${upserted.toString()} upserted, ${skipped.toString()} skipped`);
}
|
||||
|
||||
function validateAIMilestones() {
|
||||
console.log('Validating AI milestones...');
|
||||
const milestones = readAndParse('data/ai-milestones.json', z.array(AIMilestoneSchema));
|
||||
@ -198,6 +281,9 @@ async function main() {
|
||||
await seedDatacenters();
|
||||
console.log('');
|
||||
|
||||
await seedPowerPlants();
|
||||
console.log('');
|
||||
|
||||
validateAIMilestones();
|
||||
|
||||
// Print summary
|
||||
@ -206,8 +292,10 @@ async function main() {
|
||||
'SELECT count(*) as count FROM grid_regions',
|
||||
);
|
||||
const dcCount = await prisma.$queryRawUnsafe<Array<{ count: bigint }>>('SELECT count(*) as count FROM datacenters');
|
||||
const ppCount = await prisma.$queryRawUnsafe<Array<{ count: bigint }>>('SELECT count(*) as count FROM power_plants');
|
||||
console.log(`Grid regions: ${regionCount[0]!.count.toString()}`);
|
||||
console.log(`Datacenters: ${dcCount[0]!.count.toString()}`);
|
||||
console.log(`Power plants: ${ppCount[0]!.count.toString()}`);
|
||||
|
||||
// Show sample spatial data
|
||||
const sample = await prisma.$queryRawUnsafe<Array<{ name: string; location_text: string }>>(
|
||||
|
||||
4
prisma/sql/getAllPowerPlants.sql
Normal file
4
prisma/sql/getAllPowerPlants.sql
Normal file
@ -0,0 +1,4 @@
|
||||
-- All power plants, largest capacity first. Location is serialized to
-- GeoJSON text (ST_AsGeoJSON) so the client can place map markers without
-- handling PostGIS types.
SELECT id, plant_code, name, operator, capacity_mw, fuel_type, state,
       ST_AsGeoJSON(location)::TEXT as location_geojson
FROM power_plants
ORDER BY capacity_mw DESC
|
||||
21
prisma/sql/getDemandDaily.sql
Normal file
21
prisma/sql/getDemandDaily.sql
Normal file
@ -0,0 +1,21 @@
|
||||
-- @param {DateTime} $1:startDate
-- @param {DateTime} $2:endDate
-- @param {String} $3:regionCode - pass 'ALL' to return all regions
-- Daily demand series from the electricity_prices_daily materialized view,
-- decorated with each region's datacenter count and total capacity.
SELECT
  r.code AS region_code,
  r.name AS region_name,
  d.day,
  d.avg_demand,
  d.peak_demand,
  COALESCE(dc.datacenter_count, 0)::INT AS datacenter_count,
  COALESCE(dc.total_dc_capacity_mw, 0) AS total_dc_capacity_mw
FROM electricity_prices_daily d
JOIN grid_regions r ON d.region_id = r.id
-- LEFT JOIN keeps regions with no datacenters (counts default to 0)
LEFT JOIN (
  SELECT region_id, COUNT(*)::INT AS datacenter_count,
    COALESCE(SUM(capacity_mw), 0) AS total_dc_capacity_mw
  FROM datacenters GROUP BY region_id
) dc ON dc.region_id = r.id
WHERE d.day BETWEEN $1 AND $2
  AND ($3 = 'ALL' OR r.code = $3)
ORDER BY r.code, d.day
|
||||
21
prisma/sql/getDemandHourly.sql
Normal file
21
prisma/sql/getDemandHourly.sql
Normal file
@ -0,0 +1,21 @@
|
||||
-- @param {DateTime} $1:startDate
-- @param {DateTime} $2:endDate
-- @param {String} $3:regionCode - pass 'ALL' to return all regions
-- Hourly demand from the raw electricity_prices table. demand_mw is aliased
-- to BOTH avg_demand and peak_demand (and timestamp to "day") so the result
-- shape matches the daily/weekly queries and the API can swap granularity.
SELECT
  r.code AS region_code,
  r.name AS region_name,
  ep.timestamp AS day,
  ep.demand_mw AS avg_demand,
  ep.demand_mw AS peak_demand,
  COALESCE(dc.datacenter_count, 0)::INT AS datacenter_count,
  COALESCE(dc.total_dc_capacity_mw, 0) AS total_dc_capacity_mw
FROM electricity_prices ep
JOIN grid_regions r ON ep.region_id = r.id
-- LEFT JOIN keeps regions with no datacenters (counts default to 0)
LEFT JOIN (
  SELECT region_id, COUNT(*)::INT AS datacenter_count,
    COALESCE(SUM(capacity_mw), 0) AS total_dc_capacity_mw
  FROM datacenters GROUP BY region_id
) dc ON dc.region_id = r.id
WHERE ep.timestamp BETWEEN $1 AND $2
  AND ($3 = 'ALL' OR r.code = $3)
ORDER BY r.code, ep.timestamp
|
||||
21
prisma/sql/getDemandWeekly.sql
Normal file
21
prisma/sql/getDemandWeekly.sql
Normal file
@ -0,0 +1,21 @@
|
||||
-- @param {DateTime} $1:startDate
-- @param {DateTime} $2:endDate
-- @param {String} $3:regionCode - pass 'ALL' to return all regions
-- Weekly demand series from the electricity_prices_weekly materialized view.
-- week is aliased to "day" so the result shape matches the daily query.
SELECT
  r.code AS region_code,
  r.name AS region_name,
  w.week AS day,
  w.avg_demand,
  w.peak_demand,
  COALESCE(dc.datacenter_count, 0)::INT AS datacenter_count,
  COALESCE(dc.total_dc_capacity_mw, 0) AS total_dc_capacity_mw
FROM electricity_prices_weekly w
JOIN grid_regions r ON w.region_id = r.id
-- LEFT JOIN keeps regions with no datacenters (counts default to 0)
LEFT JOIN (
  SELECT region_id, COUNT(*)::INT AS datacenter_count,
    COALESCE(SUM(capacity_mw), 0) AS total_dc_capacity_mw
  FROM datacenters GROUP BY region_id
) dc ON dc.region_id = r.id
WHERE w.week BETWEEN $1 AND $2
  AND ($3 = 'ALL' OR r.code = $3)
ORDER BY r.code, w.week
|
||||
14
prisma/sql/getGenerationDaily.sql
Normal file
14
prisma/sql/getGenerationDaily.sql
Normal file
@ -0,0 +1,14 @@
|
||||
-- @param {String} $1:regionCode
-- @param {DateTime} $2:startDate
-- @param {DateTime} $3:endDate
-- Daily generation mix per fuel type from the generation_mix_daily
-- materialized view. avg_generation/day are aliased to generation_mw/timestamp
-- so the result shape matches the hourly query.
SELECT
  gd.fuel_type,
  gd.day AS timestamp,
  gd.avg_generation AS generation_mw,
  r.code AS region_code,
  r.name AS region_name
FROM generation_mix_daily gd
JOIN grid_regions r ON gd.region_id = r.id
WHERE r.code = $1
  AND gd.day BETWEEN $2 AND $3
ORDER BY gd.day ASC, gd.fuel_type
|
||||
11
prisma/sql/getGenerationHourly.sql
Normal file
11
prisma/sql/getGenerationHourly.sql
Normal file
@ -0,0 +1,11 @@
|
||||
-- @param {String} $1:regionCode
-- @param {DateTime} $2:startDate
-- @param {DateTime} $3:endDate
-- Hourly generation mix per fuel type, straight from the raw table.
SELECT
  gm.fuel_type, gm.timestamp, gm.generation_mw,
  r.code AS region_code, r.name AS region_name
FROM generation_mix gm
JOIN grid_regions r ON gm.region_id = r.id
WHERE r.code = $1
  AND gm.timestamp BETWEEN $2 AND $3
ORDER BY gm.timestamp ASC, gm.fuel_type
|
||||
14
prisma/sql/getGenerationWeekly.sql
Normal file
14
prisma/sql/getGenerationWeekly.sql
Normal file
@ -0,0 +1,14 @@
|
||||
-- @param {String} $1:regionCode
-- @param {DateTime} $2:startDate
-- @param {DateTime} $3:endDate
-- Weekly generation mix per fuel type from the generation_mix_weekly
-- materialized view; aliases keep the result shape identical to hourly.
SELECT
  gw.fuel_type,
  gw.week AS timestamp,
  gw.avg_generation AS generation_mw,
  r.code AS region_code,
  r.name AS region_name
FROM generation_mix_weekly gw
JOIN grid_regions r ON gw.region_id = r.id
WHERE r.code = $1
  AND gw.week BETWEEN $2 AND $3
ORDER BY gw.week ASC, gw.fuel_type
|
||||
14
prisma/sql/getPricesDaily.sql
Normal file
14
prisma/sql/getPricesDaily.sql
Normal file
@ -0,0 +1,14 @@
|
||||
-- @param {String} $1:regionCode
-- @param {DateTime} $2:startDate
-- @param {DateTime} $3:endDate
-- Daily average price + demand from the electricity_prices_daily materialized
-- view; aliases match the hourly query's result shape.
SELECT
  d.day AS timestamp,
  d.avg_price AS price_mwh,
  d.avg_demand AS demand_mw,
  r.code AS region_code,
  r.name AS region_name
FROM electricity_prices_daily d
JOIN grid_regions r ON d.region_id = r.id
WHERE r.code = $1
  AND d.day BETWEEN $2 AND $3
ORDER BY d.day ASC
|
||||
14
prisma/sql/getPricesHourly.sql
Normal file
14
prisma/sql/getPricesHourly.sql
Normal file
@ -0,0 +1,14 @@
|
||||
-- @param {String} $1:regionCode
-- @param {DateTime} $2:startDate
-- @param {DateTime} $3:endDate
-- Hourly price + demand for one region, straight from the raw table.
SELECT
  ep.timestamp,
  ep.price_mwh,
  ep.demand_mw,
  r.code AS region_code,
  r.name AS region_name
FROM electricity_prices ep
JOIN grid_regions r ON ep.region_id = r.id
WHERE r.code = $1
  AND ep.timestamp BETWEEN $2 AND $3
ORDER BY ep.timestamp ASC
|
||||
14
prisma/sql/getPricesWeekly.sql
Normal file
14
prisma/sql/getPricesWeekly.sql
Normal file
@ -0,0 +1,14 @@
|
||||
-- @param {String} $1:regionCode
-- @param {DateTime} $2:startDate
-- @param {DateTime} $3:endDate
-- Weekly average price + demand from the electricity_prices_weekly
-- materialized view; aliases match the hourly query's result shape.
SELECT
  w.week AS timestamp,
  w.avg_price AS price_mwh,
  w.avg_demand AS demand_mw,
  r.code AS region_code,
  r.name AS region_name
FROM electricity_prices_weekly w
JOIN grid_regions r ON w.region_id = r.id
WHERE r.code = $1
  AND w.week BETWEEN $2 AND $3
ORDER BY w.week ASC
|
||||
@ -1,10 +1,15 @@
|
||||
/**
|
||||
* Historical data backfill script.
|
||||
* Historical data backfill script (10-year).
|
||||
*
|
||||
* Populates 6 months of historical data from EIA and FRED into Postgres.
|
||||
* Idempotent — safe to re-run; existing records are updated, not duplicated.
|
||||
* Populates ~10 years of historical data (from 2015-07-01) from EIA and FRED
|
||||
* into Postgres. Uses time-chunked requests to stay under EIA's 5,000-row
|
||||
* pagination limit, with concurrent region fetching and resumability.
|
||||
*
|
||||
* Idempotent — safe to re-run; uses ON CONFLICT upserts.
|
||||
*
|
||||
* Usage: bun run scripts/backfill.ts
|
||||
* bun run scripts/backfill.ts --skip-demand --skip-generation
|
||||
* bun run scripts/backfill.ts --only-commodities
|
||||
*/
|
||||
|
||||
import 'dotenv/config';
|
||||
@ -13,24 +18,59 @@ import { PrismaPg } from '@prisma/adapter-pg';
|
||||
import { PrismaClient } from '../src/generated/prisma/client.js';
|
||||
|
||||
import * as eia from '../src/lib/api/eia.js';
|
||||
import { getFuelTypeData, getRegionData, getRetailElectricityPrices } from '../src/lib/api/eia.js';
|
||||
import { getRetailElectricityPrices } from '../src/lib/api/eia.js';
|
||||
import * as fred from '../src/lib/api/fred.js';
|
||||
import type { RegionCode } from '../src/lib/schemas/electricity.js';
|
||||
import { type RegionCode } from '../src/lib/schemas/electricity.js';
|
||||
|
||||
const adapter = new PrismaPg({ connectionString: process.env.DATABASE_URL });
|
||||
const prisma = new PrismaClient({ adapter });
|
||||
|
||||
const ALL_REGIONS: RegionCode[] = ['PJM', 'ERCOT', 'CAISO', 'NYISO', 'ISONE', 'MISO', 'SPP'];
|
||||
// ---------------------------------------------------------------------------
|
||||
// Configuration
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const SIX_MONTHS_MS = 6 * 30 * 24 * 60 * 60 * 1000;
|
||||
/** EIA RTO hourly data begins around 2015-07 for most ISOs */
|
||||
const BACKFILL_START = '2015-07-01';
|
||||
|
||||
function sixMonthsAgoIso(): string {
|
||||
return new Date(Date.now() - SIX_MONTHS_MS).toISOString().slice(0, 10);
|
||||
}
|
||||
const ALL_REGIONS: RegionCode[] = [
|
||||
'PJM',
|
||||
'ERCOT',
|
||||
'CAISO',
|
||||
'NYISO',
|
||||
'ISONE',
|
||||
'MISO',
|
||||
'SPP',
|
||||
'BPA',
|
||||
'DUKE',
|
||||
'SOCO',
|
||||
'TVA',
|
||||
'FPC',
|
||||
'WAPA',
|
||||
'NWMT',
|
||||
];
|
||||
|
||||
function todayIso(): string {
|
||||
return new Date().toISOString().slice(0, 10);
|
||||
}
|
||||
/** Number of regions to fetch concurrently */
|
||||
const CONCURRENCY = 3;
|
||||
|
||||
/** Minimum delay between sequential API requests (ms) */
|
||||
const REQUEST_DELAY_MS = 200;
|
||||
|
||||
/** DB upsert batch size */
|
||||
const UPSERT_BATCH_SIZE = 2000;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// CLI flags
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const args = new Set(process.argv.slice(2));
|
||||
const skipDemand = args.has('--skip-demand');
|
||||
const skipGeneration = args.has('--skip-generation');
|
||||
const skipCommodities = args.has('--skip-commodities');
|
||||
const onlyCommodities = args.has('--only-commodities');
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function sleep(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
@ -41,27 +81,253 @@ function log(msg: string): void {
|
||||
console.log(`[${ts}] ${msg}`);
|
||||
}
|
||||
|
||||
function todayIso(): string {
|
||||
return new Date().toISOString().slice(0, 10);
|
||||
}
|
||||
|
||||
/** Generate quarterly date ranges: [start, end] pairs as YYYY-MM-DD strings */
|
||||
function generateQuarterChunks(startDate: string, endDate: string): Array<[string, string]> {
|
||||
const chunks: Array<[string, string]> = [];
|
||||
const start = new Date(`${startDate}T00:00:00Z`);
|
||||
const end = new Date(`${endDate}T00:00:00Z`);
|
||||
|
||||
const cursor = new Date(start);
|
||||
while (cursor < end) {
|
||||
const chunkStart = cursor.toISOString().slice(0, 10);
|
||||
// Advance 3 months
|
||||
cursor.setUTCMonth(cursor.getUTCMonth() + 3);
|
||||
const chunkEnd = cursor < end ? cursor.toISOString().slice(0, 10) : endDate;
|
||||
chunks.push([chunkStart, chunkEnd]);
|
||||
}
|
||||
|
||||
return chunks;
|
||||
}
|
||||
|
||||
/** Generate monthly date ranges: [start, end] pairs as YYYY-MM-DD strings */
|
||||
function generateMonthChunks(startDate: string, endDate: string): Array<[string, string]> {
|
||||
const chunks: Array<[string, string]> = [];
|
||||
const start = new Date(`${startDate}T00:00:00Z`);
|
||||
const end = new Date(`${endDate}T00:00:00Z`);
|
||||
|
||||
const cursor = new Date(start);
|
||||
while (cursor < end) {
|
||||
const chunkStart = cursor.toISOString().slice(0, 10);
|
||||
cursor.setUTCMonth(cursor.getUTCMonth() + 1);
|
||||
const chunkEnd = cursor < end ? cursor.toISOString().slice(0, 10) : endDate;
|
||||
chunks.push([chunkStart, chunkEnd]);
|
||||
}
|
||||
|
||||
return chunks;
|
||||
}
|
||||
|
||||
/** Format a quarter label like "Q3 2015" */
|
||||
function quarterLabel(dateStr: string): string {
|
||||
const d = new Date(`${dateStr}T00:00:00Z`);
|
||||
const q = Math.floor(d.getUTCMonth() / 3) + 1;
|
||||
return `Q${q} ${d.getUTCFullYear()}`;
|
||||
}
|
||||
|
||||
/** Format a month label like "Jul 2015" */
|
||||
function monthLabel(dateStr: string): string {
|
||||
const d = new Date(`${dateStr}T00:00:00Z`);
|
||||
return d.toLocaleString('en-US', { month: 'short', year: 'numeric', timeZone: 'UTC' });
|
||||
}
|
||||
|
||||
/** Run async tasks with limited concurrency */
|
||||
async function runWithConcurrency<T>(tasks: Array<() => Promise<T>>, limit: number): Promise<T[]> {
|
||||
const results: T[] = [];
|
||||
let index = 0;
|
||||
|
||||
async function worker(): Promise<void> {
|
||||
while (index < tasks.length) {
|
||||
const currentIndex = index++;
|
||||
const task = tasks[currentIndex]!;
|
||||
results[currentIndex] = await task();
|
||||
}
|
||||
}
|
||||
|
||||
const workers = Array.from({ length: Math.min(limit, tasks.length) }, () => worker());
|
||||
await Promise.all(workers);
|
||||
return results;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Electricity demand backfill
|
||||
// Progress tracker — stores completed region+chunk combos in memory
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const completedChunks = new Set<string>();
|
||||
|
||||
function chunkKey(phase: string, region: string, chunkStart: string): string {
|
||||
return `${phase}:${region}:${chunkStart}`;
|
||||
}
|
||||
|
||||
function isChunkDone(phase: string, region: string, chunkStart: string): boolean {
|
||||
return completedChunks.has(chunkKey(phase, region, chunkStart));
|
||||
}
|
||||
|
||||
function markChunkDone(phase: string, region: string, chunkStart: string): void {
|
||||
completedChunks.add(chunkKey(phase, region, chunkStart));
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Stats
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Run-wide counters (insert/update/error tallies) printed in the summary. */
interface BackfillStats {
  demandInserted: number;
  demandUpdated: number;
  demandErrors: number;
  genInserted: number;
  genUpdated: number;
  genErrors: number;
  commodityInserted: number;
  commodityUpdated: number;
}

// Single shared accumulator, mutated by the backfill phases as they run
// (e.g. backfillDemandForRegion bumps the demand* counters).
const stats: BackfillStats = {
  demandInserted: 0,
  demandUpdated: 0,
  demandErrors: 0,
  genInserted: 0,
  genUpdated: 0,
  genErrors: 0,
  commodityInserted: 0,
  commodityUpdated: 0,
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Electricity demand backfill — chunked by quarter
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Backfill hourly demand (and a derived price) for one region, chunk by chunk.
 *
 * For each date chunk (quarters, per quarterLabel): fetch EIA demand data,
 * derive a synthetic hourly price from the region's monthly retail price
 * scaled by the hour's demand-to-peak ratio, and batch-upsert into
 * electricity_prices. Finished chunks are recorded via markChunkDone so a
 * retry in the same process skips them; per-chunk errors are tallied and the
 * loop moves on rather than aborting the region.
 *
 * @param regionCode EIA region code (e.g. 'PJM')
 * @param regionId   grid_regions.id for the region
 * @param chunks     [start, end] YYYY-MM-DD pairs to fetch
 * @param retailPrices        "REGION:YYYY-MM" -> $/MWh monthly retail price
 * @param latestPriceByRegion fallback $/MWh when a month has no retail price
 */
async function backfillDemandForRegion(
  regionCode: RegionCode,
  regionId: string,
  chunks: Array<[string, string]>,
  retailPrices: Map<string, number>,
  latestPriceByRegion: Map<string, number>,
): Promise<void> {
  for (let i = 0; i < chunks.length; i++) {
    const [start, end] = chunks[i]!;
    const label = `[DEMAND] ${regionCode}: ${quarterLabel(start)} (${i + 1}/${chunks.length})`;

    // Resumability: skip chunks already completed earlier in this run.
    if (isChunkDone('demand', regionCode, start)) {
      continue;
    }

    try {
      log(` ${label} — fetching...`);
      const demandData = await eia.getRegionData(regionCode, 'D', { start, end });
      // Keep only points with a real reading; the predicate also narrows
      // valueMw from number | null to number for the code below.
      const validPoints = demandData.filter((p): p is typeof p & { valueMw: number } => p.valueMw !== null);

      if (validPoints.length === 0) {
        // An empty chunk still counts as done (e.g. before data begins).
        log(` ${label} — 0 data points, skipping`);
        markChunkDone('demand', regionCode, start);
        await sleep(REQUEST_DELAY_MS);
        continue;
      }

      // Compute peak demand for price variation within this chunk
      const peakDemand = Math.max(...validPoints.map(p => p.valueMw));

      // Build upsert rows
      const rows = validPoints.map(point => {
        const month = point.timestamp.toISOString().slice(0, 7);
        // Monthly retail price for this region/month, falling back to the
        // region's latest known price, then 0 (which yields priceMwh = 0).
        const basePrice = retailPrices.get(`${regionCode}:${month}`) ?? latestPriceByRegion.get(regionCode) ?? 0;
        const demandRatio = peakDemand > 0 ? point.valueMw / peakDemand : 0.5;
        // Synthetic price: base scaled 0.8x–1.2x by the demand ratio.
        const priceMwh = basePrice > 0 ? basePrice * (0.8 + 0.4 * demandRatio) : 0;

        return {
          regionId,
          priceMwh,
          demandMw: point.valueMw,
          timestamp: point.timestamp,
        };
      });

      // Batch upsert using raw SQL for speed
      let inserted = 0;
      let updated = 0;
      for (let j = 0; j < rows.length; j += UPSERT_BATCH_SIZE) {
        const batch = rows.slice(j, j + UPSERT_BATCH_SIZE);
        const result = await upsertDemandBatch(batch);
        inserted += result.inserted;
        updated += result.updated;
      }

      stats.demandInserted += inserted;
      stats.demandUpdated += updated;
      log(` ${label} — ${inserted} inserted, ${updated} updated (${validPoints.length} points)`);
      markChunkDone('demand', regionCode, start);
    } catch (err) {
      // Count the failure and continue; the chunk stays un-marked so a later
      // run can retry it.
      stats.demandErrors++;
      log(` ${label} — ERROR: ${err instanceof Error ? err.message : String(err)}`);
    }

    // Throttle between chunks to be polite to the EIA API.
    await sleep(REQUEST_DELAY_MS);
  }
}
|
||||
|
||||
/** Row counts from a batch upsert: freshly inserted vs. updated in place. */
interface UpsertResult {
  inserted: number;
  updated: number;
}
|
||||
|
||||
/**
 * Upsert one batch of demand/price rows into electricity_prices.
 *
 * Builds a single multi-row INSERT ... ON CONFLICT (region_id, timestamp)
 * with four bound parameters per row, and reports how many rows were newly
 * inserted vs. updated. The split relies on PostgreSQL's xmax system column:
 * xmax = 0 on tuples created by this statement.
 *
 * @param rows batch of rows for one region/chunk (empty batch is a no-op)
 * @returns counts of inserted and updated rows
 */
async function upsertDemandBatch(
  rows: Array<{ regionId: string; priceMwh: number; demandMw: number; timestamp: Date }>,
): Promise<UpsertResult> {
  if (rows.length === 0) return { inserted: 0, updated: 0 };

  // Build VALUES clause with parameterized placeholders
  const values: unknown[] = [];
  const placeholders: string[] = [];

  for (let i = 0; i < rows.length; i++) {
    const row = rows[i]!;
    // 4 params per row, so row i uses $[4i+1] .. $[4i+4].
    const offset = i * 4;
    placeholders.push(
      `(gen_random_uuid(), $${offset + 1}::uuid, $${offset + 2}, $${offset + 3}, $${offset + 4}::timestamptz, 'EIA')`,
    );
    values.push(row.regionId, row.priceMwh, row.demandMw, row.timestamp);
  }

  const sql = `
    WITH upserted AS (
      INSERT INTO electricity_prices (id, region_id, price_mwh, demand_mw, timestamp, source)
      VALUES ${placeholders.join(',\n')}
      ON CONFLICT (region_id, timestamp) DO UPDATE SET
        price_mwh = EXCLUDED.price_mwh,
        demand_mw = EXCLUDED.demand_mw,
        source = EXCLUDED.source
      RETURNING (xmax = 0) AS is_insert
    )
    SELECT
      COUNT(*) FILTER (WHERE is_insert) AS inserted,
      COUNT(*) FILTER (WHERE NOT is_insert) AS updated
    FROM upserted
  `;

  // Postgres COUNT comes back as bigint; convert before returning.
  const result = await prisma.$queryRawUnsafe<Array<{ inserted: bigint; updated: bigint }>>(sql, ...values);
  const row = result[0]!;
  return { inserted: Number(row.inserted), updated: Number(row.updated) };
}
|
||||
|
||||
async function backfillElectricity(): Promise<void> {
|
||||
log('=== Backfilling electricity demand + price data ===');
|
||||
log('=== Backfilling electricity demand + price data (10-year) ===');
|
||||
|
||||
const gridRegions = await prisma.gridRegion.findMany({
|
||||
select: { id: true, code: true },
|
||||
});
|
||||
const regionIdByCode = new Map(gridRegions.map(r => [r.code, r.id]));
|
||||
|
||||
const start = sixMonthsAgoIso();
|
||||
const end = todayIso();
|
||||
const chunks = generateQuarterChunks(BACKFILL_START, end);
|
||||
log(` ${chunks.length} quarterly chunks from ${BACKFILL_START} to ${end}`);
|
||||
|
||||
// Fetch monthly retail electricity prices for all regions upfront
|
||||
// Key: "REGION:YYYY-MM" -> $/MWh
|
||||
// Fetch retail prices upfront (one call covers all months + all states)
|
||||
const retailPriceByRegionMonth = new Map<string, number>();
|
||||
log(' Fetching retail electricity prices...');
|
||||
try {
|
||||
const startMonth = start.slice(0, 7); // YYYY-MM
|
||||
const startMonth = BACKFILL_START.slice(0, 7);
|
||||
const endMonth = end.slice(0, 7);
|
||||
const retailPrices = await getRetailElectricityPrices({ start: startMonth, end: endMonth });
|
||||
for (const rp of retailPrices) {
|
||||
@ -72,208 +338,152 @@ async function backfillElectricity(): Promise<void> {
|
||||
log(` ERROR fetching retail prices: ${err instanceof Error ? err.message : String(err)}`);
|
||||
}
|
||||
|
||||
// Build a fallback: for each region, find the most recent month with data
|
||||
// Build fallback: latest known price per region
|
||||
const latestPriceByRegion = new Map<string, number>();
|
||||
for (const [key, price] of retailPriceByRegionMonth) {
|
||||
const region = key.split(':')[0]!;
|
||||
const existing = latestPriceByRegion.get(region);
|
||||
// Since keys are "REGION:YYYY-MM", the latest month lexicographically is the most recent
|
||||
if (!existing || key > `${region}:${existing}`) {
|
||||
latestPriceByRegion.set(region, price);
|
||||
}
|
||||
}
|
||||
|
||||
/** Look up price for a region+month, falling back to latest known price */
|
||||
function getRetailPrice(region: string, month: string): number {
|
||||
return retailPriceByRegionMonth.get(`${region}:${month}`) ?? latestPriceByRegion.get(region) ?? 0;
|
||||
}
|
||||
await sleep(REQUEST_DELAY_MS);
|
||||
|
||||
await sleep(200);
|
||||
|
||||
for (const regionCode of ALL_REGIONS) {
|
||||
// Build tasks for each region
|
||||
const regionTasks = ALL_REGIONS.map(regionCode => {
|
||||
return async () => {
|
||||
const regionId = regionIdByCode.get(regionCode);
|
||||
if (!regionId) {
|
||||
log(` SKIP ${regionCode} — no grid_region row found`);
|
||||
continue;
|
||||
return;
|
||||
}
|
||||
|
||||
log(` Fetching demand for ${regionCode}...`);
|
||||
try {
|
||||
const demandData = await getRegionData(regionCode, 'D', { start, end });
|
||||
const validPoints = demandData.filter((p): p is typeof p & { valueMw: number } => p.valueMw !== null);
|
||||
|
||||
if (validPoints.length === 0) {
|
||||
log(` ${regionCode}: 0 valid data points`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check existing records to decide create vs update
|
||||
const timestamps = validPoints.map(p => p.timestamp);
|
||||
const existing = await prisma.electricityPrice.findMany({
|
||||
where: { regionId, timestamp: { in: timestamps } },
|
||||
select: { id: true, timestamp: true },
|
||||
await backfillDemandForRegion(regionCode, regionId, chunks, retailPriceByRegionMonth, latestPriceByRegion);
|
||||
};
|
||||
});
|
||||
const existingByTime = new Map(existing.map(e => [e.timestamp.getTime(), e.id]));
|
||||
|
||||
// Find peak demand for demand-based price variation
|
||||
const peakDemand = Math.max(...validPoints.map(p => p.valueMw));
|
||||
|
||||
const toCreate: Array<{
|
||||
regionId: string;
|
||||
priceMwh: number;
|
||||
demandMw: number;
|
||||
timestamp: Date;
|
||||
source: string;
|
||||
}> = [];
|
||||
const toUpdate: Array<{ id: string; demandMw: number; priceMwh: number }> = [];
|
||||
|
||||
for (const point of validPoints) {
|
||||
const month = point.timestamp.toISOString().slice(0, 7);
|
||||
const basePrice = getRetailPrice(regionCode, month);
|
||||
// Add demand-based variation: scale price between 0.8x and 1.2x based on demand
|
||||
const demandRatio = peakDemand > 0 ? point.valueMw / peakDemand : 0.5;
|
||||
const priceMwh = basePrice > 0 ? basePrice * (0.8 + 0.4 * demandRatio) : 0;
|
||||
|
||||
const existingId = existingByTime.get(point.timestamp.getTime());
|
||||
if (existingId) {
|
||||
toUpdate.push({ id: existingId, demandMw: point.valueMw, priceMwh });
|
||||
} else {
|
||||
toCreate.push({
|
||||
regionId,
|
||||
priceMwh,
|
||||
demandMw: point.valueMw,
|
||||
timestamp: point.timestamp,
|
||||
source: 'EIA',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (toCreate.length > 0) {
|
||||
const result = await prisma.electricityPrice.createMany({ data: toCreate });
|
||||
log(` ${regionCode}: ${result.count} records inserted`);
|
||||
}
|
||||
|
||||
if (toUpdate.length > 0) {
|
||||
// Batch updates in chunks of 100 to avoid transaction timeouts
|
||||
const chunkSize = 100;
|
||||
for (let i = 0; i < toUpdate.length; i += chunkSize) {
|
||||
const chunk = toUpdate.slice(i, i + chunkSize);
|
||||
await prisma.$transaction(
|
||||
chunk.map(u =>
|
||||
prisma.electricityPrice.update({
|
||||
where: { id: u.id },
|
||||
data: { demandMw: u.demandMw, priceMwh: u.priceMwh, source: 'EIA' },
|
||||
}),
|
||||
),
|
||||
);
|
||||
}
|
||||
log(` ${regionCode}: ${toUpdate.length} records updated`);
|
||||
}
|
||||
|
||||
if (toCreate.length === 0 && toUpdate.length === 0) {
|
||||
log(` ${regionCode}: no changes needed`);
|
||||
}
|
||||
} catch (err) {
|
||||
log(` ERROR ${regionCode}: ${err instanceof Error ? err.message : String(err)}`);
|
||||
}
|
||||
|
||||
// Rate limit: 200ms between regions
|
||||
await sleep(200);
|
||||
}
|
||||
await runWithConcurrency(regionTasks, CONCURRENCY);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Generation mix backfill
|
||||
// Generation mix backfill — chunked by month
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async function backfillGenerationForRegion(
|
||||
regionCode: RegionCode,
|
||||
regionId: string,
|
||||
chunks: Array<[string, string]>,
|
||||
): Promise<void> {
|
||||
for (let i = 0; i < chunks.length; i++) {
|
||||
const [start, end] = chunks[i]!;
|
||||
const label = `[GEN] ${regionCode}: ${monthLabel(start)} (${i + 1}/${chunks.length})`;
|
||||
|
||||
if (isChunkDone('gen', regionCode, start)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
log(` ${label} — fetching...`);
|
||||
const fuelData = await eia.getFuelTypeData(regionCode, { start, end });
|
||||
const validPoints = fuelData.filter((p): p is typeof p & { generationMw: number } => p.generationMw !== null);
|
||||
|
||||
if (validPoints.length === 0) {
|
||||
log(` ${label} — 0 data points, skipping`);
|
||||
markChunkDone('gen', regionCode, start);
|
||||
await sleep(REQUEST_DELAY_MS);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Build upsert rows
|
||||
const rows = validPoints.map(point => ({
|
||||
regionId,
|
||||
fuelType: point.fuelType,
|
||||
generationMw: point.generationMw,
|
||||
timestamp: point.timestamp,
|
||||
}));
|
||||
|
||||
let inserted = 0;
|
||||
let updated = 0;
|
||||
for (let j = 0; j < rows.length; j += UPSERT_BATCH_SIZE) {
|
||||
const batch = rows.slice(j, j + UPSERT_BATCH_SIZE);
|
||||
const result = await upsertGenerationBatch(batch);
|
||||
inserted += result.inserted;
|
||||
updated += result.updated;
|
||||
}
|
||||
|
||||
stats.genInserted += inserted;
|
||||
stats.genUpdated += updated;
|
||||
log(` ${label} — ${inserted} inserted, ${updated} updated (${validPoints.length} points)`);
|
||||
markChunkDone('gen', regionCode, start);
|
||||
} catch (err) {
|
||||
stats.genErrors++;
|
||||
log(` ${label} — ERROR: ${err instanceof Error ? err.message : String(err)}`);
|
||||
}
|
||||
|
||||
await sleep(REQUEST_DELAY_MS);
|
||||
}
|
||||
}
|
||||
|
||||
async function upsertGenerationBatch(
|
||||
rows: Array<{ regionId: string; fuelType: string; generationMw: number; timestamp: Date }>,
|
||||
): Promise<UpsertResult> {
|
||||
if (rows.length === 0) return { inserted: 0, updated: 0 };
|
||||
|
||||
const values: unknown[] = [];
|
||||
const placeholders: string[] = [];
|
||||
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
const row = rows[i]!;
|
||||
const offset = i * 4;
|
||||
placeholders.push(
|
||||
`(gen_random_uuid(), $${offset + 1}::uuid, $${offset + 2}, $${offset + 3}, $${offset + 4}::timestamptz)`,
|
||||
);
|
||||
values.push(row.regionId, row.fuelType, row.generationMw, row.timestamp);
|
||||
}
|
||||
|
||||
const sql = `
|
||||
WITH upserted AS (
|
||||
INSERT INTO generation_mix (id, region_id, fuel_type, generation_mw, timestamp)
|
||||
VALUES ${placeholders.join(',\n')}
|
||||
ON CONFLICT (region_id, fuel_type, timestamp) DO UPDATE SET
|
||||
generation_mw = EXCLUDED.generation_mw
|
||||
RETURNING (xmax = 0) AS is_insert
|
||||
)
|
||||
SELECT
|
||||
COUNT(*) FILTER (WHERE is_insert) AS inserted,
|
||||
COUNT(*) FILTER (WHERE NOT is_insert) AS updated
|
||||
FROM upserted
|
||||
`;
|
||||
|
||||
const result = await prisma.$queryRawUnsafe<Array<{ inserted: bigint; updated: bigint }>>(sql, ...values);
|
||||
const row = result[0]!;
|
||||
return { inserted: Number(row.inserted), updated: Number(row.updated) };
|
||||
}
|
||||
|
||||
async function backfillGeneration(): Promise<void> {
|
||||
log('=== Backfilling generation mix data ===');
|
||||
log('=== Backfilling generation mix data (10-year) ===');
|
||||
|
||||
const gridRegions = await prisma.gridRegion.findMany({
|
||||
select: { id: true, code: true },
|
||||
});
|
||||
const regionIdByCode = new Map(gridRegions.map(r => [r.code, r.id]));
|
||||
|
||||
const start = sixMonthsAgoIso();
|
||||
const end = todayIso();
|
||||
const chunks = generateMonthChunks(BACKFILL_START, end);
|
||||
log(` ${chunks.length} monthly chunks from ${BACKFILL_START} to ${end}`);
|
||||
|
||||
for (const regionCode of ALL_REGIONS) {
|
||||
const regionTasks = ALL_REGIONS.map(regionCode => {
|
||||
return async () => {
|
||||
const regionId = regionIdByCode.get(regionCode);
|
||||
if (!regionId) {
|
||||
log(` SKIP ${regionCode} — no grid_region row found`);
|
||||
continue;
|
||||
return;
|
||||
}
|
||||
|
||||
log(` Fetching generation mix for ${regionCode}...`);
|
||||
try {
|
||||
const fuelData = await getFuelTypeData(regionCode, { start, end });
|
||||
const validPoints = fuelData.filter((p): p is typeof p & { generationMw: number } => p.generationMw !== null);
|
||||
|
||||
if (validPoints.length === 0) {
|
||||
log(` ${regionCode}: 0 valid data points`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const timestamps = validPoints.map(p => p.timestamp);
|
||||
const existing = await prisma.generationMix.findMany({
|
||||
where: { regionId, timestamp: { in: timestamps } },
|
||||
select: { id: true, timestamp: true, fuelType: true },
|
||||
await backfillGenerationForRegion(regionCode, regionId, chunks);
|
||||
};
|
||||
});
|
||||
const existingKeys = new Map(existing.map(e => [`${e.fuelType}:${e.timestamp.getTime()}`, e.id]));
|
||||
|
||||
const toCreate: Array<{
|
||||
regionId: string;
|
||||
fuelType: string;
|
||||
generationMw: number;
|
||||
timestamp: Date;
|
||||
}> = [];
|
||||
const toUpdate: Array<{ id: string; generationMw: number }> = [];
|
||||
|
||||
for (const point of validPoints) {
|
||||
const key = `${point.fuelType}:${point.timestamp.getTime()}`;
|
||||
const existingId = existingKeys.get(key);
|
||||
if (existingId) {
|
||||
toUpdate.push({ id: existingId, generationMw: point.generationMw });
|
||||
} else {
|
||||
toCreate.push({
|
||||
regionId,
|
||||
fuelType: point.fuelType,
|
||||
generationMw: point.generationMw,
|
||||
timestamp: point.timestamp,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (toCreate.length > 0) {
|
||||
const result = await prisma.generationMix.createMany({ data: toCreate });
|
||||
log(` ${regionCode}: ${result.count} generation records inserted`);
|
||||
}
|
||||
|
||||
if (toUpdate.length > 0) {
|
||||
const chunkSize = 100;
|
||||
for (let i = 0; i < toUpdate.length; i += chunkSize) {
|
||||
const chunk = toUpdate.slice(i, i + chunkSize);
|
||||
await prisma.$transaction(
|
||||
chunk.map(u =>
|
||||
prisma.generationMix.update({
|
||||
where: { id: u.id },
|
||||
data: { generationMw: u.generationMw },
|
||||
}),
|
||||
),
|
||||
);
|
||||
}
|
||||
log(` ${regionCode}: ${toUpdate.length} generation records updated`);
|
||||
}
|
||||
|
||||
if (toCreate.length === 0 && toUpdate.length === 0) {
|
||||
log(` ${regionCode}: no changes needed`);
|
||||
}
|
||||
} catch (err) {
|
||||
log(` ERROR ${regionCode}: ${err instanceof Error ? err.message : String(err)}`);
|
||||
}
|
||||
|
||||
await sleep(200);
|
||||
}
|
||||
await runWithConcurrency(regionTasks, CONCURRENCY);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -289,9 +499,9 @@ interface CommodityRow {
|
||||
}
|
||||
|
||||
async function backfillCommodities(): Promise<void> {
|
||||
log('=== Backfilling commodity prices ===');
|
||||
log('=== Backfilling commodity prices (10-year) ===');
|
||||
|
||||
const start = sixMonthsAgoIso();
|
||||
const start = BACKFILL_START;
|
||||
const end = todayIso();
|
||||
const rows: CommodityRow[] = [];
|
||||
|
||||
@ -308,7 +518,7 @@ async function backfillCommodities(): Promise<void> {
|
||||
log(` ERROR EIA natural gas: ${err instanceof Error ? err.message : String(err)}`);
|
||||
}
|
||||
|
||||
await sleep(200);
|
||||
await sleep(REQUEST_DELAY_MS);
|
||||
|
||||
// EIA: WTI Crude
|
||||
log(' Fetching EIA WTI crude prices...');
|
||||
@ -323,7 +533,7 @@ async function backfillCommodities(): Promise<void> {
|
||||
log(` ERROR EIA WTI crude: ${err instanceof Error ? err.message : String(err)}`);
|
||||
}
|
||||
|
||||
await sleep(200);
|
||||
await sleep(REQUEST_DELAY_MS);
|
||||
|
||||
// FRED: Natural Gas (DHHNGSP)
|
||||
log(' Fetching FRED natural gas prices...');
|
||||
@ -337,7 +547,7 @@ async function backfillCommodities(): Promise<void> {
|
||||
log(` ERROR FRED natural gas: ${fredGas.error}`);
|
||||
}
|
||||
|
||||
await sleep(200);
|
||||
await sleep(REQUEST_DELAY_MS);
|
||||
|
||||
// FRED: WTI Crude (DCOILWTICO)
|
||||
log(' Fetching FRED WTI crude prices...');
|
||||
@ -351,7 +561,7 @@ async function backfillCommodities(): Promise<void> {
|
||||
log(` ERROR FRED WTI crude: ${fredOil.error}`);
|
||||
}
|
||||
|
||||
await sleep(200);
|
||||
await sleep(REQUEST_DELAY_MS);
|
||||
|
||||
// FRED: Coal (PCOALAUUSDM)
|
||||
log(' Fetching FRED coal prices...');
|
||||
@ -381,58 +591,78 @@ async function backfillCommodities(): Promise<void> {
|
||||
}
|
||||
const uniqueRows = [...deduped.values()];
|
||||
|
||||
// Upsert into database
|
||||
const timestamps = uniqueRows.map(r => r.timestamp);
|
||||
const commodities = [...new Set(uniqueRows.map(r => r.commodity))];
|
||||
|
||||
const existing = await prisma.commodityPrice.findMany({
|
||||
where: { commodity: { in: commodities }, timestamp: { in: timestamps } },
|
||||
select: { id: true, commodity: true, timestamp: true },
|
||||
});
|
||||
const existingKeys = new Map(existing.map(e => [`${e.commodity}:${e.timestamp.getTime()}`, e.id]));
|
||||
|
||||
const toCreate: Array<{ commodity: string; price: number; unit: string; timestamp: Date; source: string }> = [];
|
||||
const toUpdate: Array<{ id: string; price: number; unit: string; source: string }> = [];
|
||||
|
||||
for (const row of uniqueRows) {
|
||||
const key = `${row.commodity}:${row.timestamp.getTime()}`;
|
||||
const existingId = existingKeys.get(key);
|
||||
if (existingId) {
|
||||
toUpdate.push({ id: existingId, price: row.price, unit: row.unit, source: row.source });
|
||||
} else {
|
||||
toCreate.push({
|
||||
commodity: row.commodity,
|
||||
price: row.price,
|
||||
unit: row.unit,
|
||||
timestamp: row.timestamp,
|
||||
source: row.source,
|
||||
});
|
||||
}
|
||||
// Batch upsert commodities
|
||||
let totalInserted = 0;
|
||||
let totalUpdated = 0;
|
||||
for (let i = 0; i < uniqueRows.length; i += UPSERT_BATCH_SIZE) {
|
||||
const batch = uniqueRows.slice(i, i + UPSERT_BATCH_SIZE);
|
||||
const result = await upsertCommodityBatch(batch);
|
||||
totalInserted += result.inserted;
|
||||
totalUpdated += result.updated;
|
||||
}
|
||||
|
||||
if (toCreate.length > 0) {
|
||||
const result = await prisma.commodityPrice.createMany({ data: toCreate });
|
||||
log(` Commodities: ${result.count} records inserted`);
|
||||
}
|
||||
stats.commodityInserted = totalInserted;
|
||||
stats.commodityUpdated = totalUpdated;
|
||||
log(` Commodities: ${totalInserted} inserted, ${totalUpdated} updated (${uniqueRows.length} unique rows)`);
|
||||
}
|
||||
|
||||
if (toUpdate.length > 0) {
|
||||
const chunkSize = 100;
|
||||
for (let i = 0; i < toUpdate.length; i += chunkSize) {
|
||||
const chunk = toUpdate.slice(i, i + chunkSize);
|
||||
await prisma.$transaction(
|
||||
chunk.map(u =>
|
||||
prisma.commodityPrice.update({
|
||||
where: { id: u.id },
|
||||
data: { price: u.price, unit: u.unit, source: u.source },
|
||||
}),
|
||||
),
|
||||
async function upsertCommodityBatch(rows: CommodityRow[]): Promise<UpsertResult> {
|
||||
if (rows.length === 0) return { inserted: 0, updated: 0 };
|
||||
|
||||
const values: unknown[] = [];
|
||||
const placeholders: string[] = [];
|
||||
|
||||
for (let i = 0; i < rows.length; i++) {
|
||||
const row = rows[i]!;
|
||||
const offset = i * 5;
|
||||
placeholders.push(
|
||||
`(gen_random_uuid(), $${offset + 1}, $${offset + 2}, $${offset + 3}, $${offset + 4}::timestamptz, $${offset + 5})`,
|
||||
);
|
||||
}
|
||||
log(` Commodities: ${toUpdate.length} records updated`);
|
||||
values.push(row.commodity, row.price, row.unit, row.timestamp, row.source);
|
||||
}
|
||||
|
||||
if (toCreate.length === 0 && toUpdate.length === 0) {
|
||||
log(' Commodities: no changes needed');
|
||||
const sql = `
|
||||
WITH upserted AS (
|
||||
INSERT INTO commodity_prices (id, commodity, price, unit, timestamp, source)
|
||||
VALUES ${placeholders.join(',\n')}
|
||||
ON CONFLICT (commodity, timestamp) DO UPDATE SET
|
||||
price = EXCLUDED.price,
|
||||
unit = EXCLUDED.unit,
|
||||
source = EXCLUDED.source
|
||||
RETURNING (xmax = 0) AS is_insert
|
||||
)
|
||||
SELECT
|
||||
COUNT(*) FILTER (WHERE is_insert) AS inserted,
|
||||
COUNT(*) FILTER (WHERE NOT is_insert) AS updated
|
||||
FROM upserted
|
||||
`;
|
||||
|
||||
const result = await prisma.$queryRawUnsafe<Array<{ inserted: bigint; updated: bigint }>>(sql, ...values);
|
||||
const row = result[0]!;
|
||||
return { inserted: Number(row.inserted), updated: Number(row.updated) };
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Materialized view refresh
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const MATERIALIZED_VIEWS = [
|
||||
'electricity_prices_daily',
|
||||
'electricity_prices_weekly',
|
||||
'generation_mix_daily',
|
||||
'generation_mix_weekly',
|
||||
] as const;
|
||||
|
||||
async function refreshMaterializedViews(): Promise<void> {
|
||||
log('=== Refreshing materialized views ===');
|
||||
for (const view of MATERIALIZED_VIEWS) {
|
||||
try {
|
||||
log(` Refreshing ${view}...`);
|
||||
await prisma.$executeRawUnsafe(`REFRESH MATERIALIZED VIEW CONCURRENTLY ${view}`);
|
||||
log(` ${view} refreshed`);
|
||||
} catch (err) {
|
||||
log(` ERROR refreshing ${view}: ${err instanceof Error ? err.message : String(err)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -441,19 +671,36 @@ async function backfillCommodities(): Promise<void> {
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async function main(): Promise<void> {
|
||||
log('Starting historical backfill (6 months)...');
|
||||
log(`Date range: ${sixMonthsAgoIso()} to ${todayIso()}`);
|
||||
const end = todayIso();
|
||||
log(`Starting 10-year historical backfill...`);
|
||||
log(`Date range: ${BACKFILL_START} to ${end}`);
|
||||
log(`Regions: ${ALL_REGIONS.join(', ')} (${ALL_REGIONS.length} total)`);
|
||||
log(`Concurrency: ${CONCURRENCY} regions in parallel`);
|
||||
log('');
|
||||
|
||||
if (!onlyCommodities && !skipDemand) {
|
||||
await backfillElectricity();
|
||||
log('');
|
||||
}
|
||||
|
||||
if (!onlyCommodities && !skipGeneration) {
|
||||
await backfillGeneration();
|
||||
log('');
|
||||
}
|
||||
|
||||
if (!skipCommodities) {
|
||||
await backfillCommodities();
|
||||
log('');
|
||||
}
|
||||
|
||||
// Refresh materialized views after data load
|
||||
await refreshMaterializedViews();
|
||||
log('');
|
||||
|
||||
log('=== Backfill Summary ===');
|
||||
log(` Demand: ${stats.demandInserted} inserted, ${stats.demandUpdated} updated, ${stats.demandErrors} errors`);
|
||||
log(` Generation: ${stats.genInserted} inserted, ${stats.genUpdated} updated, ${stats.genErrors} errors`);
|
||||
log(` Commodities: ${stats.commodityInserted} inserted, ${stats.commodityUpdated} updated`);
|
||||
log('Backfill complete.');
|
||||
}
|
||||
|
||||
|
||||
104
scripts/download-power-plants.ts
Normal file
104
scripts/download-power-plants.ts
Normal file
@ -0,0 +1,104 @@
|
||||
/**
 * Downloads power plant data from the EIA ArcGIS FeatureServer.
 *
 * Fetches all US power plants >= 50 MW with pagination,
 * then saves the combined result as data/power-plants.geojson.
 *
 * Usage: bun run scripts/download-power-plants.ts
 */

import { mkdirSync, writeFileSync } from 'fs';
import { resolve } from 'path';
import { z } from 'zod/v4';

// EIA "Power Plants in the US" feature layer, queried via the ArcGIS REST API.
const BASE_URL =
  'https://services2.arcgis.com/FiaPA4ga0iQKduv3/ArcGIS/rest/services/Power_Plants_in_the_US/FeatureServer/0/query';

// Attribute fields requested per plant. Some names are truncated server-side
// (e.g. 'Utility_Na') — these must match the layer's field names exactly.
const OUT_FIELDS = [
  'Plant_Name',
  'Plant_Code',
  'Utility_Na',
  'State',
  'County',
  'Latitude',
  'Longitude',
  'PrimSource',
  'Total_MW',
].join(',');

// Records requested per page (resultRecordCount).
const PAGE_SIZE = 2000;

// Minimal GeoJSON shape we rely on; feature attributes stay `unknown` and are
// validated/consumed downstream.
const ArcGISFeatureSchema = z.object({
  type: z.literal('Feature'),
  geometry: z.object({
    type: z.literal('Point'),
    coordinates: z.tuple([z.number(), z.number()]),
  }),
  properties: z.record(z.string(), z.unknown()),
});

const ArcGISResponseSchema = z.object({
  type: z.literal('FeatureCollection'),
  features: z.array(ArcGISFeatureSchema),
  // ArcGIS sets exceededTransferLimit when more pages remain to fetch.
  properties: z.object({ exceededTransferLimit: z.boolean().optional() }).optional(),
});

type ArcGISFeature = z.infer<typeof ArcGISFeatureSchema>;
type ArcGISGeoJSONResponse = z.infer<typeof ArcGISResponseSchema>;
||||
|
||||
async function fetchPage(offset: number): Promise<ArcGISGeoJSONResponse> {
|
||||
const params = new URLSearchParams({
|
||||
where: 'Total_MW >= 50',
|
||||
outFields: OUT_FIELDS,
|
||||
f: 'geojson',
|
||||
resultRecordCount: String(PAGE_SIZE),
|
||||
resultOffset: String(offset),
|
||||
});
|
||||
|
||||
const url = `${BASE_URL}?${params.toString()}`;
|
||||
console.log(`Fetching offset=${offset}...`);
|
||||
|
||||
const res = await fetch(url);
|
||||
if (!res.ok) {
|
||||
throw new Error(`HTTP ${res.status}: ${res.statusText}`);
|
||||
}
|
||||
|
||||
const json: unknown = await res.json();
|
||||
return ArcGISResponseSchema.parse(json);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const allFeatures: ArcGISFeature[] = [];
|
||||
let offset = 0;
|
||||
|
||||
while (true) {
|
||||
const page = await fetchPage(offset);
|
||||
const count = page.features.length;
|
||||
console.log(` Got ${count} features`);
|
||||
allFeatures.push(...page.features);
|
||||
|
||||
// ArcGIS signals more data via exceededTransferLimit or by returning a full page
|
||||
const hasMore = page.properties?.exceededTransferLimit === true || count >= PAGE_SIZE;
|
||||
if (!hasMore || count === 0) break;
|
||||
offset += PAGE_SIZE;
|
||||
}
|
||||
|
||||
console.log(`\nTotal features: ${allFeatures.length}`);
|
||||
|
||||
const collection: ArcGISGeoJSONResponse = {
|
||||
type: 'FeatureCollection',
|
||||
features: allFeatures,
|
||||
};
|
||||
|
||||
const outDir = resolve(import.meta.dirname, '..', 'data');
|
||||
mkdirSync(outDir, { recursive: true });
|
||||
|
||||
const outPath = resolve(outDir, 'power-plants.geojson');
|
||||
writeFileSync(outPath, JSON.stringify(collection, null, 2));
|
||||
console.log(`Saved to ${outPath}`);
|
||||
}
|
||||
|
||||
main().catch((err: unknown) => {
|
||||
console.error('Download failed:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
@ -1,7 +1,8 @@
|
||||
'use server';
|
||||
|
||||
import { getDemandByRegion } from '@/generated/prisma/sql.js';
|
||||
import { getDemandByRegion, getDemandDaily, getDemandHourly, getDemandWeekly } from '@/generated/prisma/sql.js';
|
||||
import { prisma } from '@/lib/db.js';
|
||||
import { getGranularity } from '@/lib/granularity.js';
|
||||
import { serialize } from '@/lib/superjson.js';
|
||||
import { validateRegionCode } from '@/lib/utils.js';
|
||||
import { cacheLife, cacheTag } from 'next/cache';
|
||||
@ -32,10 +33,43 @@ interface ActionError {
|
||||
|
||||
type ActionResult<T> = ActionSuccess<T> | ActionError;
|
||||
|
||||
/** Unified demand row returned to the client */
|
||||
interface DemandRow {
|
||||
region_code: string;
|
||||
region_name: string;
|
||||
day: Date;
|
||||
avg_demand: number;
|
||||
peak_demand: number;
|
||||
datacenter_count: number | null;
|
||||
total_dc_capacity_mw: number | null;
|
||||
}
|
||||
|
||||
async function queryDemand(startDate: Date, endDate: Date, regionCode: string): Promise<DemandRow[]> {
|
||||
const granularity = getGranularity(startDate, endDate);
|
||||
switch (granularity) {
|
||||
case 'hourly':
|
||||
return prisma.$queryRawTyped(getDemandHourly(startDate, endDate, regionCode));
|
||||
case 'daily': {
|
||||
const rows = await prisma.$queryRawTyped(getDemandDaily(startDate, endDate, regionCode));
|
||||
return rows.filter(
|
||||
(r): r is typeof r & { day: Date; avg_demand: number; peak_demand: number } =>
|
||||
r.day !== null && r.avg_demand !== null && r.peak_demand !== null,
|
||||
);
|
||||
}
|
||||
case 'weekly': {
|
||||
const rows = await prisma.$queryRawTyped(getDemandWeekly(startDate, endDate, regionCode));
|
||||
return rows.filter(
|
||||
(r): r is typeof r & { day: Date; avg_demand: number; peak_demand: number } =>
|
||||
r.day !== null && r.avg_demand !== null && r.peak_demand !== null,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchDemandByRegion(
|
||||
regionCode: string,
|
||||
timeRange: TimeRange = '30d',
|
||||
): Promise<ActionResult<getDemandByRegion.Result[]>> {
|
||||
): Promise<ActionResult<DemandRow[]>> {
|
||||
'use cache';
|
||||
cacheLife('demand');
|
||||
cacheTag(`demand-${regionCode}-${timeRange}`);
|
||||
@ -46,7 +80,7 @@ export async function fetchDemandByRegion(
|
||||
}
|
||||
const startDate = timeRangeToStartDate(timeRange);
|
||||
const endDate = new Date();
|
||||
const rows = await prisma.$queryRawTyped(getDemandByRegion(startDate, endDate, regionCode));
|
||||
const rows = await queryDemand(startDate, endDate, regionCode);
|
||||
return { ok: true, data: serialize(rows) };
|
||||
} catch (err) {
|
||||
return {
|
||||
@ -64,6 +98,7 @@ export async function fetchRegionDemandSummary(): Promise<ActionResult<getDemand
|
||||
try {
|
||||
const startDate = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000);
|
||||
const endDate = new Date();
|
||||
// Summary always uses the original daily-aggregating query for 7-day window
|
||||
const rows = await prisma.$queryRawTyped(getDemandByRegion(startDate, endDate, 'ALL'));
|
||||
return { ok: true, data: serialize(rows) };
|
||||
} catch (err) {
|
||||
|
||||
@ -1,7 +1,8 @@
|
||||
'use server';
|
||||
|
||||
import { getGenerationMix } from '@/generated/prisma/sql.js';
|
||||
import { getGenerationDaily, getGenerationHourly, getGenerationWeekly } from '@/generated/prisma/sql.js';
|
||||
import { prisma } from '@/lib/db.js';
|
||||
import { getGranularity } from '@/lib/granularity.js';
|
||||
import { serialize } from '@/lib/superjson.js';
|
||||
import { validateRegionCode } from '@/lib/utils.js';
|
||||
import { cacheLife, cacheTag } from 'next/cache';
|
||||
@ -32,10 +33,41 @@ interface ActionError {
|
||||
|
||||
type ActionResult<T> = ActionSuccess<T> | ActionError;
|
||||
|
||||
/** Unified generation row returned to the client */
|
||||
interface GenerationRow {
|
||||
fuel_type: string;
|
||||
timestamp: Date;
|
||||
generation_mw: number;
|
||||
region_code: string;
|
||||
region_name: string;
|
||||
}
|
||||
|
||||
async function queryGeneration(regionCode: string, startDate: Date, endDate: Date): Promise<GenerationRow[]> {
|
||||
const granularity = getGranularity(startDate, endDate);
|
||||
switch (granularity) {
|
||||
case 'hourly':
|
||||
return prisma.$queryRawTyped(getGenerationHourly(regionCode, startDate, endDate));
|
||||
case 'daily': {
|
||||
const rows = await prisma.$queryRawTyped(getGenerationDaily(regionCode, startDate, endDate));
|
||||
return rows.filter(
|
||||
(r): r is typeof r & { fuel_type: string; timestamp: Date; generation_mw: number } =>
|
||||
r.fuel_type !== null && r.timestamp !== null && r.generation_mw !== null,
|
||||
);
|
||||
}
|
||||
case 'weekly': {
|
||||
const rows = await prisma.$queryRawTyped(getGenerationWeekly(regionCode, startDate, endDate));
|
||||
return rows.filter(
|
||||
(r): r is typeof r & { fuel_type: string; timestamp: Date; generation_mw: number } =>
|
||||
r.fuel_type !== null && r.timestamp !== null && r.generation_mw !== null,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchGenerationMix(
|
||||
regionCode: string,
|
||||
timeRange: TimeRange = '30d',
|
||||
): Promise<ActionResult<getGenerationMix.Result[]>> {
|
||||
): Promise<ActionResult<GenerationRow[]>> {
|
||||
'use cache';
|
||||
cacheLife('demand');
|
||||
cacheTag(`generation-${regionCode}-${timeRange}`);
|
||||
@ -46,7 +78,7 @@ export async function fetchGenerationMix(
|
||||
}
|
||||
const startDate = timeRangeToStartDate(timeRange);
|
||||
const endDate = new Date();
|
||||
const rows = await prisma.$queryRawTyped(getGenerationMix(regionCode, startDate, endDate));
|
||||
const rows = await queryGeneration(regionCode, startDate, endDate);
|
||||
return { ok: true, data: serialize(rows) };
|
||||
} catch (err) {
|
||||
return {
|
||||
|
||||
22
src/actions/power-plants.ts
Normal file
22
src/actions/power-plants.ts
Normal file
@ -0,0 +1,22 @@
|
||||
'use server';
|
||||
|
||||
import { getAllPowerPlants } from '@/generated/prisma/sql.js';
|
||||
import { prisma } from '@/lib/db.js';
|
||||
import { serialize } from '@/lib/superjson.js';
|
||||
import { cacheLife, cacheTag } from 'next/cache';
|
||||
|
||||
export async function fetchAllPowerPlants() {
|
||||
'use cache';
|
||||
cacheLife('seedData');
|
||||
cacheTag('power-plants');
|
||||
|
||||
try {
|
||||
const rows = await prisma.$queryRawTyped(getAllPowerPlants());
|
||||
return { ok: true as const, data: serialize(rows) };
|
||||
} catch (err) {
|
||||
return {
|
||||
ok: false as const,
|
||||
error: `Failed to fetch power plants: ${err instanceof Error ? err.message : String(err)}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
@ -1,7 +1,14 @@
|
||||
'use server';
|
||||
|
||||
import { getLatestPrices, getPriceTrends, getRegionPriceHeatmap } from '@/generated/prisma/sql.js';
|
||||
import {
|
||||
getLatestPrices,
|
||||
getPricesDaily,
|
||||
getPricesHourly,
|
||||
getPricesWeekly,
|
||||
getRegionPriceHeatmap,
|
||||
} from '@/generated/prisma/sql.js';
|
||||
import { prisma } from '@/lib/db.js';
|
||||
import { getGranularity } from '@/lib/granularity.js';
|
||||
import { serialize } from '@/lib/superjson.js';
|
||||
import { validateRegionCode } from '@/lib/utils.js';
|
||||
import { cacheLife, cacheTag } from 'next/cache';
|
||||
@ -48,10 +55,41 @@ export async function fetchLatestPrices(): Promise<ActionResult<getLatestPrices.
|
||||
}
|
||||
}
|
||||
|
||||
/** Unified price trend row returned to the client */
|
||||
interface PriceTrendRow {
|
||||
timestamp: Date;
|
||||
price_mwh: number;
|
||||
demand_mw: number;
|
||||
region_code: string;
|
||||
region_name: string;
|
||||
}
|
||||
|
||||
async function queryPriceTrends(regionCode: string, startDate: Date, endDate: Date): Promise<PriceTrendRow[]> {
|
||||
const granularity = getGranularity(startDate, endDate);
|
||||
switch (granularity) {
|
||||
case 'hourly':
|
||||
return prisma.$queryRawTyped(getPricesHourly(regionCode, startDate, endDate));
|
||||
case 'daily': {
|
||||
const rows = await prisma.$queryRawTyped(getPricesDaily(regionCode, startDate, endDate));
|
||||
return rows.filter(
|
||||
(r): r is typeof r & { timestamp: Date; price_mwh: number; demand_mw: number } =>
|
||||
r.timestamp !== null && r.price_mwh !== null && r.demand_mw !== null,
|
||||
);
|
||||
}
|
||||
case 'weekly': {
|
||||
const rows = await prisma.$queryRawTyped(getPricesWeekly(regionCode, startDate, endDate));
|
||||
return rows.filter(
|
||||
(r): r is typeof r & { timestamp: Date; price_mwh: number; demand_mw: number } =>
|
||||
r.timestamp !== null && r.price_mwh !== null && r.demand_mw !== null,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchPriceTrends(
|
||||
regionCode: string,
|
||||
timeRange: TimeRange = '30d',
|
||||
): Promise<ActionResult<getPriceTrends.Result[]>> {
|
||||
): Promise<ActionResult<PriceTrendRow[]>> {
|
||||
'use cache';
|
||||
cacheLife('prices');
|
||||
cacheTag(`price-trends-${regionCode}-${timeRange}`);
|
||||
@ -62,7 +100,7 @@ export async function fetchPriceTrends(
|
||||
}
|
||||
const startDate = timeRangeToStartDate(timeRange);
|
||||
const endDate = new Date();
|
||||
const rows = await prisma.$queryRawTyped(getPriceTrends(regionCode, startDate, endDate));
|
||||
const rows = await queryPriceTrends(regionCode, startDate, endDate);
|
||||
return { ok: true, data: serialize(rows) };
|
||||
} catch (err) {
|
||||
return {
|
||||
@ -88,9 +126,7 @@ export async function fetchPriceHeatmapData(): Promise<ActionResult<getRegionPri
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchAllRegionPriceTrends(
|
||||
timeRange: TimeRange = '30d',
|
||||
): Promise<ActionResult<getPriceTrends.Result[]>> {
|
||||
export async function fetchAllRegionPriceTrends(timeRange: TimeRange = '30d'): Promise<ActionResult<PriceTrendRow[]>> {
|
||||
'use cache';
|
||||
cacheLife('prices');
|
||||
cacheTag(`all-price-trends-${timeRange}`);
|
||||
@ -99,9 +135,7 @@ export async function fetchAllRegionPriceTrends(
|
||||
const startDate = timeRangeToStartDate(timeRange);
|
||||
const endDate = new Date();
|
||||
const regions = await prisma.gridRegion.findMany({ select: { code: true } });
|
||||
const results = await Promise.all(
|
||||
regions.map(r => prisma.$queryRawTyped(getPriceTrends(r.code, startDate, endDate))),
|
||||
);
|
||||
const results = await Promise.all(regions.map(r => queryPriceTrends(r.code, startDate, endDate)));
|
||||
return { ok: true, data: serialize(results.flat()) };
|
||||
} catch (err) {
|
||||
return {
|
||||
@ -203,7 +237,7 @@ export async function fetchPriceSparklines(): Promise<
|
||||
const regions = await prisma.gridRegion.findMany({ select: { code: true } });
|
||||
const results = await Promise.all(
|
||||
regions.map(async r => {
|
||||
const rows = await prisma.$queryRawTyped(getPriceTrends(r.code, startDate, endDate));
|
||||
const rows = await queryPriceTrends(r.code, startDate, endDate);
|
||||
return {
|
||||
region_code: r.code,
|
||||
points: rows.map(row => ({ value: row.price_mwh })),
|
||||
|
||||
@ -5,7 +5,22 @@ import { getRegionData, getRetailElectricityPrices } from '@/lib/api/eia.js';
|
||||
import { prisma } from '@/lib/db.js';
|
||||
import { EIA_RESPONDENT_CODES, type RegionCode } from '@/lib/schemas/electricity.js';
|
||||
|
||||
const ALL_REGIONS: RegionCode[] = ['PJM', 'ERCOT', 'CAISO', 'NYISO', 'ISONE', 'MISO', 'SPP'];
|
||||
const ALL_REGIONS: RegionCode[] = [
|
||||
'PJM',
|
||||
'ERCOT',
|
||||
'CAISO',
|
||||
'NYISO',
|
||||
'ISONE',
|
||||
'MISO',
|
||||
'SPP',
|
||||
'BPA',
|
||||
'DUKE',
|
||||
'SOCO',
|
||||
'TVA',
|
||||
'FPC',
|
||||
'WAPA',
|
||||
'NWMT',
|
||||
];
|
||||
|
||||
function isRegionCode(value: string): value is RegionCode {
|
||||
return value in EIA_RESPONDENT_CODES;
|
||||
|
||||
@ -5,7 +5,22 @@ import { getFuelTypeData } from '@/lib/api/eia.js';
|
||||
import { prisma } from '@/lib/db.js';
|
||||
import { EIA_RESPONDENT_CODES, type RegionCode } from '@/lib/schemas/electricity.js';
|
||||
|
||||
const ALL_REGIONS: RegionCode[] = ['PJM', 'ERCOT', 'CAISO', 'NYISO', 'ISONE', 'MISO', 'SPP'];
|
||||
const ALL_REGIONS: RegionCode[] = [
|
||||
'PJM',
|
||||
'ERCOT',
|
||||
'CAISO',
|
||||
'NYISO',
|
||||
'ISONE',
|
||||
'MISO',
|
||||
'SPP',
|
||||
'BPA',
|
||||
'DUKE',
|
||||
'SOCO',
|
||||
'TVA',
|
||||
'FPC',
|
||||
'WAPA',
|
||||
'NWMT',
|
||||
];
|
||||
|
||||
function isRegionCode(value: string): value is RegionCode {
|
||||
return value in EIA_RESPONDENT_CODES;
|
||||
|
||||
@ -1,9 +1,15 @@
|
||||
import { fetchAllDatacentersWithLocation } from '@/actions/datacenters.js';
|
||||
import { fetchAllPowerPlants } from '@/actions/power-plants.js';
|
||||
import { fetchPriceHeatmapData } from '@/actions/prices.js';
|
||||
import type { DatacenterMarkerData } from '@/components/map/datacenter-marker.js';
|
||||
import { EnergyMapLoader } from '@/components/map/energy-map-loader.js';
|
||||
import type { PowerPlantMarkerData } from '@/components/map/power-plant-marker.js';
|
||||
import type { RegionHeatmapData } from '@/components/map/region-overlay.js';
|
||||
import type { getAllDatacentersWithLocation, getRegionPriceHeatmap } from '@/generated/prisma/sql.js';
|
||||
import type {
|
||||
getAllDatacentersWithLocation,
|
||||
getAllPowerPlants,
|
||||
getRegionPriceHeatmap,
|
||||
} from '@/generated/prisma/sql.js';
|
||||
import { deserialize } from '@/lib/superjson.js';
|
||||
|
||||
interface GeoJsonPoint {
|
||||
@ -46,7 +52,11 @@ function parseBoundaryGeoJson(geojsonStr: string | null): object | null {
|
||||
}
|
||||
|
||||
export async function MapContent() {
|
||||
const [dcResult, priceResult] = await Promise.all([fetchAllDatacentersWithLocation(), fetchPriceHeatmapData()]);
|
||||
const [dcResult, priceResult, ppResult] = await Promise.all([
|
||||
fetchAllDatacentersWithLocation(),
|
||||
fetchPriceHeatmapData(),
|
||||
fetchAllPowerPlants(),
|
||||
]);
|
||||
|
||||
const datacenters: DatacenterMarkerData[] = [];
|
||||
if (dcResult.ok) {
|
||||
@ -88,5 +98,26 @@ export async function MapContent() {
|
||||
}
|
||||
}
|
||||
|
||||
return <EnergyMapLoader datacenters={datacenters} regions={regions} />;
|
||||
const powerPlants: PowerPlantMarkerData[] = [];
|
||||
if (ppResult.ok) {
|
||||
const rows = deserialize<getAllPowerPlants.Result[]>(ppResult.data);
|
||||
for (const row of rows) {
|
||||
const loc = parseLocationGeoJson(row.location_geojson);
|
||||
if (loc) {
|
||||
powerPlants.push({
|
||||
id: row.id,
|
||||
plant_code: row.plant_code,
|
||||
name: row.name,
|
||||
operator: row.operator,
|
||||
capacity_mw: row.capacity_mw,
|
||||
fuel_type: row.fuel_type,
|
||||
state: row.state,
|
||||
lat: loc.lat,
|
||||
lng: loc.lng,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return <EnergyMapLoader datacenters={datacenters} regions={regions} powerPlants={powerPlants} />;
|
||||
}
|
||||
|
||||
@ -307,6 +307,7 @@ export function DemandChart({ initialData, summaryData }: DemandChartProps) {
|
||||
strokeWidth={2}
|
||||
dot={false}
|
||||
connectNulls
|
||||
isAnimationActive={trendChartData.length <= 200}
|
||||
/>
|
||||
))}
|
||||
</ComposedChart>
|
||||
|
||||
@ -14,7 +14,7 @@ import {
|
||||
ChartTooltipContent,
|
||||
} from '@/components/ui/chart.js';
|
||||
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select.js';
|
||||
import type { getGenerationMix } from '@/generated/prisma/sql.js';
|
||||
import type { getGenerationHourly } from '@/generated/prisma/sql.js';
|
||||
import { deserialize } from '@/lib/superjson.js';
|
||||
import { formatMarketDate, formatMarketDateTime, formatMarketTime } from '@/lib/utils.js';
|
||||
|
||||
@ -106,7 +106,7 @@ interface PivotedRow {
|
||||
other: number;
|
||||
}
|
||||
|
||||
function pivotGenerationData(rows: getGenerationMix.Result[], regionCode: string): PivotedRow[] {
|
||||
function pivotGenerationData(rows: getGenerationHourly.Result[], regionCode: string): PivotedRow[] {
|
||||
const byTimestamp = new Map<number, PivotedRow>();
|
||||
|
||||
for (const row of rows) {
|
||||
@ -165,7 +165,7 @@ function computeGenerationSplit(data: PivotedRow[]): GenerationSplit {
|
||||
}
|
||||
|
||||
interface GenerationChartProps {
|
||||
initialData: ReturnType<typeof import('@/lib/superjson.js').serialize<getGenerationMix.Result[]>>;
|
||||
initialData: ReturnType<typeof import('@/lib/superjson.js').serialize<getGenerationHourly.Result[]>>;
|
||||
initialRegion: string;
|
||||
initialTimeRange: TimeRange;
|
||||
}
|
||||
@ -177,7 +177,7 @@ export function GenerationChart({ initialData, initialRegion, initialTimeRange }
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [isPending, startTransition] = useTransition();
|
||||
|
||||
const rows = useMemo(() => deserialize<getGenerationMix.Result[]>(serializedData), [serializedData]);
|
||||
const rows = useMemo(() => deserialize<getGenerationHourly.Result[]>(serializedData), [serializedData]);
|
||||
const chartData = useMemo(() => pivotGenerationData(rows, regionCode), [rows, regionCode]);
|
||||
const split = useMemo(() => computeGenerationSplit(chartData), [chartData]);
|
||||
|
||||
@ -337,6 +337,7 @@ export function GenerationChart({ initialData, initialRegion, initialTimeRange }
|
||||
fill={`url(#fill-${fuel})`}
|
||||
stroke={`var(--color-${fuel})`}
|
||||
strokeWidth={1.5}
|
||||
isAnimationActive={chartData.length <= 200}
|
||||
/>
|
||||
))}
|
||||
<ChartLegend content={<ChartLegendContent />} />
|
||||
|
||||
@ -407,6 +407,7 @@ export function PriceChart({
|
||||
strokeWidth={2}
|
||||
dot={false}
|
||||
connectNulls
|
||||
isAnimationActive={pivoted.length <= 200}
|
||||
/>
|
||||
))}
|
||||
|
||||
@ -422,6 +423,7 @@ export function PriceChart({
|
||||
strokeDasharray="6 3"
|
||||
dot={false}
|
||||
connectNulls
|
||||
isAnimationActive={pivoted.length <= 200}
|
||||
/>
|
||||
))}
|
||||
|
||||
|
||||
@ -55,9 +55,15 @@ interface DatacenterMarkerProps {
|
||||
datacenter: DatacenterMarkerData;
|
||||
onClick: (datacenter: DatacenterMarkerData) => void;
|
||||
isPulsing?: boolean;
|
||||
isSelected?: boolean;
|
||||
}
|
||||
|
||||
export function DatacenterMarker({ datacenter, onClick, isPulsing = false }: DatacenterMarkerProps) {
|
||||
export function DatacenterMarker({
|
||||
datacenter,
|
||||
onClick,
|
||||
isPulsing = false,
|
||||
isSelected = false,
|
||||
}: DatacenterMarkerProps) {
|
||||
const [hovered, setHovered] = useState(false);
|
||||
const size = getMarkerSize(datacenter.capacity_mw);
|
||||
const color = getOperatorColor(datacenter.operator);
|
||||
@ -71,6 +77,7 @@ export function DatacenterMarker({ datacenter, onClick, isPulsing = false }: Dat
|
||||
<AdvancedMarker
|
||||
position={{ lat: datacenter.lat, lng: datacenter.lng }}
|
||||
onClick={handleClick}
|
||||
zIndex={isSelected ? 1000 : hovered ? 999 : undefined}
|
||||
title={`${datacenter.name} (${datacenter.operator}) - ${datacenter.capacity_mw} MW`}>
|
||||
<div
|
||||
className="relative flex cursor-pointer items-center justify-center transition-transform duration-150"
|
||||
|
||||
@ -3,6 +3,7 @@
|
||||
import dynamic from 'next/dynamic';
|
||||
|
||||
import type { DatacenterMarkerData } from './datacenter-marker.js';
|
||||
import type { PowerPlantMarkerData } from './power-plant-marker.js';
|
||||
import type { RegionHeatmapData } from './region-overlay.js';
|
||||
|
||||
const EnergyMap = dynamic(() => import('./energy-map.js').then(m => m.EnergyMap), {
|
||||
@ -17,8 +18,9 @@ const EnergyMap = dynamic(() => import('./energy-map.js').then(m => m.EnergyMap)
|
||||
/** Server-resolved data handed to the client-only map bundle. */
interface EnergyMapLoaderProps {
  datacenters: DatacenterMarkerData[];
  regions: RegionHeatmapData[];
  powerPlants: PowerPlantMarkerData[];
}

/** Thin pass-through wrapper around the dynamically imported EnergyMap. */
export function EnergyMapLoader(props: EnergyMapLoaderProps) {
  const { datacenters, regions, powerPlants } = props;
  return <EnergyMap datacenters={datacenters} regions={regions} powerPlants={powerPlants} />;
}
|
||||
|
||||
@ -1,11 +1,12 @@
|
||||
'use client';
|
||||
|
||||
import { AdvancedMarker, APIProvider, ColorScheme, Map } from '@vis.gl/react-google-maps';
|
||||
import { AdvancedMarker, APIProvider, ColorScheme, ControlPosition, Map, MapControl } from '@vis.gl/react-google-maps';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { DatacenterDetailPanel } from './datacenter-detail-panel.js';
|
||||
import { DatacenterMarker, type DatacenterMarkerData } from './datacenter-marker.js';
|
||||
import { MapControls } from './map-controls.js';
|
||||
import { MapLegend } from './map-legend.js';
|
||||
import { PowerPlantMarker, type PowerPlantMarkerData } from './power-plant-marker.js';
|
||||
import { RegionDetailPanel } from './region-detail-panel.js';
|
||||
import { RegionOverlay, type RegionHeatmapData } from './region-overlay.js';
|
||||
|
||||
@ -35,15 +36,17 @@ function priceToLabelBorderColor(price: number | null): string {
|
||||
interface EnergyMapProps {
|
||||
datacenters: DatacenterMarkerData[];
|
||||
regions: RegionHeatmapData[];
|
||||
powerPlants: PowerPlantMarkerData[];
|
||||
}
|
||||
|
||||
export function EnergyMap({ datacenters, regions }: EnergyMapProps) {
|
||||
export function EnergyMap({ datacenters, regions, powerPlants }: EnergyMapProps) {
|
||||
const apiKey = process.env.NEXT_PUBLIC_GOOGLE_MAPS_API_KEY ?? '';
|
||||
const mapId = process.env.NEXT_PUBLIC_GOOGLE_MAP_ID ?? '';
|
||||
|
||||
const [filteredDatacenters, setFilteredDatacenters] = useState(datacenters);
|
||||
const [selectedDatacenter, setSelectedDatacenter] = useState<DatacenterMarkerData | null>(null);
|
||||
const [selectedRegion, setSelectedRegion] = useState<RegionHeatmapData | null>(null);
|
||||
const [showPowerPlants, setShowPowerPlants] = useState(false);
|
||||
|
||||
const handleDatacenterClick = useCallback((dc: DatacenterMarkerData) => {
|
||||
setSelectedDatacenter(dc);
|
||||
@ -81,7 +84,12 @@ export function EnergyMap({ datacenters, regions }: EnergyMapProps) {
|
||||
return (
|
||||
<APIProvider apiKey={apiKey}>
|
||||
<div className="relative h-full w-full">
|
||||
<MapControls datacenters={datacenters} onFilterChange={handleFilterChange} />
|
||||
<MapControls
|
||||
datacenters={datacenters}
|
||||
onFilterChange={handleFilterChange}
|
||||
showPowerPlants={showPowerPlants}
|
||||
onTogglePowerPlants={setShowPowerPlants}
|
||||
/>
|
||||
|
||||
<Map
|
||||
mapId={mapId}
|
||||
@ -90,6 +98,7 @@ export function EnergyMap({ datacenters, regions }: EnergyMapProps) {
|
||||
gestureHandling="greedy"
|
||||
colorScheme={ColorScheme.DARK}
|
||||
disableDefaultUI={true}
|
||||
clickableIcons={false}
|
||||
className="h-full w-full">
|
||||
<RegionOverlay regions={regions} onRegionClick={handleRegionClick} />
|
||||
|
||||
@ -104,6 +113,8 @@ export function EnergyMap({ datacenters, regions }: EnergyMapProps) {
|
||||
</AdvancedMarker>
|
||||
))}
|
||||
|
||||
{showPowerPlants && powerPlants.map(pp => <PowerPlantMarker key={pp.id} plant={pp} />)}
|
||||
|
||||
{filteredDatacenters.map(dc => {
|
||||
const dcRegion = regions.find(r => r.code === dc.region_code);
|
||||
const isPulsing =
|
||||
@ -113,12 +124,20 @@ export function EnergyMap({ datacenters, regions }: EnergyMapProps) {
|
||||
dcRegion.avgPrice > 0 &&
|
||||
dcRegion.maxPrice > dcRegion.avgPrice * 1.03;
|
||||
return (
|
||||
<DatacenterMarker key={dc.id} datacenter={dc} onClick={handleDatacenterClick} isPulsing={isPulsing} />
|
||||
<DatacenterMarker
|
||||
key={dc.id}
|
||||
datacenter={dc}
|
||||
onClick={handleDatacenterClick}
|
||||
isPulsing={isPulsing}
|
||||
isSelected={selectedDatacenter?.id === dc.id}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</Map>
|
||||
|
||||
<MapLegend />
|
||||
<MapControl position={ControlPosition.LEFT_BOTTOM}>
|
||||
<MapLegend showPowerPlants={showPowerPlants} />
|
||||
</MapControl>
|
||||
</Map>
|
||||
|
||||
<DatacenterDetailPanel datacenter={selectedDatacenter} onClose={() => setSelectedDatacenter(null)} />
|
||||
|
||||
|
||||
@ -7,9 +7,11 @@ import type { DatacenterMarkerData } from './datacenter-marker.js';
|
||||
interface MapControlsProps {
|
||||
datacenters: DatacenterMarkerData[];
|
||||
onFilterChange: (filtered: DatacenterMarkerData[]) => void;
|
||||
showPowerPlants: boolean;
|
||||
onTogglePowerPlants: (show: boolean) => void;
|
||||
}
|
||||
|
||||
export function MapControls({ datacenters, onFilterChange }: MapControlsProps) {
|
||||
export function MapControls({ datacenters, onFilterChange, showPowerPlants, onTogglePowerPlants }: MapControlsProps) {
|
||||
const operators = useMemo(() => {
|
||||
const set = new Set(datacenters.map(d => d.operator));
|
||||
return Array.from(set).sort();
|
||||
@ -105,6 +107,18 @@ export function MapControls({ datacenters, onFilterChange }: MapControlsProps) {
|
||||
className="h-1.5 w-full cursor-pointer appearance-none rounded-full bg-zinc-700 accent-blue-500"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="border-t border-zinc-700/60 pt-2">
|
||||
<label className="flex cursor-pointer items-center gap-2 text-xs font-medium text-zinc-400">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={showPowerPlants}
|
||||
onChange={e => onTogglePowerPlants(e.target.checked)}
|
||||
className="h-3.5 w-3.5 cursor-pointer rounded border-zinc-600 bg-zinc-800 accent-blue-500"
|
||||
/>
|
||||
Show power plants
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@ -1,8 +1,26 @@
|
||||
'use client';
|
||||
|
||||
export function MapLegend() {
|
||||
import { FUEL_TYPE_COLORS } from './power-plant-marker.js';
|
||||
|
||||
interface MapLegendProps {
|
||||
showPowerPlants?: boolean;
|
||||
}
|
||||
|
||||
const FUEL_TYPE_DISPLAY_ORDER = [
|
||||
'Natural Gas',
|
||||
'Coal',
|
||||
'Nuclear',
|
||||
'Hydroelectric',
|
||||
'Wind',
|
||||
'Solar',
|
||||
'Petroleum',
|
||||
'Biomass',
|
||||
'Geothermal',
|
||||
];
|
||||
|
||||
export function MapLegend({ showPowerPlants = false }: MapLegendProps) {
|
||||
return (
|
||||
<div className="absolute right-4 bottom-4 z-10 rounded-lg border border-zinc-700/60 bg-zinc-900/90 p-3 text-xs backdrop-blur-sm">
|
||||
<div className="z-10 rounded-lg border border-zinc-700/60 bg-zinc-900/90 p-3 text-xs backdrop-blur-sm">
|
||||
{/* Price heatmap gradient */}
|
||||
<div className="mb-2.5">
|
||||
<div className="mb-1 font-medium text-zinc-300">Price Heatmap</div>
|
||||
@ -50,13 +68,36 @@ export function MapLegend() {
|
||||
</div>
|
||||
|
||||
{/* Grid stress glow icon */}
|
||||
<div className="flex items-center gap-2">
|
||||
<div className={showPowerPlants ? 'mb-2.5 flex items-center gap-2' : 'flex items-center gap-2'}>
|
||||
<span className="relative flex h-3 w-3">
|
||||
<span className="ambient-glow-slow absolute inline-flex h-full w-full rounded-full bg-red-500/60" />
|
||||
<span className="relative inline-flex h-3 w-3 rounded-full bg-red-500/80" />
|
||||
</span>
|
||||
<span className="text-zinc-400">Grid stress >85%</span>
|
||||
</div>
|
||||
|
||||
{/* Power plant fuel type legend */}
|
||||
{showPowerPlants && (
|
||||
<div className="border-t border-zinc-700/60 pt-2">
|
||||
<div className="mb-1.5 font-medium text-zinc-300">Power Plants</div>
|
||||
<div className="grid grid-cols-2 gap-x-3 gap-y-1">
|
||||
{FUEL_TYPE_DISPLAY_ORDER.map(fuel => (
|
||||
<div key={fuel} className="flex items-center gap-1.5">
|
||||
<div
|
||||
className="h-2.5 w-2.5 shrink-0"
|
||||
style={{
|
||||
backgroundColor: FUEL_TYPE_COLORS[fuel] ?? '#9CA3AF',
|
||||
transform: 'rotate(45deg)',
|
||||
borderRadius: 1,
|
||||
opacity: 0.7,
|
||||
}}
|
||||
/>
|
||||
<span className="truncate text-zinc-400">{fuel}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
83
src/components/map/power-plant-marker.tsx
Normal file
83
src/components/map/power-plant-marker.tsx
Normal file
@ -0,0 +1,83 @@
|
||||
'use client';
|
||||
|
||||
import { AdvancedMarker } from '@vis.gl/react-google-maps';
|
||||
import { useCallback, useState } from 'react';
|
||||
|
||||
const FUEL_TYPE_COLORS: Record<string, string> = {
|
||||
Coal: '#4A4A4A',
|
||||
'Natural Gas': '#F59E0B',
|
||||
Nuclear: '#8B5CF6',
|
||||
Hydroelectric: '#3B82F6',
|
||||
Wind: '#06B6D4',
|
||||
Solar: '#FBBF24',
|
||||
Petroleum: '#78716C',
|
||||
Biomass: '#22C55E',
|
||||
Geothermal: '#EF4444',
|
||||
};
|
||||
|
||||
function getFuelColor(fuelType: string): string {
|
||||
return FUEL_TYPE_COLORS[fuelType] ?? '#9CA3AF';
|
||||
}
|
||||
|
||||
function getDiamondSize(capacityMw: number): number {
|
||||
if (capacityMw >= 2000) return 20;
|
||||
if (capacityMw >= 1000) return 16;
|
||||
if (capacityMw >= 500) return 13;
|
||||
if (capacityMw >= 200) return 10;
|
||||
return 8;
|
||||
}
|
||||
|
||||
/** One power plant, flattened for client-side map rendering. */
export interface PowerPlantMarkerData {
  id: string; // database row id; used as the React key for markers
  plant_code: number; // EIA plant code — NOTE(review): assumed distinct from id, confirm against pipeline
  name: string;
  operator: string;
  capacity_mw: number; // nameplate capacity in MW; drives marker size
  fuel_type: string; // primary fuel label; drives marker color
  state: string; // NOTE(review): presumably a state abbreviation — confirm against the EIA download script
  lat: number;
  lng: number;
}

interface PowerPlantMarkerProps {
  plant: PowerPlantMarkerData;
}
|
||||
|
||||
/**
 * Map marker for a single power plant: a capacity-scaled, fuel-colored
 * diamond with a hover tooltip showing name, fuel type, and capacity.
 */
export function PowerPlantMarker({ plant }: PowerPlantMarkerProps) {
  const [hovered, setHovered] = useState(false);
  // Visual encoding: size from capacity, color from fuel type.
  const size = getDiamondSize(plant.capacity_mw);
  const color = getFuelColor(plant.fuel_type);

  const handleMouseEnter = useCallback(() => setHovered(true), []);
  const handleMouseLeave = useCallback(() => setHovered(false), []);

  return (
    // zIndex 1 keeps plant diamonds below datacenter markers (which use 999/1000 when hovered/selected).
    <AdvancedMarker position={{ lat: plant.lat, lng: plant.lng }} zIndex={1}>
      <div
        className="relative flex items-center justify-center"
        onMouseEnter={handleMouseEnter}
        onMouseLeave={handleMouseLeave}>
        {/* Diamond shape: a square rotated 45 degrees. */}
        <div
          style={{
            width: size,
            height: size,
            backgroundColor: color,
            opacity: 0.7,
            transform: 'rotate(45deg)',
            borderRadius: 2,
          }}
        />
        {/* Hover tooltip anchored above the diamond. */}
        {hovered && (
          <div className="absolute bottom-full left-1/2 z-10 mb-2 -translate-x-1/2 rounded-md bg-zinc-900/95 px-3 py-1.5 text-xs whitespace-nowrap text-zinc-100 shadow-xl">
            <div className="font-semibold">{plant.name}</div>
            <div className="text-zinc-400">
              {plant.fuel_type} · {plant.capacity_mw} MW
            </div>
          </div>
        )}
      </div>
    </AdvancedMarker>
  );
}
|
||||
|
||||
export { FUEL_TYPE_COLORS };
|
||||
@ -373,13 +373,18 @@ export interface GetRetailPriceOptions {
|
||||
* Endpoint: /v2/electricity/retail-sales/data/
|
||||
* Price is returned in cents/kWh; we convert to $/MWh (* 10).
|
||||
*/
|
||||
/** Pre-built reverse lookup: state abbreviation -> RegionCode */
|
||||
const STATE_TO_REGION: ReadonlyMap<string, RegionCode> = (() => {
|
||||
const map = new Map<string, RegionCode>();
|
||||
let key: RegionCode;
|
||||
for (key in REGION_STATE_MAP) {
|
||||
map.set(REGION_STATE_MAP[key], key);
|
||||
}
|
||||
return map;
|
||||
})();
|
||||
|
||||
export async function getRetailElectricityPrices(options: GetRetailPriceOptions = {}): Promise<RetailPricePoint[]> {
|
||||
const stateIds = Object.values(REGION_STATE_MAP);
|
||||
const regionCodes: RegionCode[] = ['PJM', 'ERCOT', 'CAISO', 'NYISO', 'ISONE', 'MISO', 'SPP'];
|
||||
const stateToRegion = new Map<string, RegionCode>();
|
||||
for (const region of regionCodes) {
|
||||
stateToRegion.set(REGION_STATE_MAP[region], region);
|
||||
}
|
||||
|
||||
const params: EiaQueryParams = {
|
||||
frequency: 'monthly',
|
||||
@ -401,7 +406,7 @@ export async function getRetailElectricityPrices(options: GetRetailPriceOptions
|
||||
const results: RetailPricePoint[] = [];
|
||||
for (const row of rows) {
|
||||
if (row.price === null) continue;
|
||||
const regionCode = stateToRegion.get(row.stateid);
|
||||
const regionCode = STATE_TO_REGION.get(row.stateid);
|
||||
if (!regionCode) continue;
|
||||
|
||||
results.push({
|
||||
|
||||
17
src/lib/granularity.ts
Normal file
17
src/lib/granularity.ts
Normal file
@ -0,0 +1,17 @@
|
||||
export type Granularity = 'hourly' | 'daily' | 'weekly';
|
||||
|
||||
const MS_PER_DAY = 86_400_000;
|
||||
|
||||
/**
|
||||
* Select the appropriate data granularity based on the requested time range.
|
||||
*
|
||||
* - <= 7 days: hourly (raw data, ~168 points per region)
|
||||
* - <= 2 years (730 days): daily (materialized view, ~730 points max)
|
||||
* - > 2 years: weekly (materialized view, ~520 points for 10 years)
|
||||
*/
|
||||
export function getGranularity(startDate: Date, endDate: Date): Granularity {
|
||||
const days = (endDate.getTime() - startDate.getTime()) / MS_PER_DAY;
|
||||
if (days <= 7) return 'hourly';
|
||||
if (days <= 730) return 'daily';
|
||||
return 'weekly';
|
||||
}
|
||||
@ -12,6 +12,13 @@ export const EIA_RESPONDENT_CODES = {
|
||||
ISONE: 'ISNE',
|
||||
MISO: 'MISO',
|
||||
SPP: 'SWPP',
|
||||
BPA: 'BPAT',
|
||||
DUKE: 'DUK',
|
||||
SOCO: 'SC',
|
||||
TVA: 'TVA',
|
||||
FPC: 'FPC',
|
||||
WAPA: 'WACM',
|
||||
NWMT: 'NWMT',
|
||||
} as const;
|
||||
|
||||
export type RegionCode = keyof typeof EIA_RESPONDENT_CODES;
|
||||
@ -26,6 +33,13 @@ export const RESPONDENT_TO_REGION: Record<EiaRespondentCode, RegionCode> = {
|
||||
ISNE: 'ISONE',
|
||||
MISO: 'MISO',
|
||||
SWPP: 'SPP',
|
||||
BPAT: 'BPA',
|
||||
DUK: 'DUKE',
|
||||
SC: 'SOCO',
|
||||
TVA: 'TVA',
|
||||
FPC: 'FPC',
|
||||
WACM: 'WAPA',
|
||||
NWMT: 'NWMT',
|
||||
};
|
||||
|
||||
/** Type guard: check if a string is a valid EIA respondent code */
|
||||
@ -141,6 +155,13 @@ export const REGION_STATE_MAP: Record<RegionCode, string> = {
|
||||
NYISO: 'NY',
|
||||
PJM: 'VA',
|
||||
SPP: 'OK',
|
||||
BPA: 'WA',
|
||||
DUKE: 'NC',
|
||||
SOCO: 'GA',
|
||||
TVA: 'TN',
|
||||
FPC: 'FL',
|
||||
WAPA: 'CO',
|
||||
NWMT: 'MT',
|
||||
};
|
||||
|
||||
/** Row from the EIA retail-sales endpoint */
|
||||
|
||||
@ -5,7 +5,22 @@ export function cn(...inputs: ClassValue[]) {
|
||||
return twMerge(clsx(inputs));
|
||||
}
|
||||
|
||||
export const VALID_REGION_CODES = new Set(['PJM', 'ERCOT', 'CAISO', 'NYISO', 'ISONE', 'MISO', 'SPP'] as const);
|
||||
export const VALID_REGION_CODES = new Set([
|
||||
'PJM',
|
||||
'ERCOT',
|
||||
'CAISO',
|
||||
'NYISO',
|
||||
'ISONE',
|
||||
'MISO',
|
||||
'SPP',
|
||||
'BPA',
|
||||
'DUKE',
|
||||
'SOCO',
|
||||
'TVA',
|
||||
'FPC',
|
||||
'WAPA',
|
||||
'NWMT',
|
||||
] as const);
|
||||
|
||||
export function validateRegionCode(code: string): boolean {
|
||||
return code === 'ALL' || VALID_REGION_CODES.has(code);
|
||||
@ -19,6 +34,13 @@ const REGION_TIMEZONES: Record<string, string> = {
|
||||
ISONE: 'America/New_York',
|
||||
MISO: 'America/Chicago',
|
||||
SPP: 'America/Chicago',
|
||||
BPA: 'America/Los_Angeles',
|
||||
DUKE: 'America/New_York',
|
||||
SOCO: 'America/New_York',
|
||||
TVA: 'America/Chicago',
|
||||
FPC: 'America/New_York',
|
||||
WAPA: 'America/Denver',
|
||||
NWMT: 'America/Denver',
|
||||
};
|
||||
|
||||
export function formatMarketTime(utcDate: Date, regionCode: string): string {
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user