Compare commits: 19723739ab...main
10 Commits

| SHA1 |
|---|
| 93d7e6cd4f |
| bfb490b1be |
| 0b921ae933 |
| 8a3fb0545c |
| 9a083094e3 |
| ecde83c953 |
| 406eecc439 |
| 4ad2ded266 |
| ea2b1ad180 |
| 9577e94c4f |
@@ -35,7 +35,7 @@ import sys
 # The zoom level to generate keys for.
 # Level 7 is standard for the "entry" lists in your scraper (e.g. '0320001').
 # If the utility uses a different zoom level for its top-level clusters, adjust this.
-TARGET_ZOOM = 7
+TARGET_ZOOM = 6
 
 # Increase the CSV field size limit to handle massive WKT strings
 import csv
@@ -74,11 +74,13 @@ def generate_keys_from_wkt(wkt_string):
 if service_area.intersects(tile_geom):
 valid_keys.append(mercantile.quadkey(tile))
 
-# 6. Output formatted for Python list
+import json
+
+# 6. Output in JSON format
 valid_keys.sort()
 print(f"\nFound {len(valid_keys)} intersecting tiles.")
 print("-" * 30)
-print(f"KEY_LIST = {valid_keys}")
+print(json.dumps(valid_keys))
 print("-" * 30)
 
 except Exception as e:
@@ -97,4 +99,4 @@ if __name__ == "__main__":
 except EOFError:
 print("Error reading input.")
 except KeyboardInterrupt:
 print("\nCancelled.")
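Note on the change above: the key generator now targets zoom 6 and prints the key list as JSON rather than a Python literal, presumably so the output can be pasted straight into the providers' "quadkeys" arrays further down in this diff. A minimal sketch of the same idea, assuming shapely and mercantile are available and using an illustrative quadkeys_for_wkt helper (the real script reads its WKT from CSV input, per the comments above):

```python
# Sketch only: enumerate the zoom-6 tiles covering a service-area polygon and
# keep the quadkeys of tiles that actually intersect it.
import json
import mercantile
from shapely import wkt
from shapely.geometry import box

TARGET_ZOOM = 6  # matches the new value in the diff

def quadkeys_for_wkt(wkt_string, zoom=TARGET_ZOOM):
    service_area = wkt.loads(wkt_string)
    west, south, east, north = service_area.bounds
    keys = []
    for tile in mercantile.tiles(west, south, east, north, zoom):
        b = mercantile.bounds(tile)  # tile bounding box in lon/lat
        tile_geom = box(b.west, b.south, b.east, b.north)
        if service_area.intersects(tile_geom):
            keys.append(mercantile.quadkey(tile))
    return sorted(keys)

if __name__ == "__main__":
    # Illustrative polygon; the real input is a WKT string from the utility's data.
    print(json.dumps(quadkeys_for_wkt("POLYGON((-83 38, -80 38, -80 40, -83 40, -83 38))")))
```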
27 lsrfunc.py
@@ -1,16 +1,3 @@
-"""
-import polyline
-import simplejson
-import json
-import time
-import datetime
-from geojson import Point, Feature, FeatureCollection, dump
-import re
-import pandas as pd
-from dateutil.parser import parse as date_parse
-import traceback
-"""
-
 import psycopg2
 import psycopg2.extensions
 import requests
@@ -140,14 +127,14 @@ def find_max_reflectivity(lat, lon, start_time, end_time):
 conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
 cursor = conn.cursor()
 
-sql = "update power set vtec = svr.vtec from svr where ST_Contains(svr.nwspoly,power.realgeom) and power.outagen > 5 and power.startguess > svr.issue and power.startguess < svr.endtime + (120 ||'minutes')::interval"
+sql = "update newpower set vtec = svr.vtec from svr where ST_Contains(svr.nwspoly,newpower.geom) and newpower.outagen > 5 and newpower.start_time > svr.issue and newpower.start_time < svr.endtime + (120 ||'minutes')::interval"
 cursor.execute(sql)
 conn.commit()
 
 
 #find potentiall verifying reports and add ids to lsrids in svr
 """
-
+-- This comment block seems to have an unrelated query example.
 UPDATE svr SET lsrids = array_cat( your_array_column, ARRAY[ (SELECT unnest(ARRAY[1, 2, 3]) EXCEPT SELECT unnest(your_array_column)) ] );
 
 
@@ -155,8 +142,8 @@ UPDATE svr SET lsrids = array_cat( your_array_column, ARRAY[ (SELECT unnest(ARRA
 sql = """WITH unique_ids AS (
 SELECT svr.vtec, array_agg(DISTINCT power.id) AS new_ids
 FROM svr
-JOIN power ON svr.vtec = power.vtec
+JOIN newpower power ON svr.vtec = power.vtec
 WHERE EXTRACT(EPOCH FROM (current_timestamp - svr.endtime ))/60/60/24 < 3
 GROUP BY svr.vtec
 )
 UPDATE svr
@@ -171,7 +158,7 @@ cursor.execute(sql)
 conn.commit()
 
 
-cursor.execute("SELECT r.id, r.lat, r.lon, s.issue, s.endtime FROM power r JOIN svr s ON r.vtec = s.vtec WHERE r.vtec IS NOT NULL AND r.lsrref is null and s.issue > '2024-07-25' and outagen > 4 limit 50")
+cursor.execute("SELECT r.id, r.lat, r.lon, s.issue, s.endtime FROM newpower r JOIN svr s ON r.vtec = s.vtec WHERE r.vtec IS NOT NULL AND r.lsrref is null and s.issue > '2024-07-25' and outagen > 4 limit 50")
 
 svrreports = cursor.fetchall()
 
@@ -182,7 +169,7 @@ for i in svrreports:
 start_time = i[3]
 end_time = i[4]
 max_reflectivity, reflectivity_time = find_max_reflectivity(lat, lon, start_time, end_time)
-sql = "UPDATE power set lsrtime = %s, lsrref = %s where id = %s"
+sql = "UPDATE newpower set lsrtime = %s, lsrref = %s where id = %s"
 vals = (reflectivity_time,max_reflectivity,repid)
 print(vals)
 cursor.execute(sql,vals)
@@ -199,5 +186,3 @@ print("Maximum Reflectivity:", max_reflectivity)
 print("Time of Maximum Reflectivity:", reflectivity_time)
 
 """
-
-
64 newpower.py
@@ -81,53 +81,37 @@ class CountyPowerDB:
 self.conn.autocommit = True
 def close(self):
 self.conn.close()
 
-def upsert_and_zero_outages(self, company_name, outage_data, fetch_time):
+def insert_outage_snapshot(self, outage_data, fetch_time):
 """
-Atomically updates outage information for a given company.
-1. UPSERTS counties with active outages, updating their counts.
-2. SETS outage count to 0 for any other county from that company that was not in the active list.
+Inserts a snapshot of county outage data for a given fetch time.
+This creates a historical record of outages.
 """
-
-# Prepare data for counties with active outages
-active_outage_values = []
-reported_counties = []
+all_values = []
 for item in outage_data:
 if all(k in item for k in ['county', 'state', 'company']):
+# Standardize county name to title case (e.g., "MERCER" -> "Mercer")
+county_name = item['county'].title() if isinstance(item['county'], str) else item['county']
 val = (
-item['county'], item['state'], item['company'],
-item.get('outages'), item.get('served'), fetch_time
+item.get('outages'),
+item.get('served'),
+county_name,
+item['state'],
+fetch_time,
+item['company']
 )
-active_outage_values.append(val)
-reported_counties.append(item['county'])
+all_values.append(val)
 
 with self.conn.cursor() as cursor:
-# Step 1: UPSERT active outages
-if active_outage_values:
-upsert_sql = """
-INSERT INTO newcountyoutages (county, state, company, outages, served, fetch_time)
+if all_values:
+# Use a simple INSERT to create a historical record for each run.
+# The column order matches the old power3.py script for the `countyoutages` table.
+sql = """
+INSERT INTO newcountyoutages (outages, served, county, state, fetch_time, company)
 VALUES (%s, %s, %s, %s, %s, %s)
-ON CONFLICT (county, state, company) DO UPDATE SET
-outages = EXCLUDED.outages,
-served = COALESCE(EXCLUDED.served, newcountyoutages.served),
-fetch_time = EXCLUDED.fetch_time;
 """
-cursor.executemany(upsert_sql, active_outage_values)
-logger.info(f"Upserted {len(active_outage_values)} active outage records for {company_name}.")
+cursor.executemany(sql, all_values)
+logger.info(f"Inserted {len(all_values)} county outage records for this run.")
 
-# Step 2: Set outages to 0 for any other county from this company
-# This correctly creates point-in-time zero records by updating the fetch_time.
-zero_out_sql = """
-UPDATE newcountyoutages
-SET outages = 0, fetch_time = %s
-WHERE company = %s AND county NOT IN %s;
-"""
-# Ensure reported_counties is not empty to avoid "IN (NULL)"
-if not reported_counties:
-reported_counties.append("NO_COUNTIES_REPORTED_DUMMY_VALUE")
-
-cursor.execute(zero_out_sql, (fetch_time, company_name, tuple(reported_counties)))
-logger.info(f"Zeroed out {cursor.rowcount} resolved outage records for {company_name}.")
-
 def run_post_processing(self):
 logger.info("Running post-processing for county data...")
@@ -176,8 +160,8 @@ def main():
 outages = provider.fetch()
 logger.info(f"Found {len(outages)} active outage records for {p_name}.")
 
-# Process this company's data in a single transaction
-db.upsert_and_zero_outages(p_name, outages, run_timestamp)
+# Insert all collected outages as a new snapshot for this run time.
+db.insert_outage_snapshot(outages, run_timestamp)
 
 except Exception as e:
 logger.error(f"Error processing {p_name}: {e}")
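The rewritten insert_outage_snapshot above stops upserting and zeroing rows in place and instead appends one row per county per run, keyed by fetch_time. Readers then take the newest snapshot; the newpowerapi.php and newsearchapi.php endpoints later in this diff do exactly that with a latest_fetch CTE. A minimal sketch of that read path, assuming psycopg2 and placeholder connection settings:

```python
# Sketch only: with append-only snapshots, the "current" county outage picture
# is whatever rows share the most recent fetch_time.
import psycopg2

LATEST_SNAPSHOT_SQL = """
    WITH latest_fetch AS (
        SELECT MAX(fetch_time) AS max_fetch_time FROM newcountyoutages
    )
    SELECT n.county, n.state, n.company, n.outages, n.served, n.fetch_time
    FROM newcountyoutages n, latest_fetch
    WHERE n.fetch_time = latest_fetch.max_fetch_time
    ORDER BY n.state, n.county;
"""

def latest_county_snapshot(conn):
    with conn.cursor() as cur:
        cur.execute(LATEST_SNAPSHOT_SQL)
        return cur.fetchall()

if __name__ == "__main__":
    # Placeholder credentials, mirroring the connection used in lsrfunc.py above.
    conn = psycopg2.connect(host="localhost", dbname="nws", user="nws", password="nws")
    for row in latest_county_snapshot(conn):
        print(row)
    conn.close()
```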
70 newpower2.py
@@ -96,13 +96,57 @@ class PowerDB:
 self.conn.close()
 
 def upsert_outage(self, data):
+incident_id_from_data = data.get('incidentid')
+utility_name = data.get('utility')
+lat = data.get('lat')
+lon = data.get('lon')
+start_time = data.get('start') # This is expected to be a datetime object
+
+unique_outage_key = None
+
+# Prioritize incidentid if it seems valid and is a string
+# Check for common unreliable/placeholder values
+if incident_id_from_data and isinstance(incident_id_from_data, str) and \
+incident_id_from_data.strip() not in ["0", "unknown", "null", "N/A", ""]:
+unique_outage_key = f"{utility_name}_{incident_id_from_data.strip()}"
+elif lat is not None and lon is not None and utility_name is not None:
+# Fallback to a synthesized key based on location and recency
+try:
+# Round lat/lon to 4 decimal places (approx 11 meters precision)
+rounded_lat = round(float(lat), 4)
+rounded_lon = round(float(lon), 4)
+
+# If start_time is missing, use the current hour as a fallback bucket.
+# This groups recent, location-similar outages without a start time together.
+if start_time:
+time_bucket = start_time.strftime('%Y%m%d%H') # YYYYMMDDHH
+else:
+time_bucket = datetime.now(timezone.utc).strftime('%Y%m%d%H')
+
+unique_outage_key = f"{utility_name}_{rounded_lat}_{rounded_lon}_{time_bucket}"
+except (ValueError, TypeError) as e:
+logger.error(f"Error synthesizing unique_outage_key from lat/lon/start_time: {e}. Data: {data}")
+raise ValueError("Failed to synthesize unique_outage_key due to missing or malformed data.")
+else:
+logger.error(f"Insufficient data to create a unique_outage_key (missing incidentid, or lat/lon/utility). Data: {data}")
+raise ValueError("Insufficient data to create a unique_outage_key.")
+
+if unique_outage_key is None:
+logger.error(f"Failed to generate a unique_outage_key for data: {data}")
+raise ValueError("Unique outage key could not be generated.")
+
+# Ensure the utility name is consistently passed in the data dictionary
+if utility_name is None:
+logger.warning(f"Utility name missing in outage data for incident {incident_id_from_data}. Using 'UNKNOWN'.")
+utility_name = "UNKNOWN"
+
 sql = """
 INSERT INTO newpower
 (incidentid, utility, lat, lon, pointgeom, areageom, start_time, etr,
-outagen, peakoutage, cause, crew_status, active, last_change, fetch_time, geom)
+outagen, peakoutage, cause, crew_status, active, last_change, fetch_time, geom, unique_outage_key)
 VALUES
-(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326))
-ON CONFLICT (pointgeom) DO UPDATE SET
+(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326), %s)
+ON CONFLICT (unique_outage_key) DO UPDATE SET
 outagen = EXCLUDED.outagen,
 peakoutage = GREATEST(newpower.peakoutage, EXCLUDED.outagen),
 cause = EXCLUDED.cause,
@@ -110,17 +154,23 @@ class PowerDB:
 crew_status = EXCLUDED.crew_status,
 last_change = EXCLUDED.last_change,
 fetch_time = EXCLUDED.fetch_time,
-active = TRUE
+active = TRUE,
+lat = EXCLUDED.lat,
+lon = EXCLUDED.lon,
+pointgeom = EXCLUDED.pointgeom,
+areageom = EXCLUDED.areageom,
+geom = EXCLUDED.geom
 """
 peak = data.get('outagen', 0)
 # Clean areageom before insertion, referencing old power2.py logic
 areageom = remove_external_curly_braces(data.get('areageom'))
+current_time = datetime.now(timezone.utc)
 params = (
-data.get('incidentid'), data.get('utility'), data.get('lat'), data.get('lon'),
-data.get('pointgeom'), areageom, data.get('start'), data.get('etr'),
+data.get('incidentid'), utility_name, lat, lon,
+data.get('pointgeom'), areageom, start_time, data.get('etr'),
 data.get('outagen'), peak, data.get('cause'), data.get('crew_status'),
-True, data.get('last_change', datetime.now(timezone.utc)), datetime.now(timezone.utc),
-data.get('lon'), data.get('lat')
+True, current_time, current_time, # last_change, fetch_time (always update last_change when seen in current fetch)
+lon, lat, unique_outage_key # geom, unique_outage_key
 )
 with self.conn.cursor() as cursor:
 cursor.execute(sql, params)
@@ -132,8 +182,8 @@ class PowerDB:
 cursor.execute('UPDATE newpower SET state = c.state FROM public.county c WHERE ST_Contains(c.geom, newpower.geom) AND newpower.state IS NULL')
 cursor.execute('UPDATE newpower SET cwa = f.cwa FROM public.fzone f WHERE ST_Contains(f.geom, newpower.geom) AND newpower.cwa IS NULL')
 cursor.execute('UPDATE newpower SET realareageom = ST_LineFromEncodedPolyline(areageom) WHERE areageom IS NOT NULL AND realareageom IS NULL')
-cursor.execute("UPDATE newpower SET active = TRUE WHERE fetch_time > NOW() - INTERVAL '30 minutes'")
-cursor.execute("UPDATE newpower SET active = FALSE WHERE fetch_time < NOW() - INTERVAL '30 minutes'")
+cursor.execute("UPDATE newpower SET active = TRUE WHERE last_change > NOW() - INTERVAL '30 minutes'")
+cursor.execute("UPDATE newpower SET active = FALSE WHERE last_change < NOW() - INTERVAL '30 minutes'")
 cursor.execute("DELETE FROM newpower WHERE fetch_time < NOW() - INTERVAL '365 days'")
 logger.info("Post-processing complete.")
 
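upsert_outage now conflicts on unique_outage_key instead of pointgeom: a provider incident id when one looks real, otherwise a synthesized utility + rounded-location + hour-bucket key. A standalone sketch of that fallback, with illustrative names (the real method works on the data dict shown above):

```python
# Sketch only: mirrors the key-synthesis logic from the diff; names are illustrative.
from datetime import datetime, timezone

PLACEHOLDER_IDS = {"0", "unknown", "null", "N/A", ""}

def synthesize_outage_key(utility, incident_id=None, lat=None, lon=None, start=None):
    """Prefer a provider incident id; otherwise bucket by ~11 m location and hour."""
    if isinstance(incident_id, str) and incident_id.strip() not in PLACEHOLDER_IDS:
        return f"{utility}_{incident_id.strip()}"
    if lat is None or lon is None or utility is None:
        raise ValueError("Insufficient data to create a unique outage key.")
    bucket_time = start or datetime.now(timezone.utc)   # fall back to current hour
    time_bucket = bucket_time.strftime("%Y%m%d%H")      # YYYYMMDDHH
    return f"{utility}_{round(float(lat), 4)}_{round(float(lon), 4)}_{time_bucket}"

# Example: two reports from the same (hypothetical) utility, at the same rounded
# location and hour, collapse onto one key and therefore one ON CONFLICT target.
print(synthesize_outage_key("ExampleUtility", None, 38.34951, -81.63262,
                            datetime(2025, 12, 10, 6, 15, tzinfo=timezone.utc)))
```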
144 php/main.php (new file)
@@ -0,0 +1,144 @@
+<?php
+/**
+* Unified API Gateway
+*
+* This file serves as a single entry point for all API services.
+* Each route is identified by the 'service' parameter.
+*
+* Available routes:
+* - service=cams: Returns active camera information
+* - service=camapi: Returns camera API endpoints with filtering
+* - service=camlist: Returns camera list with active status
+* - service=admin: Admin operations for cameras
+* - service=camcircle: Returns camera coverage circles
+* - service=db: Returns weather station data
+* - service=fire: Returns fire information
+* - service=individualcam: Returns individual camera images
+* - service=lsr: Returns local storm reports
+* - service=nws: Returns NWS personnel stats
+* - service=powerapi: Returns power outage information
+* - service=searchapi: Returns search results for power outages
+* - service=ohgo: Returns Ohio traffic information
+* - service=newpower: Returns power outage information from the new schema
+* - service=newpowerapi: Returns power outage information from the new schema
+* - service=newpowerapitest: Returns extended power outage testing data from the new schema
+* - service=newsearchapi: Returns search results for power outages from the new schema
+* - service=newstormdata: Returns storm data from the new schema
+* - service=power: Returns power outage information
+* - service=stormdata: Returns storm data
+* - service=warntrack: Returns warning tracking data
+* - service=ver: Returns version information
+* - service=update_field: Updates table fields
+* - service=mp4: Returns MP4 video information
+* - service=camobs: Returns camera observations with radius and bbox filtering
+* - service=single: Returns single camera information by camid
+* - service=powerapitest: Returns extended power outage testing data
+*
+* Migration instructions:
+* To migrate from the old scripts to the new unified API:
+*
+* Old endpoint: cam.php
+* New endpoint: main.php?service=cams
+*
+* Old endpoint: camapi.php?cams&lat1=...&lon1=...&lat2=...&lon2=...
+* New endpoint: main.php?service=camapi&cams&lat1=...&lon1=...&lat2=...&lon2=...
+*
+* Old endpoint: camlist.php
+* New endpoint: main.php?service=camlist
+*
+* Old endpoint: admin.php?action=...
+* New endpoint: main.php?service=admin&action=...
+*
+* Old endpoint: camcircle.php
+* New endpoint: main.php?service=camcircle
+*
+* Old endpoint: camobs.php
+* New endpoint: main.php?service=camobs
+*
+* Old endpoint: single.php?camid=...
+* New endpoint: main.php?service=single&camid=...
+*
+* Old endpoint: powerapitest.php
+* New endpoint: main.php?service=powerapitest
+*
+* Old endpoint: db.php
+* New endpoint: main.php?service=db
+*
+* Old endpoint: db.php?outside
+* New endpoint: main.php?service=db&outside
+*
+* Old endpoint: fire.php
+* New endpoint: main.php?service=fire
+*
+* Old endpoint: individualcam.php?camid=...
+* New endpoint: main.php?service=individualcam&camid=...
+*
+* Old endpoint: lsr.php
+* New endpoint: main.php?service=lsr
+*
+* Old endpoint: lsr.php?ohgo
+* New endpoint: main.php?service=lsr&ohgo
+*
+* Old endpoint: nws.php
+* New endpoint: main.php?service=nws
+*
+* Old endpoint: nws.php?officestats
+* New endpoint: main.php?service=nws&officestats
+*
+* Old endpoint: powerapi.php
+* New endpoint: main.php?service=powerapi
+*
+* Old endpoint: powerapi.php?states
+* New endpoint: main.php?service=powerapi&states
+*
+* Old endpoint: searchapi.php
+* New endpoint: main.php?service=searchapi
+*
+* Old endpoint: ohgo.php
+* New endpoint: main.php?service=ohgo
+*
+* Old endpoint: power.php
+* New endpoint: main.php?service=power
+*
+* Old endpoint: stormdata.php
+* New endpoint: main.php?service=stormdata
+*
+* Old endpoint: warntrack.php
+* New endpoint: main.php?service=warntrack
+*
+* Old endpoint: ver.php
+* New endpoint: main.php?service=ver
+*
+* Old endpoint: update_field.php?table=...&field=...&value=...&where=...
+* New endpoint: main.php?service=update_field&table=...&field=...&value=...&where=...
+*
+* Old endpoint: mp4.php?camid=...
+* New endpoint: main.php?service=mp4&camid=...
+*/
+
+// Get the service parameter to determine which function to execute
+$service = $_GET['service'] ?? 'default';
+
+// Route to the appropriate service file
+$serviceFile = __DIR__ . '/php/' . $service . '.php';
+
+if (file_exists($serviceFile)) {
+require_once $serviceFile;
+} else {
+// Default behavior - show available services if no valid service provided
+header('Content-Type: application/json');
+http_response_code(400);
+echo json_encode([
+'error' => 'Invalid service parameter. Please provide a valid service.',
+'available_services' => [
+'cams', 'camapi', 'camlist', 'admin', 'camcircle',
+'db', 'fire', 'individualcam', 'lsr', 'nws',
+'powerapi', 'searchapi', 'ohgo', 'power', 'newpower',
+'newpowerapi', 'newpowerapitest', 'newsearchapi', 'newstormdata',
+'stormdata', 'warntrack', 'ver', 'update_field', 'mp4',
+'camobs', 'single', 'powerapitest'
+],
+'documentation' => 'See main.php file for detailed documentation on each service.'
+]);
+}
+?>
25 php/newpower.php (new file)
@@ -0,0 +1,25 @@
+<?php
+require_once __DIR__ . '/common.php';
+
+header('Content-Type: application/json');
+$dbconn = get_db_connection();
+
+try {
+$query = "SELECT lat, lon, outagen FROM newpower WHERE active = true AND cwa = 'RLX'";
+$result = pg_query($dbconn, $query);
+
+if ($result === false) {
+throw new Exception('Query failed: ' . pg_last_error());
+}
+
+$data = pg_fetch_all($result) ?: [];
+echo json_encode($data);
+
+pg_free_result($result);
+} catch (Exception $e) {
+http_response_code(500);
+echo json_encode(['error' => $e->getMessage()]);
+} finally {
+pg_close($dbconn);
+}
+?>
98 php/newpowerapi.php (new file)
@@ -0,0 +1,98 @@
+<?php
+require_once __DIR__ . '/common.php';
+
+header('Content-Type: application/json');
+$dbconn = get_db_connection();
+
+// Default endpoint: Get current point outages
+if (empty($_GET)) {
+try {
+$query = "
+SELECT json_build_object(
+'type', 'FeatureCollection',
+'features', json_agg(
+json_build_object(
+'type', 'Feature',
+'geometry', ST_AsGeoJSON(geom)::json,
+'properties', json_build_object(
+'time', start_time,
+'county', county,
+'state', state,
+'outage', outagen,
+'lastchange', last_change,
+'cause', cause,
+'area_geometry', ST_AsGeoJSON(COALESCE(realareageom, geom))::json
+)
+)
+ORDER BY start_time ASC
+)
+) as geojson
+FROM newpower
+WHERE cwa = $1 AND active = true
+";
+
+$result = pg_query_params($dbconn, $query, array('RLX'));
+if ($result === false) {
+throw new Exception('Query failed: ' . pg_last_error());
+}
+
+$resultArray = pg_fetch_assoc($result);
+
+if ($resultArray && $resultArray['geojson']) {
+echo $resultArray['geojson'];
+} else {
+echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
+}
+
+pg_free_result($result);
+} catch (Exception $e) {
+http_response_code(500);
+die(json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]));
+}
+}
+
+// Get current county outages
+if (isset($_GET['county'])) {
+try {
+$query = "
+WITH latest_fetch AS (
+SELECT MAX(fetch_time) as max_fetch_time FROM newcountyoutages
+)
+SELECT
+n.county,
+n.state,
+n.outages as outage,
+n.fetch_time as time,
+n.served,
+CASE
+WHEN n.served > 0 THEN ROUND(CAST((n.outages::FLOAT / n.served) * 100 AS NUMERIC), 2)
+ELSE 0
+END as perout
+FROM newcountyoutages n, latest_fetch
+WHERE n.fetch_time = latest_fetch.max_fetch_time
+AND n.cwa = $1
+";
+
+$result = pg_query_params($dbconn, $query, ['RLX']);
+if ($result === false) {
+throw new Exception('Query failed: ' . pg_last_error());
+}
+
+$results = pg_fetch_all($result) ?: [];
+echo json_encode($results);
+
+pg_free_result($result);
+} catch (Exception $e) {
+http_response_code(500);
+echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
+}
+}
+
+// Note: Other endpoints from the original powerapi.php can be migrated here as needed,
+// such as 'states', 'max', 'countyarchive', 'archivepoint', 'svr', 'svrpolys',
+// 'powerids', 'poweridsgeojson', and 'polygongeojson'.
+// The queries would need to be updated to use the 'newpower' and 'newcountyoutages' tables
+// and their corresponding columns (e.g., start_time, geom, fetch_time).
+
+pg_close($dbconn);
+?>
47 php/newpowerapitest.php (new file)
@@ -0,0 +1,47 @@
+<?php
+require_once __DIR__ . '/common.php';
+
+header('Content-Type: application/json');
+$dbconn = get_db_connection();
+
+if (isset($_GET['county'])) {
+try {
+$cwas = ['RLX','JKL','ILN','PBZ','MRX','LWX','RNK'];
+$placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($cwas))));
+
+$query = "
+WITH latest_fetch AS (
+SELECT MAX(fetch_time) as max_fetch_time FROM newcountyoutages
+)
+SELECT
+n.county,
+n.state,
+n.outages as outage,
+n.fetch_time as time,
+n.served,
+CASE
+WHEN n.served > 0 THEN ROUND(CAST((n.outages::FLOAT / n.served) * 100 AS NUMERIC), 2)
+ELSE 0
+END as perout
+FROM newcountyoutages n, latest_fetch
+WHERE n.fetch_time = latest_fetch.max_fetch_time
+AND n.cwa IN ($placeholders)
+";
+
+$result = pg_query_params($dbconn, $query, $cwas);
+if ($result === false) {
+throw new Exception('Query failed: ' . pg_last_error());
+}
+
+$results = pg_fetch_all($result) ?: [];
+echo json_encode($results);
+
+pg_free_result($result);
+} catch (Exception $e) {
+http_response_code(500);
+echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
+}
+}
+
+pg_close($dbconn);
+?>
95 php/newsearchapi.php (new file)
@@ -0,0 +1,95 @@
+<?php
+require_once __DIR__ . '/common.php';
+
+header('Content-Type: application/json');
+$dbconn = get_db_connection();
+
+// Default endpoint: Get current point outages from newpower table
+if (empty(array_diff_key($_GET, array('service' => '')))) {
+try {
+$query = "
+SELECT json_build_object(
+'type', 'FeatureCollection',
+'features', json_agg(
+json_build_object(
+'type', 'Feature',
+'geometry', ST_AsGeoJSON(geom)::json,
+'properties', json_build_object(
+'time', start_time,
+'county', county,
+'state', state,
+'outage', outagen,
+'lastchange', last_change,
+'cause', cause
+)
+)
+ORDER BY start_time ASC
+)
+) as geojson
+FROM newpower
+WHERE cwa = $1 AND active = true
+";
+
+$result = pg_query_params($dbconn, $query, array('RLX'));
+if ($result === false) {
+throw new Exception('Query failed: ' . pg_last_error());
+}
+
+$resultArray = pg_fetch_assoc($result);
+
+if ($resultArray && $resultArray['geojson']) {
+echo $resultArray['geojson'];
+} else {
+echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
+}
+
+pg_free_result($result);
+} catch (Exception $e) {
+http_response_code(500);
+die(json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]));
+}
+}
+
+// Get current county outages from newcountyoutages table
+if (isset($_GET['county'])) {
+try {
+$query = "
+WITH latest_fetch AS (
+SELECT MAX(fetch_time) as max_fetch_time FROM newcountyoutages
+)
+SELECT
+n.fetch_time as time,
+n.county,
+n.state,
+n.outages as outage,
+n.served
+FROM newcountyoutages n, latest_fetch
+WHERE n.fetch_time = latest_fetch.max_fetch_time
+AND n.cwa = $1
+ORDER BY n.county, n.state
+";
+
+$result = pg_query_params($dbconn, $query, ['RLX']);
+if ($result === false) {
+throw new Exception('Query failed: ' . pg_last_error());
+}
+
+$results = pg_fetch_all($result) ?: [];
+echo json_encode($results);
+
+pg_free_result($result);
+} catch (Exception $e) {
+http_response_code(500);
+echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
+}
+}
+
+// Note: The 'countyarchive', 'archivepoint', and 'svr' endpoints from the original
+// searchapi.php can be migrated here. The queries would need to be updated to use
+// the 'newpower' and 'newcountyoutages' tables and their corresponding columns
+// (e.g., start_time, geom, fetch_time).
+
+pg_close($dbconn);
+?>
141 php/newstormdata.php (new file)
@@ -0,0 +1,141 @@
+<?php
+require_once __DIR__ . '/common.php';
+
+/**
+* Sends a JSON error response and terminates the script.
+* @param int $code HTTP status code.
+* @param string $message User-facing error message.
+* @param string|null $log_message Optional message for the server log.
+*/
+function send_error($code, $message, $log_message = null) {
+http_response_code($code);
+header('Content-Type: application/json; charset=utf-8');
+if ($log_message) {
+error_log($log_message);
+}
+echo json_encode(['error' => $message]);
+exit;
+}
+
+/**
+* Handles power outage requests for the new schema.
+* @param resource $dbconn The database connection.
+* @param array $input_data The decoded JSON input data.
+*/
+function handle_new_power_request($dbconn, $input_data) {
+$poly = $input_data['poly'] ?? null;
+$start = $input_data['start'] ?? null;
+$end = $input_data['end'] ?? null;
+
+if (!$poly || !$start || !$end) {
+send_error(400, 'Missing required fields: poly, start, and end are required for power requests.');
+}
+
+$query = "
+SELECT
+SUM(p.outagen) as total_outages,
+COUNT(p.id) as outage_events,
+SUM(p.peakoutage) as peak_outages
+FROM newpower p
+WHERE ST_Within(p.geom, ST_GeomFromText($1, 4326))
+AND p.start_time >= $2
+AND p.start_time <= $3
+";
+
+try {
+$result = pg_query_params($dbconn, $query, [$poly, $start, $end]);
+if ($result === false) {
+throw new Exception('Database query failed: ' . pg_last_error($dbconn));
+}
+$data = pg_fetch_assoc($result);
+pg_free_result($result);
+
+header('Content-Type: application/json');
+echo json_encode($data ?: new stdClass()); // Return empty JSON object if no results
+
+} catch (Exception $e) {
+send_error(500, 'An error occurred while processing the power request.', $e->getMessage());
+}
+}
+
+
+if ($_SERVER['REQUEST_METHOD'] === 'POST') {
+$input_data = null;
+$request_type = null;
+$contentType = trim(strtolower($_SERVER['HTTP_CONTENT_TYPE'] ?? $_SERVER['CONTENT_TYPE'] ?? ''));
+
+if (strpos($contentType, 'application/json') === 0) {
+$raw_post_data = file_get_contents('php://input');
+
+if ($raw_post_data === false || $raw_post_data === '') {
+send_error(400, 'Received empty request body or could not read input.', "Error: Could not read php://input or it was empty.");
+}
+
+$input_data = json_decode($raw_post_data, true);
+
+if (json_last_error() !== JSON_ERROR_NONE) {
+send_error(400, 'Invalid JSON payload received.', 'JSON Decode Error: ' . json_last_error_msg() . " | Raw data snippet: " . substr($raw_post_data, 0, 100));
+} elseif (!is_array($input_data)) {
+send_error(400, 'Invalid JSON payload: Expected a JSON object.', "JSON Decode Warning: Result is not an array. Data: " . print_r($input_data, true));
+} else {
+$request_type = $input_data['request_type'] ?? null;
+}
+} else {
+send_error(415, 'Unsupported Media Type. This endpoint requires application/json.', "Unsupported Media Type Received: " . $contentType);
+}
+
+if ($request_type === null) {
+if (is_array($input_data) && !isset($input_data['request_type'])) {
+send_error(400, 'Missing "request_type" field within the request payload.');
+} else {
+send_error(400, 'Missing required parameter: request_type (or processing error).');
+}
+}
+
+$dbconn = get_db_connection();
+
+switch ($request_type) {
+// Retaining legacy endpoints from stormdata.php but pointing to new handlers if needed
+// For now, only implementing the 'power' endpoint for the new schema
+case 'power':
+handle_new_power_request($dbconn, $input_data);
+break;
+// The 'powernopoly' case from the original file can be implemented here if needed.
+// It would be similar to handle_new_power_request but without the ST_Within clause.
+
+/*
+// Legacy endpoints can be added here if they need to be migrated.
+case 'ohgo':
+// handle_ohgo_request($dbconn, $input_data);
+send_error(501, 'The "ohgo" request type is not yet implemented for newstormdata.');
+break;
+case 'ohgonopoly':
+// handle_ohgo_request_no_poly($dbconn, $input_data);
+send_error(501, 'The "ohgonopoly" request type is not yet implemented for newstormdata.');
+break;
+case 'wupoly':
+// handle_wu_request_poly($dbconn, $input_data);
+send_error(501, 'The "wupoly" request type is not yet implemented for newstormdata.');
+break;
+case 'campoly':
+// handle_cam_request($dbconn, $input_data);
+send_error(501, 'The "campoly" request type is not yet implemented for newstormdata.');
+break;
+*/
+default:
+send_error(400, 'Invalid request_type specified: ' . htmlspecialchars($request_type));
+break;
+}
+
+pg_close($dbconn);
+
+} else {
+http_response_code(405);
+header('Allow: POST');
+header('Content-Type: application/json; charset=utf-8');
+echo json_encode(['error' => 'Invalid request method. Only POST is allowed.']);
+exit;
+}
+?>
@@ -4,7 +4,20 @@
 "type": "kubra",
 "meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false",
 "layer": "cluster-2",
-"quadkeys": [ "0320001", "0320003", "0320010", "0320011", "0320012", "0320013", "0320021", "0320030", "0320031", "0320100", "0320102", "0320120" ],
+"quadkeys": [
+"0320001",
+"0320003",
+"0320010",
+"0320011",
+"0320012",
+"0320013",
+"0320021",
+"0320030",
+"0320031",
+"0320100",
+"0320102",
+"0320120"
+],
 "county_type": "kubra_county",
 "county_meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false",
 "county_report_suffix": "/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json"
@@ -14,7 +27,20 @@
 "type": "kubra",
 "meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false",
 "layer": "cluster-1",
-"quadkeys": [ "0320013", "0320010", "0320011", "0320012", "0320003", "0320001", "0302322", "0302233", "0302232", "0302223", "0320102", "0320100" ],
+"quadkeys": [
+"0320013",
+"0320010",
+"0320011",
+"0320012",
+"0320003",
+"0320001",
+"0302322",
+"0302233",
+"0302232",
+"0302223",
+"0320102",
+"0320100"
+],
 "county_type": "kubra_county",
 "county_meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false",
 "county_report_suffix": "/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json"
@@ -24,7 +50,17 @@
 "type": "kubra",
 "meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false",
 "layer": "cluster-2",
-"quadkeys": [ "0320031", "0320030", "0320021", "0320013", "0320012", "0320011", "0320010", "0320003", "0320001" ],
+"quadkeys": [
+"0320031",
+"0320030",
+"0320021",
+"0320013",
+"0320012",
+"0320011",
+"0320010",
+"0320003",
+"0320001"
+],
 "county_type": "kubra_county",
 "county_meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false",
 "county_report_suffix": "/public/reports/03da18cc-51e0-4662-98e5-e9d49dcf08c8_report.json",
@@ -35,7 +71,7 @@
 "type": "kubra",
 "meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false",
 "layer": "cluster-4",
-"quadkeys": [ "030223", "030232", "032001", "032003", "032010", "032012" ],
+"quadkeys": ["032001", "032010", "032023", "032030", "032031", "032032", "032033", "032102", "032120", "032122", "032210", "032211", "032300"],
 "county_type": "kubra_county",
 "county_meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false",
 "county_report_suffix": "/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json",
@@ -43,7 +79,7 @@
 },
 {
 "name": "SouthCentral",
 "type": "simple_json",
 "url": "https://outage.southcentralpower.com/data/outages.json",
 "county_type": "simple_county_json",
 "county_url": "https://outage.southcentralpower.com/data/boundaries.json",
@@ -51,15 +87,15 @@
 },
 {
 "name": "Grayson",
 "type": "simple_json",
 "url": "https://outages.graysonrecc.com/data/outages.json",
 "county_type": "simple_county_json",
 "county_url": "https://outages.graysonrecc.com/data/boundaries.json",
 "state_filter": "KY"
 },
 {
 "name": "Sandy",
 "type": "simple_json",
 "url": "https://outagemap.bigsandyrecc.com/data/outages.json",
 "county_type": "simple_county_json",
 "county_url": "https://outagemap.bigsandyrecc.com/data/boundaries.json",
@@ -100,14 +136,14 @@
 },
 "body": "7|0|4|https://weci.ebill.coop/woViewer/MapWiseWeb/|612278413EC26C34D54A3907AA0CDFD8|coop.nisc.oms.webmap.services.RpcCombinedOutageDetailsService|getCombinedOutageDetails|1|2|3|4|0|",
 "county_type": "gwt_rpc_county",
-"last_auto_update": "2025-12-07T03:56:27.722877+00:00",
+"last_auto_update": "2025-12-10T06:35:27.651117+00:00",
 "cookies": [
 {
 "name": "__utma",
-"value": "105963909.1267880890.1765079787.1765079787.1765079787.1",
+"value": "105963909.968371538.1765348516.1765348516.1765348516.1",
 "domain": ".weci.ebill.coop",
 "path": "/",
-"expires": 1799639786.874286,
+"expires": 1799908516.478746,
 "httpOnly": false,
 "secure": false,
 "sameSite": "Lax"
@@ -124,10 +160,10 @@
 },
 {
 "name": "__utmz",
-"value": "105963909.1765079787.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
+"value": "105963909.1765348516.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
 "domain": ".weci.ebill.coop",
 "path": "/",
-"expires": 1780847786,
+"expires": 1781116516,
 "httpOnly": false,
 "secure": false,
 "sameSite": "Lax"
@@ -137,7 +173,7 @@
 "value": "1",
 "domain": ".weci.ebill.coop",
 "path": "/",
-"expires": 1765080386,
+"expires": 1765349116,
 "httpOnly": false,
 "secure": false,
 "sameSite": "Lax"
@@ -147,37 +183,37 @@
 "value": "1",
 "domain": ".weci.ebill.coop",
 "path": "/",
-"expires": 1765080386,
+"expires": 1765349116,
 "httpOnly": false,
 "secure": false,
 "sameSite": "Lax"
 },
 {
 "name": "__utmb",
-"value": "105963909.2.10.1765079787",
+"value": "105963909.2.10.1765348516",
 "domain": ".weci.ebill.coop",
 "path": "/",
-"expires": 1765081586,
+"expires": 1765350316,
 "httpOnly": false,
 "secure": false,
 "sameSite": "Lax"
 },
 {
 "name": "__utma",
-"value": "105963909.1267880890.1765079787.1765079787.1765079787.1",
+"value": "105963909.968371538.1765348516.1765348516.1765348516.1",
 "domain": "weci.ebill.coop",
 "path": "/",
-"expires": 1799639786.87497,
+"expires": 1799908516.486623,
 "httpOnly": false,
 "secure": false,
 "sameSite": "Lax"
 },
 {
 "name": "__utmb",
-"value": "105963909.3.9.1765079787",
+"value": "105963909.3.9.1765348516",
 "domain": "weci.ebill.coop",
 "path": "/",
-"expires": 1765081586,
+"expires": 1765350316,
 "httpOnly": false,
 "secure": false,
 "sameSite": "Lax"
@@ -194,10 +230,10 @@
 },
 {
 "name": "__utmz",
-"value": "105963909.1765079787.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
+"value": "105963909.1765348516.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
 "domain": "weci.ebill.coop",
 "path": "/",
-"expires": 1780847786,
+"expires": 1781116516,
 "httpOnly": false,
 "secure": false,
 "sameSite": "Lax"

Binary file not shown.
@@ -144,15 +144,25 @@ class GwtRpcCountyProvider(GwtRpcBaseProvider, BaseCountyProvider):
 if stream[i] == region_type_id:
 try:
 p = i + 1
-served = stream[p] if stream[p+1] == integer_type_id else 0
-p += 2 if served > 0 else 1
-out = stream[p] if stream[p+1] == integer_type_id else 0
-p += 2 if out > 0 else 1
+served = 0
+out = 0
+
+# Check for served customers. Only advance pointer if found.
+if p + 1 < len(stream) and stream[p+1] == integer_type_id:
+served = stream[p]
+p += 2
+
+# Check for customers out. Only advance pointer if found.
+if p + 1 < len(stream) and stream[p+1] == integer_type_id:
+out = stream[p]
+p += 2
+
 name_idx, cat_idx = stream[p], stream[p+1]
 
 if cat_idx == county_type_id:
 name = string_table[name_idx - 1] if 0 < name_idx <= len(string_table) else "Unknown"
 results.append({'county': name, 'state': self.state_filter, 'company': self.name, 'outages': out, 'served': served})
+i = p + 1 # Advance main loop counter past this processed region
 except IndexError: pass
 i += 1
 return results