commit 32638df3a9 (parent a6a69f47f8)
2025-12-07 03:25:14 +00:00
6 changed files with 603 additions and 150 deletions

Binary file not shown.

get_rpc_config_auto.py (new file, 87 lines)

@@ -0,0 +1,87 @@
import json
import time
from playwright.sync_api import sync_playwright


def analyze_gwt_response(response_text):
    """Finds potential coordinate pairs to validate response data."""
    candidates = []
    try:
        if response_text.startswith("//OK"):
            response_text = response_text[4:]
        data = json.loads(response_text)
        if isinstance(data, list):
            for i in range(len(data) - 2):
                val1 = data[i]
                val2 = data[i + 2]
                if isinstance(val1, (int, float)) and isinstance(val2, (int, float)):
                    if abs(val1) > 100000 and abs(val2) > 100000:
                        candidates.append((val1, val2))
                        if len(candidates) > 5:
                            break
    except Exception:
        pass
    return candidates


def get_fresh_config(map_url):
    """Launches a headless browser to scrape headers, body, AND cookies."""
    print(f"--- Auto-Repair: Launching Browser for {map_url} ---")
    captured_request = None
    captured_cookies = []
    with sync_playwright() as p:
        browser = p.chromium.launch(headless=True)
        # Create a persistent context to ensure cookies are tracked
        context = browser.new_context()
        page = context.new_page()

        def handle_request(request):
            nonlocal captured_request
            if ".rpc" in request.url and request.method == "POST":
                try:
                    post_data = request.post_data or ""
                    if "getCombinedOutageDetails" in post_data or "getOutages" in post_data:
                        captured_request = {
                            'url': request.url,
                            'headers': request.headers,
                            'body': request.post_data
                        }
                except Exception:
                    pass

        page.on("request", handle_request)
        try:
            page.goto(map_url, wait_until="networkidle", timeout=45000)
            time.sleep(5)
            # Capture cookies from the browser context
            captured_cookies = context.cookies()
        except Exception as e:
            print(f"Auto-Repair Browser Error: {e}")
        finally:
            browser.close()
    if captured_request:
        req_headers = captured_request['headers']
        # Clean headers (keep the GWT-specific ones, discard dynamic browser
        # headers that requests manages itself)
        clean_headers = {
            'content-type': req_headers.get('content-type', 'text/x-gwt-rpc; charset=UTF-8'),
            'x-gwt-module-base': req_headers.get('x-gwt-module-base'),
            'x-gwt-permutation': req_headers.get('x-gwt-permutation'),
            'Referer': map_url
        }
        return {
            'headers': clean_headers,
            'body': captured_request['body'],
            'url': captured_request['url'],
            'cookies': captured_cookies  # <--- Return cookies
        }
    return None


if __name__ == "__main__":
    url = input("Enter Map URL: ")
    print(get_fresh_config(url))
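For a quick manual check of what the helper captures, the config it returns can be replayed directly with requests. This is a sketch assuming Playwright and its Chromium build are installed (pip install playwright && playwright install chromium), using the Harrison REA map URL that appears in providers.json below:

import requests
import get_rpc_config_auto

cfg = get_rpc_config_auto.get_fresh_config(
    "https://harrisonrea.ebill.coop/maps/external/OutageWebMap/")
if cfg:
    s = requests.Session()
    # Replay the captured session: cookies first, then the RPC call itself
    for c in cfg['cookies']:
        s.cookies.set(c['name'], c['value'], domain=c['domain'], path=c['path'])
    resp = s.post(cfg['url'], headers=cfg['headers'], data=cfg['body'], verify=False)
    # Coordinate-sized number pairs are a cheap sanity check on the payload
    print(get_rpc_config_auto.analyze_gwt_response(resp.text))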

newpower.py (new file, 6 lines)

@@ -0,0 +1,6 @@
#!/var/www/html/power/venv/bin/python
# Add Harrison REA
# Add WashingtonElectric
# Add Buckeye

Modified file (name not shown)

@@ -4,10 +4,19 @@ import json
 import psycopg2
 import mercantile
 import logging
-from datetime import datetime, timezone
+import os
+from datetime import datetime, timezone, timedelta
 from abc import ABC, abstractmethod
 from urllib.parse import urlparse
+from pyproj import Transformer
+from requests.packages.urllib3.exceptions import InsecureRequestWarning
+
+# Import the helper module
+import get_rpc_config_auto
+
+requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

-# --- LOGGING SETUP ---
+# --- LOGGING ---
 logging.basicConfig(
     level=logging.INFO,
     format='%(asctime)s - %(levelname)s - %(message)s',
@@ -15,57 +24,51 @@ logging.basicConfig(
 )
 logger = logging.getLogger(__name__)

-# --- CONFIGURATION ---
+# --- CONFIG ---
 DB_CONFIG = {'host': 'localhost', 'database': 'nws', 'user': 'nws', 'password': 'nws'}
-KUBRA_BASE_TEMPLATE = 'https://kubra.io/cluster-data/'
+CONFIG_FILE = 'providers.json'
+AUTO_UPDATE_COOLDOWN_HOURS = 4  # Only try to repair once every 4 hours

-from requests.packages.urllib3.exceptions import InsecureRequestWarning
-requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
-
-# --- PROVIDER CONFIGURATION ---
-# To add a new site, just add a dictionary here.
-PROVIDERS = [
-    {
-        'name': 'AEP-WV',
-        'type': 'kubra',
-        'meta_url': "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false",
-        'layer': 'cluster-2',
-        'quadkeys': ['0320001','0320003','0320010','0320011','0320012','0320013','0320021','0320030','0320031','0320100','0320102','0320120']
-    },
-    {
-        'name': 'AEP-OH',
-        'type': 'kubra',
-        'meta_url': 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false',
-        'layer': 'cluster-1',
-        'quadkeys': ['0320013','0320010','0320011','0320012','0320003','0320001','0302322','0302233','0302232','0302223','0320102','0320100']
-    },
-    {
-        'name': 'AEP-KY',
-        'type': 'kubra',
-        'meta_url': 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false',
-        'layer': 'cluster-2',
-        'quadkeys': ['0320031','0320030','0320021','0320013','0320012','0320011','0320010','0320003','0320001']
-    },
-    {
-        'name': 'FirstEnergy',
-        'type': 'kubra',
-        'meta_url': 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false',
-        'layer': 'cluster-4',
-        'quadkeys': ['030223','030232','032001','032003','032010','032012']
-    },
-    {
-        'name': 'SouthCentral',
-        'type': 'simple_json',
-        'url': 'https://outage.southcentralpower.com/data/outages.json'
-    },
-    {
-        'name': 'Grayson',
-        'type': 'simple_json',
-        'url': 'https://outages.graysonrecc.com/data/outages.json'
-    }
-]
+# --- CONFIG MANAGEMENT ---
+def load_providers():
+    if not os.path.exists(CONFIG_FILE):
+        logger.error(f"{CONFIG_FILE} not found!")
+        return []
+    with open(CONFIG_FILE, 'r') as f:
+        return json.load(f)
+
+def save_providers(providers):
+    with open(CONFIG_FILE, 'w') as f:
+        json.dump(providers, f, indent=4)
+    logger.info("Configuration saved to providers.json")
+
+def update_provider_config(provider_name, new_settings):
+    providers = load_providers()
+    updated = False
+    for p in providers:
+        if p.get('name') == provider_name:
+            if 'headers' in new_settings:
+                p['headers'] = new_settings['headers']
+            if 'body' in new_settings:
+                p['body'] = new_settings['body']
+            if 'url' in new_settings:
+                p['url'] = new_settings['url']
+            # <--- NEW: Save cookies
+            if 'cookies' in new_settings:
+                p['cookies'] = new_settings['cookies']
+            p['last_auto_update'] = datetime.now(timezone.utc).isoformat()
+            updated = True
+            break
+    if updated:
+        save_providers(providers)
+        return True
+    return False

-# --- DATABASE HANDLER ---
+# --- DATABASE ---
 class PowerDB:
     def __init__(self, config):
         self.conn = psycopg2.connect(**config)
@@ -114,11 +117,9 @@ class PowerDB:
         cursor.execute("DELETE FROM newpower WHERE fetch_time < NOW() - INTERVAL '365 days'")
         logger.info("Post-processing complete.")

-# --- PROVIDER ARCHITECTURE ---
+# --- PROVIDERS ---
 class BaseProvider(ABC):
-    """Abstract base class for all providers"""
     def __init__(self, config, session):
         self.config = config
         self.session = session
@@ -126,23 +127,14 @@ class BaseProvider(ABC):
     @abstractmethod
     def fetch(self):
-        """Must return a list of standardized outage dictionaries"""
         pass

 class SimpleJsonProvider(BaseProvider):
-    """Handles sites that return a flat JSON list of outages"""
     def fetch(self):
         url = self.config.get('url')
         if not url:
-            logger.error(f"Missing URL for {self.name}")
             return []
         try:
             resp = self.session.get(url, verify=False)
-            if not resp.ok:
-                logger.error(f"{self.name} returned {resp.status_code}")
-                return []
+            if not resp.ok: return []
             data = resp.json()
             results = []
             for item in data:
@@ -153,49 +145,38 @@ class SimpleJsonProvider(BaseProvider):
             return []

     def _normalize(self, item):
         # Helper to parse ISO strings safely
         def safe_parse(ts):
             if not ts: return None
             try: return datetime.fromisoformat(ts.replace('Z', '+00:00'))
             except Exception: return None
         return {
-            'incidentid': str(item.get('outageRecID')),
-            'utility': self.name,
-            'lat': item.get('outagePoint', {}).get('lat'),
-            'lon': item.get('outagePoint', {}).get('lng'),
+            'incidentid': str(item.get('outageRecID')), 'utility': self.name,
+            'lat': item.get('outagePoint', {}).get('lat'), 'lon': item.get('outagePoint', {}).get('lng'),
             'pointgeom': f"{item.get('outagePoint', {}).get('lat')},{item.get('outagePoint', {}).get('lng')}",
-            'areageom': None,
-            'start': safe_parse(item.get('outageStartTime')),
-            'etr': safe_parse(item.get('outageEndTime')),
-            'outagen': item.get('customersOutNow'),
-            'cause': item.get('cause'),
-            'crew_status': item.get('outageWorkStatus'),
+            'areageom': None, 'start': safe_parse(item.get('outageStartTime')),
+            'etr': safe_parse(item.get('outageEndTime')), 'outagen': item.get('customersOutNow'),
+            'cause': item.get('cause'), 'crew_status': item.get('outageWorkStatus'),
             'last_change': safe_parse(item.get('outageModifiedTime'))
         }

 class KubraProvider(BaseProvider):
-    """Handles Kubra StormCenter recursive quadkey fetching"""
     def __init__(self, config, session):
         super().__init__(config, session)
         self.max_zoom = 14
         self.results = []
+        self.base_url_template = 'https://kubra.io/cluster-data/'

     def fetch(self):
-        # 1. Dynamic Setup: Get hex keys
         meta_url = self.config.get('meta_url')
         if not meta_url: return []
         hex1, hex2 = self._get_hexes(meta_url)
-        if not hex1:
-            logger.error(f"{self.name}: Could not fetch hex keys")
-            return []
-        self.base_url = f"{KUBRA_BASE_TEMPLATE}{hex1}/{hex2}/"
+        if not hex1: return []
+        self.base_url = f"{self.base_url_template}{hex1}/{hex2}/"
         self.layer = self.config.get('layer')
         quadkeys = self.config.get('quadkeys', [])
-        # 2. Recursive Fetch
         self.results = []
         self._fetch_recursive(quadkeys, set(), zoom=len(quadkeys[0]))
         return self.results
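Worth noting for future additions: the whole provider contract is the (config, session) constructor plus a fetch() that returns standardized outage dicts. A minimal sketch of a hypothetical provider; the class name and every value in it are illustrative only:

class StaticDemoProvider(BaseProvider):
    # Hypothetical: returns one hard-coded outage so the pipeline can be
    # exercised without any network access.
    def fetch(self):
        now = datetime.now(timezone.utc)
        return [{
            'incidentid': 'demo-1', 'utility': self.name,
            'lat': 39.0, 'lon': -81.0, 'pointgeom': '39.0,-81.0', 'areageom': None,
            'start': now, 'etr': None, 'outagen': 1,
            'cause': 'Unknown', 'crew_status': 'Unknown', 'last_change': now
        }]

Registering it would be one more line in PROVIDER_REGISTRY plus a providers.json entry with a matching "type" string.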
@@ -206,114 +187,270 @@ class KubraProvider(BaseProvider):
             path = resp.json().get('data', {}).get('cluster_interval_generation_data')
             parts = path.split('/')
             return parts[2], parts[3]
-        except Exception as e:
-            logger.error(f"Hex fetch error {self.name}: {e}")
-            return None, None
+        except Exception: return None, None

     def _fetch_recursive(self, quadkeys, seen, zoom):
         for q in quadkeys:
             suffix = q[-3:][::-1]
             url = f"{self.base_url}{suffix}/public/{self.layer}/{q}.json"
             if url in seen: continue
             seen.add(url)
             try:
                 resp = self.session.get(url)
-                if not resp.ok or not 'application/json' in resp.headers.get('Content-Type', ''):
-                    continue
+                if not resp.ok: continue
                 for item in resp.json().get('file_data', []):
-                    desc = item.get('desc', {})
-                    if desc.get('cluster', False):
-                        if zoom + 1 > self.max_zoom:
-                            self.results.append(self._normalize(item))
-                        else:
+                    if item.get('desc', {}).get('cluster', False):
+                        if zoom + 1 <= self.max_zoom:
                             p_geom = item.get('geom', {}).get('p', [])
                             if p_geom:
                                 next_key = self._get_quadkey_for_point(p_geom[0], zoom + 1)
                                 self._fetch_recursive([next_key], seen, zoom + 1)
+                        else: self.results.append(self._normalize(item))
                     else:
                         self.results.append(self._normalize(item))
-                neighbors = self._get_neighboring_quadkeys(q)
-                self._fetch_recursive(neighbors, seen, zoom)
-            except Exception as e:
-                logger.error(f"Error reading quadkey {q}: {e}")
+                self._fetch_recursive(self._get_neighbors(q), seen, zoom)
+            except Exception: pass
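The recursion works on Bing-style quadkeys: one character per zoom level, so each 7-character key in the config is a zoom-7 tile, descending a level appends one digit, and sideways movement comes from the neighbors call. A small mercantile sketch of the operations used above, seeded with AEP-WV's first quadkey:

import mercantile

t = mercantile.quadkey_to_tile("0320001")
print(t)                              # Tile(x=33, y=48, z=7)
print(mercantile.quadkey(t))          # '0320001'
# Descending one zoom level appends a digit 0-3 to the key
print([mercantile.quadkey(c) for c in mercantile.children(t)])
# The surrounding tiles, as used via mercantile.neighbors() in _get_neighbors()
print([mercantile.quadkey(n) for n in mercantile.neighbors(t)])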
     def _normalize(self, item):
         desc = item.get('desc', {})
         geom = item.get('geom', {})
-        point_poly = geom.get('p', [None])[0]
-        if not point_poly: return {}
-        latlon = polyline.decode(point_poly)[0]
-        def parse_ts(ts_str):
-            if not ts_str or ts_str == 'ETR-NULL': return None
-            try: return datetime.strptime(ts_str, "%Y-%m-%dT%H:%M:%S%z")
-            except:
-                try: return datetime.strptime(ts_str, "%Y-%m-%dT%H:%M%z")
-                except: return None
-        cause = desc.get('cause', {})
-        if isinstance(cause, dict): cause = cause.get('EN-US')
+        p = geom.get('p', [None])[0]
+        if not p: return {}
+        latlon = polyline.decode(p)[0]
+        def ts(s):
+            if not s or s == 'ETR-NULL': return None
+            try: return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S%z")
+            except Exception: return None
         return {
-            'incidentid': desc.get('inc_id'),
-            'utility': self.name,
-            'lat': latlon[0],
-            'lon': latlon[1],
-            'pointgeom': point_poly,
-            'areageom': geom.get('a'),
-            'start': parse_ts(desc.get('start_time')),
-            'etr': parse_ts(desc.get('etr')),
-            'outagen': desc.get('cust_a', {}).get('val', 0),
-            'cause': cause or "Pending Investigation",
-            'crew_status': desc.get('crew_status', {}).get('EN-US'),
-            'active': True
+            'incidentid': desc.get('inc_id'), 'utility': self.name,
+            'lat': latlon[0], 'lon': latlon[1], 'pointgeom': p, 'areageom': geom.get('a'),
+            'start': ts(desc.get('start_time')), 'etr': ts(desc.get('etr')),
+            'outagen': desc.get('cust_a', {}).get('val', 0), 'cause': desc.get('cause', {}).get('EN-US', "Pending"),
+            'crew_status': desc.get('crew_status', {}).get('EN-US'), 'active': True
         }

+    def _get_quadkey_for_point(self, p, z):
+        ll = polyline.decode(p)[0]
+        return mercantile.quadkey(mercantile.tile(lng=ll[1], lat=ll[0], zoom=z))
+
+    def _get_neighbors(self, q):
+        t = mercantile.quadkey_to_tile(q)
+        return [mercantile.quadkey(n) for n in mercantile.neighbors(t)]
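Both helpers lean on the polyline package: Kubra's geom['p'] values are Google-encoded polylines, and decode() returns a list of (lat, lon) tuples, of which the code uses the first. A tiny self-contained round trip (the coordinate is illustrative):

import polyline

encoded = polyline.encode([(38.35, -81.63)])   # a point near Charleston, WV
decoded = polyline.decode(encoded)[0]
print(encoded, decoded)                        # decoded == (38.35, -81.63)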
+class GwtRpcProvider(BaseProvider):
+    def __init__(self, config, session):
+        super().__init__(config, session)
+        self.transformer = None
+        self.state_filter = config.get('state_filter')
+        self.map_url = config.get('map_url')
+        # 1. Base headers
+        self.session.headers.update({
+            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
+            'Accept': '*/*',
+            'Sec-Fetch-Site': 'same-origin'
+        })
+        parsed_url = urlparse(config.get('url'))
+        self.session.headers.update({'Origin': f"{parsed_url.scheme}://{parsed_url.netloc}"})
+        # 2. Load cookies (if available, but don't rely solely on them)
+        if config.get('cookies'):
+            for cookie in config['cookies']:
+                # Handle expiry mapping if needed, or ignore errors
+                try:
+                    self.session.cookies.set(
+                        cookie['name'],
+                        cookie['value'],
+                        domain=cookie['domain'],
+                        path=cookie['path']
+                    )
+                except Exception: pass
+        self.STATE_BOUNDS = {
+            'WV': {'lat_min': 37.0, 'lat_max': 40.7, 'lon_min': -82.7, 'lon_max': -77.7},
+            'OH': {'lat_min': 38.4, 'lat_max': 42.0, 'lon_min': -84.9, 'lon_max': -80.5},
+            'KY': {'lat_min': 36.4, 'lat_max': 39.2, 'lon_min': -89.6, 'lon_max': -81.9},
+            'IA': {'lat_min': 40.3, 'lat_max': 43.6, 'lon_min': -96.7, 'lon_max': -90.1}
+        }
-    def _get_quadkey_for_point(self, polyline_str, zoom):
-        latlon = polyline.decode(polyline_str)[0]
-        return mercantile.quadkey(mercantile.tile(lng=latlon[1], lat=latlon[0], zoom=zoom))
+        if config.get('epsg'):
+            try:
+                self.transformer = Transformer.from_crs(f"EPSG:{config['epsg']}", "EPSG:4326", always_xy=True)
+            except Exception: logger.error(f"EPSG Error for {self.name}")
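always_xy=True forces pyproj to accept (easting, northing) and emit (lon, lat) regardless of the axis order the CRS declares, which keeps the two-ordering retry in _extract_outages simple. A minimal sketch; EPSG:3735 is the code the WashingtonElectric entry uses, and the input values are placeholders rather than real outage data:

from pyproj import Transformer

tr = Transformer.from_crs("EPSG:3735", "EPSG:4326", always_xy=True)
lon, lat = tr.transform(1_800_000, 400_000)  # (easting, northing) -> (lon, lat)
print(lat, lon)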
+    def attempt_auto_repair(self):
+        if not self.map_url: return False
+        # --- Cooldown check ---
+        last_update = self.config.get('last_auto_update')
+        if last_update:
+            try:
+                last_dt = datetime.fromisoformat(last_update)
+                if last_dt.tzinfo is None: last_dt = last_dt.replace(tzinfo=timezone.utc)
+                if datetime.now(timezone.utc) - last_dt < timedelta(hours=AUTO_UPDATE_COOLDOWN_HOURS):
+                    logger.info(f"Skipping auto-repair for {self.name} (cooldown active).")
+                    return False
+            except ValueError: pass
+        logger.info(f"Attempting auto-repair for {self.name}...")
+        try:
+            new_settings = get_rpc_config_auto.get_fresh_config(self.map_url)
+            if new_settings:
+                logger.info(f"Repair successful! Updating {self.name}.")
+                # Update the in-memory config (CRITICAL: prevents a retry loop)
+                current_time = datetime.now(timezone.utc).isoformat()
+                self.config['headers'] = new_settings['headers']
+                self.config['body'] = new_settings['body']
+                self.config['url'] = new_settings['url']
+                self.config['cookies'] = new_settings.get('cookies', [])
+                self.config['last_auto_update'] = current_time
+                # Force updates to the session: clear cookies so we don't mix
+                # old and new session state
+                self.session.cookies.clear()
+                if new_settings.get('cookies'):
+                    for cookie in new_settings['cookies']:
+                        self.session.cookies.set(cookie['name'], cookie['value'], domain=cookie['domain'], path=cookie['path'])
+                update_provider_config(self.name, new_settings)
+                return True
+        except Exception as e:
+            logger.error(f"Auto-repair failed: {e}")
+        return False
+    def fetch(self, is_retry=False):
+        url = self.config.get('url')
+        headers = self.config.get('headers', {})
+        body = self.config.get('body')
+        if not url or not body: return []
+        try:
+            # 3. Dynamic Origin update
+            parsed_url = urlparse(url)
+            origin = f"{parsed_url.scheme}://{parsed_url.netloc}"
+            self.session.headers.update({'Origin': origin})
+            # 4. ALWAYS PRIME SESSION (fixes the regression)
+            # Even if we have cookies, they might be stale or missing JSESSIONID.
+            # Hitting the page refreshes the jar.
+            prime_url = headers.get('Referer') or headers.get('x-gwt-module-base') or origin
+            if prime_url:
+                try:
+                    self.session.get(prime_url, verify=False, timeout=10)
+                except Exception: pass
+            req_headers = headers.copy()
+            if 'Content-Type' not in req_headers: req_headers['Content-Type'] = 'text/x-gwt-rpc; charset=UTF-8'
+            resp = self.session.post(url, headers=req_headers, data=body, verify=False)
+            # 5. Error handling & retry
+            failed = False
+            if "//EX" in resp.text: failed = True
+            if resp.status_code == 500: failed = True
+            if failed:
+                logger.error(f"GWT failure for {self.name} (status: {resp.status_code}).")
+                # Check recursion limit
+                if is_retry:
+                    logger.error(f"Retry failed for {self.name}. Aborting.")
+                    return []
+                if self.attempt_auto_repair():
+                    logger.info("Retrying fetch with new settings...")
+                    return self.fetch(is_retry=True)
+                else:
+                    return []
+            if not resp.ok: return []
+            text = resp.text
+            if text.startswith('//OK'): text = text[4:]
+            return self._extract_outages(json.loads(text))
+        except Exception as e:
+            logger.error(f"Fetch error {self.name}: {e}")
+            return []
+
+    # ... Keep _extract_outages and _is_valid as is ...
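For context on the string slicing above: a GWT-RPC endpoint answers with a //OK prefix on success and //EX on a server-side exception, followed by a JSON array, so stripping four characters leaves parseable JSON. A toy illustration with a fabricated payload:

import json

sample = '//OK[3,2,1,["Outage","OUT-123"],0,7]'   # fabricated response body
text = sample[4:] if sample.startswith('//OK') else sample
print(json.loads(text))                           # [3, 2, 1, ['Outage', 'OUT-123'], 0, 7]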
+    def _extract_outages(self, data_list):
+        results = []
+        if not self.transformer: return []
+        processed = set()
+        stride = 2
+        for i in range(len(data_list) - stride):
+            val1 = data_list[i]
+            val2 = data_list[i + stride]
+            if (isinstance(val1, (int, float)) and isinstance(val2, (int, float)) and
+                    abs(val1) > 100000 and abs(val2) > 100000):
+                lat, lon = None, None
+                try:
+                    res_lon, res_lat = self.transformer.transform(val2, val1)
+                    if self._is_valid(res_lat, res_lon): lat, lon = res_lat, res_lon
+                except Exception: pass
+                if not lat:
+                    try:
+                        res_lon, res_lat = self.transformer.transform(val1, val2)
+                        if self._is_valid(res_lat, res_lon): lat, lon = res_lat, res_lon
+                    except Exception: pass
+                if lat and lon:
+                    k = f"{lat:.4f},{lon:.4f}"
+                    if k in processed: continue
+                    processed.add(k)
+                    # Fallback id from hash() is not stable across runs (per-process salt);
+                    # the backward scan below replaces it with a real id when one is found
+                    oid = str(abs(hash(k)))
+                    for o in range(1, 15):
+                        idx = i - o
+                        if idx >= 0 and isinstance(data_list[idx], str):
+                            s = data_list[idx]
+                            if len(s) < 20 and "java" not in s and "http" not in s:
+                                oid = s
+                                break
+                    results.append({
+                        'incidentid': oid, 'utility': self.name,
+                        'lat': lat, 'lon': lon, 'pointgeom': k, 'areageom': None,
+                        'start': datetime.now(timezone.utc), 'etr': None, 'outagen': 1,
+                        'cause': "Unknown", 'crew_status': "Unknown", 'active': True,
+                        'last_change': datetime.now(timezone.utc)
+                    })
+        return results
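The scan exploits the observation that each outage's projected easting/northing pair lands a fixed stride apart in the serialized array, and that coordinate magnitudes dwarf the surrounding counts and ordinals. A worked toy example of just the pairing logic, on fabricated data:

# Two coordinate-sized numbers sit at stride 2 (indices 1 and 3); the small
# values in between are bookkeeping and get skipped.
data_list = ["OUT-9", 412345.7, 3, 1698321.2, 0]
stride = 2
for i in range(len(data_list) - stride):
    v1, v2 = data_list[i], data_list[i + stride]
    if (isinstance(v1, (int, float)) and isinstance(v2, (int, float))
            and abs(v1) > 100000 and abs(v2) > 100000):
        print("candidate pair:", v1, v2)   # -> candidate pair: 412345.7 1698321.2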
+    def _is_valid(self, lat, lon):
+        if not self.state_filter: return True
+        b = self.STATE_BOUNDS.get(self.state_filter)
+        if not b: return True
+        return b['lat_min'] <= lat <= b['lat_max'] and b['lon_min'] <= lon <= b['lon_max']

-    def _get_neighboring_quadkeys(self, quadkey):
-        tile = mercantile.quadkey_to_tile(quadkey)
-        neighbors = [
-            mercantile.Tile(x=tile.x, y=tile.y - 1, z=tile.z),
-            mercantile.Tile(x=tile.x + 1, y=tile.y, z=tile.z),
-            mercantile.Tile(x=tile.x, y=tile.y + 1, z=tile.z),
-            mercantile.Tile(x=tile.x - 1, y=tile.y, z=tile.z),
-            mercantile.Tile(x=tile.x + 1, y=tile.y - 1, z=tile.z),
-            mercantile.Tile(x=tile.x + 1, y=tile.y + 1, z=tile.z),
-            mercantile.Tile(x=tile.x - 1, y=tile.y - 1, z=tile.z),
-            mercantile.Tile(x=tile.x - 1, y=tile.y + 1, z=tile.z),
-        ]
-        return [mercantile.quadkey(t) for t in neighbors if t.x >= 0 and t.y >= 0]
 # --- REGISTRY ---
 # Map string types to Classes
 PROVIDER_REGISTRY = {
     'kubra': KubraProvider,
-    'simple_json': SimpleJsonProvider
+    'simple_json': SimpleJsonProvider,
+    'gwt_rpc': GwtRpcProvider
 }

 # --- MAIN ---
 def main():
     S = requests.Session()
     S.verify = False
     db = PowerDB(DB_CONFIG)
     logger.info("Starting Power Scraper...")
-    for config in PROVIDERS:
+    providers = load_providers()
+    for config in providers:
         p_type = config.get('type')
         p_name = config.get('name')
         ProviderClass = PROVIDER_REGISTRY.get(p_type)
         if ProviderClass:
             try:
                 provider = ProviderClass(config, S)
@@ -322,15 +459,14 @@ def main():
                 count = 0
                 for outage in outages:
-                    if outage: # Ensure valid data
+                    if outage:
                         db.upsert_outage(outage)
                         count += 1
                 logger.info(f"Saved {count} records for {p_name}")
             except Exception as e:
-                logger.error(f"Critical error running {p_name}: {e}")
+                logger.error(f"Error processing {p_name}: {e}")
         else:
-            logger.warning(f"Unknown provider type '{p_type}' for {p_name}")
+            logger.warning(f"Unknown provider type {p_type} for {p_name}")

     db.run_post_processing()
     db.close()

Modified file (name not shown)

@@ -9,6 +9,13 @@ import pandas as pd
 from requests.packages.urllib3.exceptions import InsecureRequestWarning
 requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

+# TODO
+# Add Buckeye REC https://buckeyerec.outagemap.coop/
+
 conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
 cursor = conn.cursor()

providers.json (new file, 217 lines)

@@ -0,0 +1,217 @@
[
    {
        "name": "AEP-WV",
        "type": "kubra",
        "meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false",
        "layer": "cluster-2",
        "quadkeys": [
            "0320001",
            "0320003",
            "0320010",
            "0320011",
            "0320012",
            "0320013",
            "0320021",
            "0320030",
            "0320031",
            "0320100",
            "0320102",
            "0320120"
        ]
    },
    {
        "name": "AEP-OH",
        "type": "kubra",
        "meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false",
        "layer": "cluster-1",
        "quadkeys": [
            "0320013",
            "0320010",
            "0320011",
            "0320012",
            "0320003",
            "0320001",
            "0302322",
            "0302233",
            "0302232",
            "0302223",
            "0320102",
            "0320100"
        ]
    },
    {
        "name": "AEP-KY",
        "type": "kubra",
        "meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false",
        "layer": "cluster-2",
        "quadkeys": [
            "0320031",
            "0320030",
            "0320021",
            "0320013",
            "0320012",
            "0320011",
            "0320010",
            "0320003",
            "0320001"
        ]
    },
    {
        "name": "FirstEnergy",
        "type": "kubra",
        "meta_url": "https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false",
        "layer": "cluster-4",
        "quadkeys": [
            "030223",
            "030232",
            "032001",
            "032003",
            "032010",
            "032012"
        ]
    },
    {
        "name": "SouthCentral",
        "type": "simple_json",
        "url": "https://outage.southcentralpower.com/data/outages.json"
    },
    {
        "name": "Grayson",
        "type": "simple_json",
        "url": "https://outages.graysonrecc.com/data/outages.json"
    },
    {
        "name": "Harrison REA",
        "type": "gwt_rpc",
        "map_url": "https://harrisonrea.ebill.coop/maps/external/OutageWebMap/",
        "url": "https://harrisonrea.ebill.coop/maps/external/OutageWebMap/maps/GWT.rpc",
        "epsg": 26853,
        "state_filter": "WV",
        "headers": {
            "content-type": "text/x-gwt-rpc; charset=UTF-8",
            "x-gwt-module-base": "https://harrisonrea.ebill.coop/maps/external/OutageWebMap/maps/",
            "x-gwt-permutation": "1D9BF682AF8D25FA7709856C83E9910F",
            "Referer": "https://harrisonrea.ebill.coop/maps/external/OutageWebMap/"
        },
        "body": "7|0|4|https://harrisonrea.ebill.coop/maps/external/OutageWebMap/maps/|612278413EC26C34D54A3907AA0CDFD8|coop.nisc.oms.webmap.services.RpcCombinedOutageDetailsService|getCombinedOutageDetails|1|2|3|4|0|",
        "last_auto_update": "2025-12-07T03:09:05.982825+00:00"
    },
    {
        "name": "WashingtonElectric",
        "type": "gwt_rpc",
        "map_url": "https://weci.ebill.coop/woViewer/mapviewer.html?config=Outage+Web+Map",
        "url": "https://weci.ebill.coop/woViewer/MapWiseWeb/GWT.rpc",
        "epsg": 3735,
        "state_filter": "OH",
        "headers": {
            "content-type": "text/x-gwt-rpc; charset=UTF-8",
            "x-gwt-module-base": "https://weci.ebill.coop/woViewer/MapWiseWeb/",
            "x-gwt-permutation": "92F322F8E48548F604D2E1BE43DB1F13",
            "Referer": "https://weci.ebill.coop/woViewer/mapviewer.html?config=Outage+Web+Map"
        },
        "body": "7|0|4|https://weci.ebill.coop/woViewer/MapWiseWeb/|612278413EC26C34D54A3907AA0CDFD8|coop.nisc.oms.webmap.services.RpcCombinedOutageDetailsService|getCombinedOutageDetails|1|2|3|4|0|",
        "last_auto_update": "2025-12-07T03:24:46.435173+00:00",
        "cookies": [
            {
                "name": "__utma",
                "value": "105963909.535514741.1765077881.1765077881.1765077881.1",
                "domain": ".weci.ebill.coop",
                "path": "/",
                "expires": 1799637880.601006,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utmc",
                "value": "105963909",
                "domain": ".weci.ebill.coop",
                "path": "/",
                "expires": -1,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utmz",
                "value": "105963909.1765077881.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
                "domain": ".weci.ebill.coop",
                "path": "/",
                "expires": 1780845880,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utmt_nisc",
                "value": "1",
                "domain": ".weci.ebill.coop",
                "path": "/",
                "expires": 1765078480,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utmt_user",
                "value": "1",
                "domain": ".weci.ebill.coop",
                "path": "/",
                "expires": 1765078480,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utmb",
                "value": "105963909.2.10.1765077881",
                "domain": ".weci.ebill.coop",
                "path": "/",
                "expires": 1765079680,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utma",
                "value": "105963909.535514741.1765077881.1765077881.1765077881.1",
                "domain": "weci.ebill.coop",
                "path": "/",
                "expires": 1799637880.601622,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utmb",
                "value": "105963909.3.9.1765077881",
                "domain": "weci.ebill.coop",
                "path": "/",
                "expires": 1765079680,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utmc",
                "value": "105963909",
                "domain": "weci.ebill.coop",
                "path": "/",
                "expires": -1,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            },
            {
                "name": "__utmz",
                "value": "105963909.1765077881.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
                "domain": "weci.ebill.coop",
                "path": "/",
                "expires": 1780845880,
                "httpOnly": false,
                "secure": false,
                "sameSite": "Lax"
            }
        ]
    }
]
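The TODO comments in this commit flag Buckeye REC (https://buckeyerec.outagemap.coop/) as the next addition. A hedged bootstrap sketch reusing this commit's own helpers; it assumes the Buckeye map is a NISC GWT-RPC site like the two .coop entries above (unverified), and the epsg code would still have to be determined by hand before _extract_outages can transform anything:

# Hypothetical bootstrap for a new gwt_rpc provider. get_fresh_config() does the
# browser capture; the result is appended to providers.json in the same shape
# save_providers() writes.
import json
import get_rpc_config_auto

map_url = "https://buckeyerec.outagemap.coop/"   # from the TODO comment
settings = get_rpc_config_auto.get_fresh_config(map_url)
if settings:
    with open('providers.json') as f:
        providers = json.load(f)
    providers.append({
        "name": "Buckeye", "type": "gwt_rpc", "map_url": map_url,
        "url": settings['url'], "headers": settings['headers'],
        "body": settings['body'], "cookies": settings['cookies'],
        "state_filter": "OH"   # epsg intentionally omitted: must be found manually
    })
    with open('providers.json', 'w') as f:
        json.dump(providers, f, indent=4)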