This commit is contained in:
2025-12-07 14:49:10 +00:00
parent d24587c321
commit 367274085b
4 changed files with 0 additions and 1010 deletions

447
power3.py
View File

@@ -1,447 +0,0 @@
import requests
import polyline
import json
import psycopg2
import psycopg2.extensions
from datetime import datetime, timezone
from geojson import Point, Feature, FeatureCollection, dump
import re
import logging
# Logging: INFO level, mirrored to power3.log and the console.
_log_handlers = [
    logging.FileHandler('power3.log'),
    logging.StreamHandler(),
]
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=_log_handlers,
)
logger = logging.getLogger(__name__)
# Module-level Postgres connection to the local 'nws' database; the cursor and
# connection are closed at the bottom of the script.
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()
# Local HTTP proxy credentials (only applied where a session explicitly opts in;
# see the commented-out Sp.proxies.update call in southcentralpower()).
proxies = {"http":"http://nws:nws@localhost:9000"}
# Legacy AEP S3 metadata endpoints (list the current interval_generation_data directory).
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json"
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
# Kubra stormcenter "currentState" endpoints used to resolve per-run report paths.
wvfemeta= 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
aepwvkubrameta = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
aepohkubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
aepkykubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
# Base URLs for the legacy AEP S3 buckets (joined with the metadata 'directory').
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
# Co-op boundary JSON endpoints.
graysoncounty = 'https://outages.graysonrecc.com/data/boundaries.json'
flemingjson = 'https://outage.fme.coop/data/boundaries.json'
#buckeye rec
#https://outage.buckeyerec.coop/maps/OutageWebMap/maps/GWT.rpc
#washington
#https://weci.ebill.coop/woViewer/MapWiseWeb/GWT.rpc
# Accumulator of (outages, served, county, state, company) tuples; every
# collector below appends to it and the tail of the script flushes it to Postgres.
allcountyoutages = []
# Shared HTTP session reused across most collectors.
S = requests.Session()
def fleming():
    """Collect county-level outage counts from Fleming-Mason Energy (KY)."""
    logger.info("Starting fleming()")
    state = 'KY'
    company = 'FLEM'
    try:
        resp = S.get(flemingjson)
        resp.raise_for_status()
        payload = json.loads(resp.text)
        for county in payload[0]['boundaries']:
            allcountyoutages.append((
                county.get('customersOutNow'),
                county.get('customersServed'),
                county.get('name'),
                state,
                company,
            ))
        logger.info(f"Successfully processed {len(payload[0]['boundaries'])} boundaries from fleming")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for fleming: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for fleming: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in fleming: {e}")
def bigsandy():
    """Collect county-level outage counts from Big Sandy RECC (OH)."""
    logger.info("Starting bigsandy()")
    state = 'OH'
    company = 'BS'
    try:
        resp = S.get('https://outagemap.bigsandyrecc.com/data/boundaries.json')
        resp.raise_for_status()
        payload = json.loads(resp.text)
        for county in payload[0]['boundaries']:
            allcountyoutages.append((
                county.get('customersOutNow'),
                county.get('customersServed'),
                county.get('name'),
                state,
                company,
            ))
        logger.info(f"Successfully processed {len(payload[0]['boundaries'])} boundaries from bigsandy")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for bigsandy: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for bigsandy: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in bigsandy: {e}")
def southcentralpower():
    """Collect county-level outage counts from South Central Power (OH)."""
    logger.info("Starting southcentralpower()")
    company = 'SCP'
    url = 'https://outage.southcentralpower.com/data/boundaries.json'
    # Dedicated session; proxy support is wired up but currently disabled.
    Sp = requests.Session()
    # Sp.proxies.update(proxies)
    try:
        resp = Sp.get(url)
        resp.raise_for_status()
        payload = json.loads(resp.text)
        state = 'OH'
        for county in payload[0]['boundaries']:
            allcountyoutages.append((
                county.get('customersOutNow'),
                county.get('customersServed'),
                county.get('name'),
                state,
                company,
            ))
        logger.info(f"Successfully processed {len(payload[0]['boundaries'])} boundaries from southcentralpower")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for southcentralpower: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for southcentralpower: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in southcentralpower: {e}")
#wv https://kubra.io/data/e2ae0326-9912-436a-9355-eb2687e798b1/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json
def ku_get_url():
    """Resolve the current LG&E/KU Kubra outage-report URL.

    Scrapes the stormcenter page for its embedded instanceId, queries the
    Kubra currentState API for the active data directory, and builds the
    report URL from it.

    Returns:
        str | None: Full report URL, or None on any failure (already logged).
    """
    logger.info("Starting ku_get_url()")
    try:
        url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
        r = requests.get(url)
        r.raise_for_status()
        # The page embeds the stormcenter instance id in inline JavaScript.
        x = re.search(r"instanceId: '(.*?)',", r.text)
        if not x:
            logger.error("Could not find instanceId in ku_get_url")
            return None
        instance_id = x.group(1)
        state_url = ('https://kubra.io/stormcenter/api/v1/stormcenters/' + instance_id
                     + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false')
        stuff = S.get(state_url)
        stuff.raise_for_status()
        jsonstuff = json.loads(stuff.text)
        # BUG FIX: .get('data') may return None; the old chained
        # .get('data').get(...) raised AttributeError and was mislogged as
        # "Unexpected error" instead of the intended missing-data message.
        interval_data = (jsonstuff.get('data') or {}).get('interval_generation_data')
        if not interval_data:
            logger.error("Could not find interval_generation_data in ku_get_url")
            return None
        report_url = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
        logger.info("Successfully generated URL for ku")
        return report_url
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed in ku_get_url: {e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error in ku_get_url: {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error in ku_get_url: {e}")
        return None
def county_json(meta, url, jsonname):
    """Fetch a dated county-outage JSON from a legacy AEP-style S3 feed.

    Reads the metadata document at *meta* for its 'directory' entry, then
    requests url + directory + jsonname.

    Returns:
        requests.Response | None: The outage response, or None on any
        failure (already logged).
    """
    logger.info(f"Starting county_json for meta: {meta}")
    try:
        meta_resp = S.get(meta)
        meta_resp.raise_for_status()
        directory = json.loads(meta_resp.text)['directory']
        full_url = url + directory + jsonname
        outage = S.get(full_url)
        outage.raise_for_status()
        logger.info(f"Successfully fetched county JSON from {full_url}")
        return outage
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed in county_json: {e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error in county_json: {e}")
        return None
    except KeyError as e:
        logger.error(f"Key error in county_json (missing 'directory'): {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error in county_json: {e}")
        return None
def ku():
    """Collect LG&E/KU county outage counts and append to allcountyoutages.

    Fetches the Kubra report resolved by ku_get_url() and flattens the three
    nested area lists (fixed region indexes in the report) into per-county
    tuples of (outages, served, county, state, utility).
    """
    logger.info("Starting ku()")
    url = ku_get_url()
    # BUG FIX: ku_get_url() returns None on failure; the old code passed
    # that straight to S.get() and crashed. (It also shadowed this
    # function's own name with a local list.)
    if url is None:
        logger.error("ku(): no report URL available, skipping")
        return
    tempdata = json.loads(S.get(url).text)
    file_areas = tempdata['file_data']['areas']
    # The report nests counties under fixed region indexes.
    counties = (
        file_areas[2]['areas'][0]['areas']
        + file_areas[2]['areas'][1]['areas']
        + file_areas[1]['areas'][0]['areas']
    )
    for o in counties:
        outageinfo = o['cust_a']['val'], o['cust_s'], o['name'].capitalize(), o['state'], o['utility']
        allcountyoutages.append(outageinfo)
def grayson():
    """Collect county-level outage counts from Grayson RECC (KY)."""
    company = 'GRE'
    outage = S.get(graysoncounty)
    # BUG FIX: headers.get('Content-Type') can be None when the server omits
    # the header, which made .startswith() raise AttributeError.
    content_type = outage.headers.get('Content-Type') or ''
    if content_type.startswith('application/json'):
        tempdata = json.loads(outage.text)
        state = 'KY'
        for j in tempdata[0]['boundaries']:
            outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
            allcountyoutages.append(outageinfo)
def aep_county_vawv(meta,url,jsonname):
    """Collect AEP county outage counts for WV and VA from the legacy S3 feed.

    The report nests WV counties under areas[0].areas[2] and VA counties
    under areas[0].areas[1] (fixed indexes in this feed).
    """
    company = 'AEP'
    outage = county_json(meta,url,jsonname)
    # BUG FIX: county_json() returns None on failure; the old code then
    # crashed on outage.headers with an AttributeError.
    if outage is None:
        return
    # Guard against a missing Content-Type header (None has no .startswith).
    content_type = outage.headers.get('Content-Type') or ''
    if content_type.startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        state = 'WV'
        for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
            allcountyoutages.append(outageinfo)
        state = 'VA'
        for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
def aep_county_oh(meta,url,jsonname):
    """Collect AEP Ohio county outage counts from the legacy S3 feed."""
    company = 'AEP'
    state = 'OH'
    outage = county_json(meta,url,jsonname)
    # BUG FIX: county_json() returns None on failure; the old code then
    # crashed on outage.text with an AttributeError.
    if outage is None:
        return
    tempdata = json.loads(outage.text)
    for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
        outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
        allcountyoutages.append(outageinfo)
def aep_county_ky(meta,url,jsonname):
    """Collect AEP Kentucky county outage counts from the legacy S3 feed."""
    company = 'AEP'
    state = 'KY'
    outage = county_json(meta,url,jsonname)
    # BUG FIX: county_json() returns None on failure; the old code then
    # crashed on outage.text with an AttributeError.
    if outage is None:
        return
    tempdata = json.loads(outage.text)
    for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
        outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
        allcountyoutages.append(outageinfo)
def firstenergy_county(meta,url,jsonname):
    """Collect FirstEnergy WV county outage counts from the legacy S3 feed."""
    company = 'FE'
    state = 'WV'
    outage = county_json(meta,url,jsonname)
    # BUG FIX: county_json() returns None on failure; the old code then
    # crashed on outage.headers with an AttributeError.
    if outage is None:
        return
    # Guard against a missing Content-Type header (None has no .startswith).
    content_type = outage.headers.get('Content-Type') or ''
    if content_type.startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(),state, company
            allcountyoutages.append(outageinfo)
def get_kubra_hexes(url):
    """Resolve the two hex path segments from a Kubra currentState URL.

    Returns:
        tuple[str, str] | None: (hexes[2], hexes[3]) of the
        cluster_interval_generation_data path, or None on any failure
        (non-JSON response, missing key, or unexpected path shape).
    """
    outage = S.get(url)
    # Guard against a missing Content-Type header (None has no .startswith).
    content_type = outage.headers.get('Content-Type') or ''
    if not content_type.startswith('application/json'):
        logger.error(f"get_kubra_hexes: unexpected Content-Type from {url}")
        return None
    tempdata = json.loads(outage.text)
    # BUG FIX: .get('data') may be None; the old chained .get crashed with
    # AttributeError instead of returning None.
    bothhex = (tempdata.get('data') or {}).get('cluster_interval_generation_data')
    if not bothhex:
        logger.error(f"get_kubra_hexes: no cluster_interval_generation_data from {url}")
        return None
    hexes = bothhex.split('/')
    # BUG FIX: a short path previously raised IndexError.
    if len(hexes) < 4:
        logger.error(f"get_kubra_hexes: unexpected path format: {bothhex}")
        return None
    return hexes[2],hexes[3]
def kubra_fe(baseurl1,baseurl2,meta):
    """Collect FirstEnergy WV county outage counts from a Kubra report.

    Args:
        baseurl1: URL prefix before the data-directory hex segment.
        baseurl2: Report path suffix after the hex segment.
        meta: Kubra currentState URL used to resolve the hex segment.
    """
    hexes = get_kubra_hexes(meta)
    # BUG FIX: get_kubra_hexes() can return None (non-JSON response); the
    # old code crashed with a TypeError on subscripting None.
    if hexes is None:
        logger.error("kubra_fe: could not resolve report path")
        return
    url = baseurl1 + hexes[1] + baseurl2
    company = 'FE'
    state = 'WV'
    outage = S.get(url)
    # Guard against a missing Content-Type header (None has no .startswith).
    content_type = outage.headers.get('Content-Type') or ''
    if content_type.startswith('application/json'):
        tempdata = json.loads(outage.text)
        for j in tempdata['file_data']['areas']:
            if j.get('key') == "county":
                outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('name').capitalize(),state,company
                allcountyoutages.append(outageinfo)
def kubra_aep(baseurl1,baseurl2,meta,company='AEP'):
    """Collect AEP county outage counts from a Kubra report.

    Args:
        baseurl1: URL prefix before the data-directory hex segment.
        baseurl2: Report path suffix after the hex segment.
        meta: Kubra currentState URL used to resolve the hex segment.
        company: Company code stored with each record (default 'AEP').
    """
    hexes = get_kubra_hexes(meta)
    # BUG FIX: get_kubra_hexes() can return None (non-JSON response); the
    # old code crashed with a TypeError on subscripting None.
    if hexes is None:
        logger.error("kubra_aep: could not resolve report path")
        return
    url = baseurl1 + hexes[1] + baseurl2
    outage = S.get(url)
    # Guard against a missing Content-Type header (None has no .startswith).
    content_type = outage.headers.get('Content-Type') or ''
    if content_type.startswith('application/json'):
        tempdata = json.loads(outage.text)
        process_outage_data(tempdata,company)
def process_outage_data(data,company):
    """Append county-level outage tuples from a Kubra report to allcountyoutages.

    Handles both observed report layouts:
      - areas[0].key == "state": counties are nested one level down per state
      - areas[0].key == "county": the top-level list already holds counties

    Args:
        data (dict): The parsed JSON report as a Python dictionary.
        company (str): Company code stored with each record.
    """
    # Navigate to the primary list of areas.
    primary_areas = data.get("file_data", {}).get("areas", [])
    if not primary_areas:
        # CONSISTENCY FIX: the rest of the file logs via `logger`;
        # this function previously used bare print().
        logger.warning("No 'areas' data found.")
        return

    def _append_county(county):
        # One county record: (outages, served, county name, state, company).
        outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(),county.get('state'),company
        allcountyoutages.append(outageinfo)

    # The key of the first item determines which layout we are looking at.
    first_item_key = primary_areas[0].get("key")
    if first_item_key == "state":
        # Format 1: one object per state, counties nested under it.
        for state_area in primary_areas:
            for county in state_area.get("areas", []):
                if county.get("key") == "county":
                    _append_county(county)
    elif first_item_key == "county":
        # Format 2: the primary list is already the county list.
        for county in primary_areas:
            if county.get("key") == "county":
                _append_county(county)
    else:
        logger.error("Unknown data format. Could not find 'state' or 'county' key.")
# Run every collector in turn; a failure in one source must not stop the
# others, so each call is isolated in its own try/except.
_collectors = [
    ("kubra_fe for FE WV",
     lambda: kubra_fe('https://kubra.io/data/','/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json',wvfemeta)),
    ("kubra_aep for AEP WV",
     lambda: kubra_aep('https://kubra.io/data/','/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json',aepwvkubrameta)),
    ("kubra_aep for AEP OH",
     lambda: kubra_aep('https://kubra.io/data/','/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json',aepohkubrameta)),
    ("kubra_aep for AEP KY",
     lambda: kubra_aep('https://kubra.io/data/','/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json',aepkykubrameta)),
    ("grayson", grayson),
    ("ku", ku),
    ("southcentralpower", southcentralpower),
    ("bigsandy", bigsandy),
    ("fleming", fleming),
]
for _label, _collector in _collectors:
    try:
        logger.info(f"Attempting {_label}")
        _collector()
    except Exception as e:
        logger.error(f"Error in {_label}: {e}")
# Timestamp shared by every row inserted in this run.
# NOTE(review): datetime.utcnow() is naive and deprecated; switching to
# datetime.now(timezone.utc) would change the stored string format — confirm
# downstream consumers before changing.
current_timestamp = str(datetime.utcnow())
# Build parameter tuples in the column order of the INSERT statement below.
all_values = [
    (row[0], row[1], row[2], row[3], current_timestamp, row[4])
    for row in allcountyoutages
]
sql = 'INSERT INTO countyoutages (outages, served, county, state, update, company) VALUES (%s, %s, %s, %s, %s, %s)'
if all_values:  # Only hit the database when there is something to insert.
    try:
        # One executemany round-trip instead of a statement per row.
        cursor.executemany(sql, all_values)
        conn.commit()
        logger.info(f"Successfully inserted {len(all_values)} records into the database")
    except Exception as e:
        logger.error(f"Database error during insert: {e}")
        conn.rollback()
else:
    logger.warning("No data to insert into the database")
# Tag each new row with its NWS forecast office (CWA) from the county table,
# then drop rows that could not be matched to any CWA.
cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')
conn.commit()
cursor.execute("delete from countyoutages where cwa is null")
conn.commit()
cursor.close()
conn.close()