initial commit
24
power.php
Normal file
@@ -0,0 +1,24 @@
<?php
// Connecting, selecting database
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
    or die('Could not connect: ' . pg_last_error());

// Performing SQL query
//$query = "SELECT distinct on (camid) camid, filepath FROM camdb order by camid,dateutc desc";
$query = "SELECT lat,lon,outagen FROM power WHERE active = true and cwa = 'RLX'";
$result = pg_query($query) or die('Query failed: ' . pg_last_error());

// Printing results as JSON
$array = []; // initialize so json_encode() still emits valid JSON when no rows match
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
echo json_encode($array);

// Free resultset
pg_free_result($result);

// Closing connection
pg_close($dbconn);
?>
555
power2.py
Normal file
@@ -0,0 +1,555 @@
import requests
import polyline
import json
import psycopg2
import psycopg2.extensions
from datetime import datetime, timezone
from geojson import Point, Feature, FeatureCollection, dump
import pandas as pd
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()

proxies = {"http":"http://nws:nws@localhost:9000"}

aepwvnew = ['0320001','0320003','0320010','0320011','0320012','0320013','0320021','0320030','0320031','0320100','0320102','0320120']
aepohnew = ['0320013','0320010','0320011','0320012','0320003','0320001','0302322','0302233','0302232','0302223','0320102','0320100']
aepkynew = ['0320031','0320030','0320021','0320013','0320012','0320011','0320010','0320003','0320001']
firstenergy = ['030223','030232','032001','032003','032010','032012']
dominionva = ['0320121','0320120','0300103','0320102','0320101','0320100','0320031','0320013','0320011']
baltimore = ['0320011','0320100','0320101','0320013','0320102','0320103']
pepco = ['03201002','03201003','03201020','03201021']


aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
aepwvkubra = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
aepohkubra = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
aepkykubra = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/pa/interval_generation_data/metadata.json"
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
domvameta = 'https://outagemap.dominionenergy.com/resources/data/external/interval_generation_data/metadata.json'
wvfemeta = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'

aepwvcluster = 'cluster-2'
aepohcluster = 'cluster-1'
aepkycluster = 'cluster-2'
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
aepbasewv = 'https://kubra.io/cluster-data/'
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"

aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
domvabase = 'https://outagemap.dominionenergy.com/resources/data/external/interval_generation_data/'
graysonrecc = 'https://outages.graysonrecc.com/data/outages.json'

kubrabase = 'https://kubra.io/cluster-data/'
firstenergybase = 'https://kubra.io/cluster-data/'
firstenergycluster = 'cluster-4'
firstenergyhex1 = 'f5f94943-5df4-4752-a0a7-8ef4baded880'
firstenergyhex2 = 'e2986f8a-5a69-4d2f-821c-e5db03932b68'

southcentraljson = 'https://outage.southcentralpower.com/data/outages.json'

allcountyoutages = []
allkubraoutages = []
allaepkubracoutages = []

def remove_external_curly_braces(s):
    # The area geometry arrives as a one-element list; unwrap it and return
    # the bare encoded-polyline string inside.
    try:
        p = s[0]
        return p
    except Exception as e:  # the original caught the undefined name `Error`
        print('error in curly ' + str(e))
        return s

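# Illustrative note (not part of the original logic): the helper simply
# unwraps a one-element list, e.g.
#   remove_external_curly_braces(['encodedPolyline']) -> 'encodedPolyline'
# Beware that passing a plain string returns its first character, since
# strings are also indexable; callers must pass the raw list from
# j.get('geom').get('a').
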
def get_kubra_hexes(url):
    outage = S.get(url)
    try:
        tempdata = json.loads(outage.text)
        bothhex = tempdata.get('data').get('cluster_interval_generation_data')
        hexes = bothhex.split('/')
        returndata = (hexes[2],hexes[3])
        return returndata
    except Exception as e:
        print(e)

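# Illustrative note (assumed payload shape): 'cluster_interval_generation_data'
# appears to hold a slash-separated path whose third and fourth segments are
# the two hex tokens; get_kubra_hexes() splits on '/' and returns those two
# segments for use in the per-cluster URLs built below.
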
def kubra(baseurl,cluster,namearray,meta):
    try:
        # the original fetched the hexes twice; one call is enough
        hex1, hex2 = get_kubra_hexes(meta)

        newnamearray = []
        for i in namearray:
            dir = str(i)
            dir = dir[-3:]
            dir = str(dir[::-1])  # shard directory: last three digits, reversed
            url = baseurl + dir + '/' + hex1 + '/' + hex2 + '/public/' + cluster + '/' + i + '.json'
            outage = S.get(url)
            if outage.headers.get('Content-Type').startswith('application/json'):
                tempdata = json.loads(outage.text)
                for j in tempdata['file_data']:
                    outageinfo = None
                    try:
                        outageinfo = j.get('desc').get('cluster')
                    except Exception:
                        continue
                    if outageinfo is True:
                        for k in range(4):
                            newnamearray.append(str(i) + str(k))
                    if outageinfo is False:
                        allkubraoutages.append(j)

        newnamearray = list(dict.fromkeys(newnamearray))
        if len(newnamearray) > 0:
            kubra(baseurl,cluster,newnamearray,meta)
    except Exception as e:
        print(e)

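# Illustrative sketch (hypothetical values) of the recursion above: a tile
# whose desc.cluster is True is subdivided by appending 0-3, e.g.
#   i = '0320001' -> '03200010', '03200011', '03200012', '03200013'
# and its shard directory is the reversed last three digits:
#   str('0320001'[-3:])[::-1] -> '100'
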
def kubra_aep(baseurl,cluster,namearray,meta):
    hex1, hex2 = get_kubra_hexes(meta)  # the original fetched the hexes twice
    newnamearray = []
    for i in namearray:
        dir = str(i)
        dir = dir[-3:]
        dir = str(dir[::-1])  # shard directory: last three digits, reversed
        url = baseurl + dir + '/' + hex1 + '/' + hex2 + '/public/' + cluster + '/' + i + '.json'
        outage = S.get(url)

        if outage.headers.get('Content-Type').startswith('application/json'):
            tempdata = json.loads(outage.text)
            for j in tempdata['file_data']:
                outageinfo = None
                try:
                    outageinfo = j.get('desc').get('cluster')
                except Exception:
                    continue
                if outageinfo is True:
                    for k in range(4):
                        newnamearray.append(str(i) + str(k))
                if outageinfo is False:
                    allaepkubracoutages.append(j)
                    #allkubraoutages.append(j)

    newnamearray = list(dict.fromkeys(newnamearray))
    if len(newnamearray) > 0:
        kubra_aep(baseurl,cluster,newnamearray,meta)

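# Note: kubra_aep() above mirrors kubra() but collects hits into
# allaepkubracoutages and has no outer try/except, so failures propagate
# to the module-level try blocks near the bottom of this script.
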
def insertkubra(data):
    for j in data:
        try:
            custa = j.get('desc').get('cust_a').get('val')
        except Exception:
            continue
        pointgeom = j.get('geom').get('p')
        if len(pointgeom) == 1:
            pointlatlon = polyline.decode(pointgeom[0])
            lat = pointlatlon[0][0]
            lon = pointlatlon[0][1]
        else:
            continue
        areageom = j.get('geom').get('a')
        if areageom is not None:
            areageom = remove_external_curly_braces(areageom)
        cause = j.get('desc').get('cause')
        if cause is not None:  # guard against a missing cause dict, as insert_kubra_aep() does
            cause = cause.get('EN-US')
        start = j.get('desc').get('start_time')
        if start is not None:
            try:
                start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                start = datetime.strptime(start,"%Y-%m-%dT%H:%M%z")
        etr = j.get('desc').get('etr')
        if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
        if etr is not None:
            try:
                etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M%z")
        incid = j.get('desc').get('inc_id')
        crew_status = j.get('desc').get('crew_status').get('EN-US')
        current_timestamp = str(datetime.utcnow())
        sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
        vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
        # print((sql,vals))
        cursor.execute(sql,vals)
        conn.commit()

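# Note on insertkubra()'s upsert: the INSERT lists 14 placeholders and the
# ON CONFLICT ... DO UPDATE SET lists 6 more, so each vals tuple carries
# 20 items -- the first 14 populate a new row, the last 6 are reused for
# the update branch when pointgeom already exists.
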
def insert_kubra_aep(data):
    for j in data:
        try:
            custa = j.get('desc').get('cust_a').get('val')
        except Exception:
            continue
        pointgeom = j.get('geom').get('p')
        if len(pointgeom) == 1:
            pointlatlon = polyline.decode(pointgeom[0])
            lat = pointlatlon[0][0]
            lon = pointlatlon[0][1]
        else:
            continue
        areageom = j.get('geom').get('a')
        if areageom is not None:
            areageom = remove_external_curly_braces(areageom)
        cause = "Pending Investigation" # Default to Pending if no cause is found
        cause_dict = j.get('desc').get('cause')
        if cause_dict:
            cause = cause_dict.get('EN-US')
        start = j.get('desc').get('start_time')
        if start is not None:
            try:
                start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                start = datetime.strptime(start,"%Y-%m-%dT%H:%M%z")
        etr = j.get('desc').get('etr')
        if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
        if etr is not None:
            try:
                etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M%z")
        incid = j.get('desc').get('inc_id')
        crew_status = j.get('desc').get('crew_status').get('EN-US')
        current_timestamp = str(datetime.utcnow())
        sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
        vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
        #print((sql,vals))
        cursor.execute(sql,vals)
        conn.commit()

def remove_dupes(l):
    # O(n^2) de-dup that keeps the *last* occurrence of each item; used
    # instead of set() because the items are dicts, which are unhashable.
    b = []
    for i in range(0, len(l)):
        if l[i] not in l[i+1:]:
            b.append(l[i])
    return b

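# Illustrative sketch (hypothetical values): the de-dup keeps the *last*
# occurrence of each repeated item, e.g.
#   remove_dupes([{'id': 1}, {'id': 2}, {'id': 1}]) -> [{'id': 2}, {'id': 1}]
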
def newaep(meta,namearray,baseurl):
    newnamearray = []
    metainfo = json.loads(S.get(meta).text)
    metadir = metainfo['directory']
    for i in namearray:
        url = baseurl + metadir + '/outages/' + i + '.json'
        outage = S.get(url)
        if outage.headers.get('Content-Type').startswith('application/octet-stream'):
            tempdata = json.loads(outage.text)
            for j in tempdata['file_data']:
                outageinfo = None
                try:
                    outageinfo = j.get('title')
                except Exception:
                    continue
                if outageinfo == 'Area Outage':
                    for k in range(4):
                        newnamearray.append(str(i) + str(k))
                if outageinfo == 'Outage Information':
                    allkubraoutages.append(j)

    newnamearray = list(dict.fromkeys(newnamearray))
    if len(newnamearray) > 0:
        newaep(meta,newnamearray,baseurl)

def check_bad_offset(offset):
    # Normalize a trailing "+HH:MM" UTC offset to "+HHMM" so strptime's %z accepts it.
    try:
        if ":" == offset[-3:-2]:
            offset = offset[:-3] + offset[-2:]
        return offset  # the original only returned inside the if, yielding None otherwise
    except Exception:
        return offset

def fix_bad_timestamp(timestamp):
    parsed_timestamp = pd.to_datetime(timestamp)
    return parsed_timestamp

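# Illustrative sketch (hypothetical values):
#   check_bad_offset('2023-04-01T12:00:00-04:00') -> '2023-04-01T12:00:00-0400'
#   check_bad_offset(None) -> None  (the TypeError is swallowed by the except path)
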
S = requests.Session()
S.verify = False

def southcentral():
    Sp = requests.Session()
    Sp.verify = False
    #Sp.proxies.update(proxies)
    temp = Sp.get(southcentraljson).text
    outageinfo = json.loads(temp)
    # print(outageinfo)
    if len(outageinfo) != 0:
        for i in outageinfo:
            id = i.get('outageRecID')
            lat = i.get('outagePoint').get('lat')
            lon = i.get('outagePoint').get('lng')
            start = i.get('outageStartTime')
            end = i.get('outageEndTime')
            cause = i.get('cause')
            initial = i.get('customersOutInitially')
            now = i.get('customersOutNow')
            change = i.get('outageModifiedTime')
            crew = i.get('outageWorkStatus')
            # change = check_bad_offset(change)
            # start = check_bad_offset(start)
            # end = check_bad_offset(end)

            if start is not None:
                start = fix_bad_timestamp(start)
            if end is not None:
                end = fix_bad_timestamp(end)
            if change is not None:
                change = fix_bad_timestamp(change)
            # change = datetime.strptime(change,"%Y-%m-%dT%H:%M:%S%f%z")

            current_timestamp = str(datetime.utcnow())
            sql = "INSERT INTO power (lat,lon,start,derivedstart,cause,outagen,crew_status,peakoutage,pointgeom,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s) on conflict (pointgeom) do update set (outagen, cause, start, crew_status,lastchange) = (%s, %s, %s, %s, %s)"
            vals = (lat,lon, start, current_timestamp, cause, now, crew, initial,id,change,'True',now,cause,start,crew,change)
            cursor.execute(sql,vals)
            conn.commit()

def grayson():
    outageinfo = json.loads(S.get(graysonrecc).text)
    if len(outageinfo) != 0:
        for i in outageinfo:
            id = i.get('outageRecID')
            lat = i.get('outagePoint').get('lat')
            lon = i.get('outagePoint').get('lng')
            start = i.get('outageStartTime')
            end = i.get('outageEndTime')
            cause = i.get('cause')
            initial = i.get('customersOutInitially')
            now = i.get('customersOutNow')
            change = i.get('outageModifiedTime')
            crew = i.get('outageWorkStatus')
            # change = check_bad_offset(change)
            # start = check_bad_offset(start)
            # end = check_bad_offset(end)

            if start is not None:
                start = fix_bad_timestamp(start)
            if end is not None:
                end = fix_bad_timestamp(end)
            if change is not None:
                change = fix_bad_timestamp(change)
            # change = datetime.strptime(change,"%Y-%m-%dT%H:%M:%S%f%z")

            current_timestamp = str(datetime.utcnow())
            sql = "INSERT INTO power (lat,lon,start,derivedstart,cause,outagen,crew_status,peakoutage,pointgeom,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s) on conflict (pointgeom) do update set (outagen, cause, start, crew_status,lastchange) = (%s, %s, %s, %s, %s)"
            vals = (lat,lon, start, current_timestamp, cause, now, crew, initial,id,change,'True',now,cause,start,crew,change)
            cursor.execute(sql,vals)
            conn.commit()

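# Note: grayson() and southcentral() above parse the same co-op outage
# schema (outageRecID, outagePoint, customersOutNow, ...) and share the
# same 16-placeholder upsert: 11 INSERT columns plus 5 ON CONFLICT updates.
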
def check_outages(meta,namearray,baseurl):
    metainfo = json.loads(S.get(meta).text)
    metadir = metainfo['directory']
    for i in namearray:
        url = baseurl + metadir + '/outages/' + i + '.json'
        outage = S.get(url)
        if outage.headers.get('Content-Type').startswith('application/octet-stream'):
            tempdata = json.loads(outage.text)
            for j in tempdata['file_data']:
                id = j.get('id')
                try:
                    custa = j.get('desc').get('cust_a').get('val')
                except Exception:
                    continue
                pointgeom = j.get('geom').get('p')
                if len(pointgeom) == 1:
                    pointlatlon = polyline.decode(pointgeom[0])
                    lat = pointlatlon[0][0]
                    lon = pointlatlon[0][1]
                else:
                    continue
                areageom = j.get('geom').get('a')
                if areageom is not None:
                    areageom = remove_external_curly_braces(areageom)

                cause = j.get('desc').get('cause')
                start = j.get('desc').get('start')
                if start is not None:
                    start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%f%z")
                etr = j.get('desc').get('etr')
                if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
                if etr is not None:
                    etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%f%z")
                incid = j.get('desc').get('inc_id')
                crew_status = j.get('desc').get('crew_status')
                current_timestamp = str(datetime.utcnow())
                sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,genericid,derivedstart,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
                vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,id,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
                cursor.execute(sql,vals)

    conn.commit()

try:
    southcentral()
except Exception as e:
    print(e)

try:
    grayson()
except Exception as e:
    print(e)
#try:
#    newaep(aepwvmeta,aepwvnew,aepwvbase)
#except Exception as e:
#    print(e)
#try:
#    newaep(aepohmeta,aepohnew,aepohbase)
#except Exception as e:
#    print(e)
#try:
#    newaep(aepkymeta,aepkynew,aepkybase)
#except Exception as e:
#    print(e)
try:
    kubra_aep(kubrabase,aepwvcluster,aepwvnew,aepwvkubra)
except Exception as e:
    print(e)
try:
    kubra_aep(kubrabase,aepohcluster,aepohnew,aepohkubra)
except Exception as e:
    print(e)
try:
    kubra_aep(kubrabase,aepkycluster,aepkynew,aepkykubra)
except Exception as e:
    print(e)
try:
    #newaep(firstpowerwvmeta,firstpowerwvnew,firstpowerwvbase)
    kubra(firstenergybase,firstenergycluster,firstenergy,wvfemeta)
except Exception as e:
    print(e)
#try:
#    newaep(domvameta,dominionva,domvabase)
#except Exception as e:
#    print(e)

#kubra(kubrabase,aepwvcluster,aepwvnew,aepwvmeta)

nodupe = remove_dupes(allcountyoutages)
nodupekubra = remove_dupes(allkubraoutages)
nodupeaepkubra = remove_dupes(allaepkubracoutages)
#print(nodupe)

def insertaep(data):
    # Dry-run variant: the execute/commit lines are commented out, so this
    # only prints the tuples it would insert.
    for j in data:
        try:
            custa = j.get('desc').get('cust_a').get('val')
        except Exception:
            continue
        pointgeom = j.get('geom').get('p')
        if len(pointgeom) == 1:
            pointlatlon = polyline.decode(pointgeom[0])
            lat = pointlatlon[0][0]
            lon = pointlatlon[0][1]
        else:
            continue
        areageom = j.get('geom').get('a')
        if areageom is not None:
            areageom = remove_external_curly_braces(areageom)
        cause = j.get('desc').get('cause')
        start = j.get('desc').get('start')
        if start is not None:
            start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%f%z")
        etr = j.get('desc').get('etr')
        if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
        if etr is not None:
            etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%f%z")
        incid = j.get('desc').get('inc_id')
        crew_status = j.get('desc').get('crew_status')
        current_timestamp = str(datetime.utcnow())
        #sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
        vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
        print(vals)
        #cursor.execute(sql,vals)
        #conn.commit()

if len(nodupe) > 0:
    insertaep(nodupe)
if len(nodupekubra) > 0:
    insertkubra(nodupekubra)
if len(nodupeaepkubra) > 0:
    insert_kubra_aep(nodupeaepkubra)

cursor.execute('UPDATE public.power SET realgeom = ST_SetSRID(ST_MakePoint(lon, lat), 4326) where (lat is not null and lon is not null and realgeom is null)')
cursor.execute('UPDATE public.power SET peakoutage = outagen where outagen > peakoutage')
cursor.execute('update public.power set county = county.countyname from public.county where ST_contains(county.geom,power.realgeom) and power.county is null')
cursor.execute('update public.power set cwa = fzone.cwa from public.fzone where ST_contains(fzone.geom,power.realgeom) and power.cwa is null')
cursor.execute('update public.power set state = county.state from public.county where ST_contains(county.geom,power.realgeom) and power.state is null')
cursor.execute('update public.power set startguess = least(start,derivedstart)')
cursor.execute('update public.power set realareageom = st_linefromencodedpolyline(areageom) where areageom is not null and realareageom is null')
cursor.execute("update power set pointgeom = NULL where lastchange < now() - interval '2 hours'")
conn.commit()

cursor.execute("update power set active = true where lastchange > now() - interval '30 minutes'")
cursor.execute("update power set active = false where lastchange < now() - interval '30 minutes'")
conn.commit()
#cursor.execute("delete from power where cwa != 'RLX'")
cursor.execute("delete from power where lastchange < now() - interval '365 days'")
conn.commit()

#print(allkubraoutages)
cursor.close()
conn.close()
447
power3.py
Normal file
@@ -0,0 +1,447 @@
import requests
import polyline
import json
import psycopg2
import psycopg2.extensions
from datetime import datetime, timezone
from geojson import Point, Feature, FeatureCollection, dump
import re
import logging

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('power3.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()
proxies = {"http":"http://nws:nws@localhost:9000"}

aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json"
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
wvfemeta = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
aepwvkubrameta = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
aepohkubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
aepkykubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'

aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
graysoncounty = 'https://outages.graysonrecc.com/data/boundaries.json'
flemingjson = 'https://outage.fme.coop/data/boundaries.json'

#buckeye rec
#https://outage.buckeyerec.coop/maps/OutageWebMap/maps/GWT.rpc
#washington
#https://weci.ebill.coop/woViewer/MapWiseWeb/GWT.rpc
allcountyoutages = []

S = requests.Session()

def fleming():
    logger.info("Starting fleming()")
    state = 'KY'
    company = 'FLEM'
    try:
        temp = S.get(flemingjson)
        temp.raise_for_status()
        tempdata = json.loads(temp.text)
        for j in tempdata[0]['boundaries']:
            outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
            allcountyoutages.append(outageinfo)
        logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} boundaries from fleming")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for fleming: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for fleming: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in fleming: {e}")

def bigsandy():
    logger.info("Starting bigsandy()")
    state = 'OH'
    company = 'BS'
    try:
        temp = S.get('https://outagemap.bigsandyrecc.com/data/boundaries.json')
        temp.raise_for_status()
        tempdata = json.loads(temp.text)
        for j in tempdata[0]['boundaries']:
            outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
            allcountyoutages.append(outageinfo)
        logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} boundaries from bigsandy")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for bigsandy: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for bigsandy: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in bigsandy: {e}")

def southcentralpower():
    logger.info("Starting southcentralpower()")
    company = 'SCP'
    url = 'https://outage.southcentralpower.com/data/boundaries.json'
    Sp = requests.Session()
    # Sp.proxies.update(proxies)
    try:
        response = Sp.get(url)
        response.raise_for_status()
        tempdata = json.loads(response.text)
        state = 'OH'
        for j in tempdata[0]['boundaries']:
            outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
            allcountyoutages.append(outageinfo)
        logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} boundaries from southcentralpower")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for southcentralpower: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for southcentralpower: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in southcentralpower: {e}")

#wv https://kubra.io/data/e2ae0326-9912-436a-9355-eb2687e798b1/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json
def ku_get_url():
    logger.info("Starting ku_get_url()")
    try:
        url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
        r = requests.get(url)
        r.raise_for_status()
        x = re.search(r"instanceId: '(.*?)',", r.text)
        if not x:
            logger.error("Could not find instanceId in ku_get_url")
            return None
        urlcom = x.group(1)
        urlcom = 'https://kubra.io/stormcenter/api/v1/stormcenters/' + urlcom + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false'
        stuff = S.get(urlcom)
        stuff.raise_for_status()
        jsonstuff = json.loads(stuff.text)
        interval_data = jsonstuff.get('data').get('interval_generation_data')
        if not interval_data:
            logger.error("Could not find interval_generation_data in ku_get_url")
            return None
        urlcom = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
        logger.info("Successfully generated URL for ku")
        return urlcom
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed in ku_get_url: {e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error in ku_get_url: {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error in ku_get_url: {e}")
        return None

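# Illustrative sketch (hypothetical snippet): the instanceId regex pulls the
# UUID out of the stormcenter page's inline JavaScript, e.g.
#   re.search(r"instanceId: '(.*?)',", "instanceId: 'abc-123',").group(1)
#   -> 'abc-123'
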
def county_json(meta,url,jsonname):
    logger.info(f"Starting county_json for meta: {meta}")
    try:
        response = S.get(meta)
        response.raise_for_status()
        metainfo = json.loads(response.text)
        metadir = metainfo['directory']
        url = url + metadir + jsonname
        outage = S.get(url)
        outage.raise_for_status()
        logger.info(f"Successfully fetched county JSON from {url}")
        return outage
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed in county_json: {e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error in county_json: {e}")
        return None
    except KeyError as e:
        logger.error(f"Key error in county_json (missing 'directory'): {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error in county_json: {e}")
        return None

def ku():
    ku = []
    url = ku_get_url()
    if url is None:  # ku_get_url() already logged the failure
        return
    data = S.get(url).text
    tempdata = json.loads(data)
    temp = tempdata['file_data']['areas'][2]['areas'][0]['areas']
    temp1 = tempdata['file_data']['areas'][2]['areas'][1]['areas']
    temp2 = tempdata['file_data']['areas'][1]['areas'][0]['areas']

    for i in temp:
        ku.append(i)
    for i in temp1:
        ku.append(i)
    for i in temp2:
        ku.append(i)
    for o in ku:
        outageinfo = o['cust_a']['val'],o['cust_s'],o['name'].capitalize(),o['state'],o['utility']
        allcountyoutages.append(outageinfo)

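# Illustrative note (assumed report layout): the three hard-coded index
# paths in ku() pull county lists from fixed positions in the KU/LG&E
# report tree ('areas'[2][0], 'areas'[2][1], 'areas'[1][0]); if the
# utility reorders its report, these indices break and the module-level
# try block below logs the error.
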
def grayson():
    company = 'GRE'
    outage = S.get(graysoncounty)
    if outage.headers.get('Content-Type').startswith('application/json'):
        tempdata = json.loads(outage.text)
        state = 'KY'
        for j in tempdata[0]['boundaries']:
            outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
            allcountyoutages.append(outageinfo)

def aep_county_vawv(meta,url,jsonname):
    company = 'AEP'
    outage = county_json(meta,url,jsonname)
    if outage is None:  # county_json() returns None on failure
        return
    if outage.headers.get('Content-Type').startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        state = 'WV'
        for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
            allcountyoutages.append(outageinfo)
        state = 'VA'
        for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)

def aep_county_oh(meta,url,jsonname):
    company = 'AEP'
    state = 'OH'
    outage = county_json(meta,url,jsonname)
    if outage is None:  # county_json() returns None on failure
        return
    tempdata = json.loads(outage.text)
    for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
        outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
        allcountyoutages.append(outageinfo)

def aep_county_ky(meta,url,jsonname):
    company = 'AEP'
    state = 'KY'
    outage = county_json(meta,url,jsonname)
    if outage is None:  # county_json() returns None on failure
        return
    tempdata = json.loads(outage.text)
    for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
        outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
        allcountyoutages.append(outageinfo)

def firstenergy_county(meta,url,jsonname):
    company = 'FE'
    state = 'WV'
    outage = county_json(meta,url,jsonname)
    if outage is None:  # county_json() returns None on failure
        return
    if outage.headers.get('Content-Type').startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(),state, company
            allcountyoutages.append(outageinfo)

def get_kubra_hexes(url):
    outage = S.get(url)
    if outage.headers.get('Content-Type').startswith('application/json'):
        tempdata = json.loads(outage.text)
        bothhex = tempdata.get('data').get('cluster_interval_generation_data')
        hexes = bothhex.split('/')
        return hexes[2],hexes[3]
    return None  # implicit in the original; made explicit for the callers' guards

def kubra_fe(baseurl1,baseurl2,meta):
    hexes = get_kubra_hexes(meta)
    if hexes is None:  # metadata fetch failed or was not JSON
        return
    url = baseurl1 + hexes[1] + baseurl2
    company = 'FE'
    state = 'WV'
    outage = S.get(url)
    if outage.headers.get('Content-Type').startswith('application/json'):
        tempdata = json.loads(outage.text)
        for j in tempdata['file_data']['areas']:
            if j.get('key') == "county":
                outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('name').capitalize(),state,company
                allcountyoutages.append(outageinfo)

def kubra_aep(baseurl1,baseurl2,meta,company='AEP'):
    hexes = get_kubra_hexes(meta)
    if hexes is None:  # metadata fetch failed or was not JSON
        return
    url = baseurl1 + hexes[1] + baseurl2
    outage = S.get(url)
    if outage.headers.get('Content-Type').startswith('application/json'):
        tempdata = json.loads(outage.text)
        process_outage_data(tempdata,company)

def process_outage_data(data,company):
    """
    Identifies the data structure and loops through the county-level data.

    Args:
        data (dict): The parsed JSON data as a Python dictionary.
        company (str): Utility code stored alongside each county row.
    """
    # Navigate to the primary list of areas
    primary_areas = data.get("file_data", {}).get("areas", [])

    # If the list is empty, there's nothing to process
    if not primary_areas:
        print("No 'areas' data found.")
        return

    # --- This is the key logic to handle both formats ---
    # Check the key of the first item to determine the format
    first_item_key = primary_areas[0].get("key")

    if first_item_key == "state":
        # Format 1: Loop through each state object
        for state_area in primary_areas:
            state_name = state_area.get("name", "Unknown State")
            # Get the nested list of counties for this state
            county_list = state_area.get("areas", [])
            for county in county_list:
                # We are now at the county level
                if county.get("key") == "county":
                    outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(),county.get('state'),company
                    allcountyoutages.append(outageinfo)
    elif first_item_key == "county":
        # Format 2: The primary list is already the county list
        for county in primary_areas:
            # We are now at the county level
            if county.get("key") == "county":
                outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(),county.get('state'),company
                allcountyoutages.append(outageinfo)

    else:
        print("Unknown data format. Could not find 'state' or 'county' key.")

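# Illustrative sketch (hypothetical payloads) of the two shapes handled above:
#   format 1: {'file_data': {'areas': [{'key': 'state', 'name': 'WV',
#              'areas': [{'key': 'county', 'name': 'KANAWHA', ...}]}]}}
#   format 2: {'file_data': {'areas': [{'key': 'county', 'name': 'KANAWHA', ...}]}}
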
try:
    logger.info("Attempting kubra_fe for FE WV")
    kubra_fe('https://kubra.io/data/','/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json',wvfemeta)
except Exception as e:
    logger.error(f"Error in kubra_fe for FE WV: {e}")
try:
    logger.info("Attempting kubra_aep for AEP WV")
    kubra_aep('https://kubra.io/data/','/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json',aepwvkubrameta)
except Exception as e:
    logger.error(f"Error in kubra_aep for AEP WV: {e}")
try:
    logger.info("Attempting kubra_aep for AEP OH")
    kubra_aep('https://kubra.io/data/','/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json',aepohkubrameta)
except Exception as e:
    logger.error(f"Error in kubra_aep for AEP OH: {e}")
try:
    logger.info("Attempting kubra_aep for AEP KY")
    kubra_aep('https://kubra.io/data/','/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json',aepkykubrameta)
except Exception as e:
    logger.error(f"Error in kubra_aep for AEP KY: {e}")

try:
    logger.info("Attempting grayson")
    grayson()
except Exception as e:
    logger.error(f"Error in grayson: {e}")
try:
    logger.info("Attempting ku")
    ku()
except Exception as e:
    logger.error(f"Error in ku: {e}")
try:
    logger.info("Attempting southcentralpower")
    southcentralpower()
except Exception as e:
    logger.error(f"Error in southcentralpower: {e}")
try:
    logger.info("Attempting bigsandy")
    bigsandy()
except Exception as e:
    logger.error(f"Error in bigsandy: {e}")
try:
    logger.info("Attempting fleming")
    fleming()
except Exception as e:
    logger.error(f"Error in fleming: {e}")

current_timestamp = str(datetime.utcnow())
#for i in allcountyoutages:
#    sql = 'insert into countyoutages (outages, served, county, state, update, company) values (%s, %s, %s, %s, %s, %s)'
#    val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
#    cursor.execute(sql,val)
#conn.commit()

# 1. Build the list of value tuples
all_values = []
for i in allcountyoutages:
    # Make sure the order matches the SQL placeholders
    val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
    all_values.append(val)

# 2. Define the SQL statement ONCE
sql = 'INSERT INTO countyoutages (outages, served, county, state, update, company) VALUES (%s, %s, %s, %s, %s, %s)'

# 3. Execute the command ONCE with all the data
if all_values:  # Only execute if there's data to insert
    try:
        cursor.executemany(sql, all_values)
        conn.commit()  # Commit after successful execution
        logger.info(f"Successfully inserted {len(all_values)} records into the database")
    except Exception as e:
        logger.error(f"Database error during insert: {e}")
        conn.rollback()  # Rollback in case of error
else:
    logger.warning("No data to insert into the database")

cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')
conn.commit()

#cursor.execute("delete from countyoutages where cwa != 'RLX'")
cursor.execute("delete from countyoutages where cwa is null")
#cursor.execute("delete from countyoutages where update < now() - interval '365 days'")
conn.commit()

#print(allcountyoutages)
cursor.close()
conn.close()
967
powerapi.php
Normal file
@@ -0,0 +1,967 @@
<?php
// Connecting, selecting database
//$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
// or die('Could not connect: ' . pg_last_error());

try {
    $dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws");
    if ($dbconn === false) {
        throw new Exception('Could not connect: ' . pg_last_error());
    }
} catch (Exception $e) {
    http_response_code(500);
    die('Database connection failed: ' . $e->getMessage());
}

//no gets, current point outage info
//if(empty($_GET)) {
//$result = pg_query_params($dbconn,
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause,'area_geometry', ST_AsGeoJSON(COALESCE(realareageom, realgeom))::json))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
//array('RLX')) or die('Query failed: ' . pg_last_error());
//$resultArray = pg_fetch_all($result);
//echo($resultArray[0]['json_build_object']);
//pg_free_result($result);
//}

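// Route summary (derived from the handlers below; all via GET):
//   (no parameters)                  current point outages as GeoJSON
//   ?states                          state boundary polygons as GeoJSON
//   ?max&start=..&end=..             per-county peak outages in a time window
//   ?county                          latest per-county outage totals
//   ?countyarchive&start=..&end=..   per-county time series in a window
//   ?archivepoint&start=..&end=..    archived point outages as GeoJSON
//   ?svr=current | ?svr=archive      severe-weather warning polygons
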
if (empty($_GET)) {
    try {
        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'time', startguess,
                            'county', county,
                            'state', state,
                            'outage', outagen,
                            'lastchange', lastchange,
                            'cause', cause,
                            'area_geometry', ST_AsGeoJSON(COALESCE(realareageom, realgeom))::json
                        )
                    )
                    ORDER BY startguess ASC
                )
            )
            FROM power
            WHERE cwa = $1 AND active = true
        ";

        $result = pg_query_params($dbconn, $query, array('RLX'));
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        // Check if we got results
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            header('Content-Type: application/json');
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        http_response_code(500);
        die('Query execution failed: ' . $e->getMessage());
    }
}

//if (isset($_GET['states'])) {
//$result = pg_query($dbconn,
//"SELECT jsonb_build_object('type', 'FeatureCollection','features', jsonb_agg(features.feature)) FROM (SELECT jsonb_build_object('type', 'Feature','geometry', ST_AsGeoJSON(ST_Transform(geom, 4326))::jsonb,'properties', to_jsonb(properties) - 'geom') AS feature FROM (SELECT * FROM states where state = 'WV' or state = 'VA' or state = 'KY' or state ='VA' or state = 'MD' or state = 'PA' or state = 'OH') AS properties) AS features") or die('Query failed: ' . pg_last_error());
//$resultArray = pg_fetch_all($result);
//echo($resultArray[0]['jsonb_build_object']);
//pg_free_result($result);
//}

if (isset($_GET['states'])) {
    try {
        $query = "
            SELECT jsonb_build_object(
                'type', 'FeatureCollection',
                'features', jsonb_agg(features.feature)
            )
            FROM (
                SELECT jsonb_build_object(
                    'type', 'Feature',
                    'geometry', ST_AsGeoJSON(ST_Transform(geom, 4326))::jsonb,
                    'properties', to_jsonb(properties) - 'geom'
                ) AS feature
                FROM (
                    SELECT *
                    FROM states
                    WHERE state IN ('WV', 'VA', 'KY', 'MD', 'PA', 'OH')
                ) AS properties
            ) AS features
        ";

        $result = pg_query($dbconn, $query);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        // Set proper JSON header and handle output
        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['jsonb_build_object'])) {
            echo $resultArray[0]['jsonb_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        http_response_code(500);
        header('Content-Type: application/json');
        echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
        exit;
    }
}

//county/state max
//if($_GET['max'] ?? null) {

//if($_GET['start'] ?? null) {
//$starttime = pg_escape_string($_GET['start']);
//if($_GET['end'] ?? null) {
//$endtime = pg_escape_string($_GET['end']);

//$result = pg_query_params($dbconn,
////select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
////"select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
//"select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
//array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
//
//while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
//    $array[] = $line;
//}
//echo json_encode($array);
//pg_free_result($result);
//}}}

if (isset($_GET['max'])) {
    if (isset($_GET['start']) && isset($_GET['end'])) {
        try {
            $starttime = pg_escape_string($_GET['start']);
            $endtime = pg_escape_string($_GET['end']);

            $query = "
                SELECT DISTINCT ON (county, state)
                    max(outage) as max_outage,
                    county,
                    state
                FROM (
                    SELECT DISTINCT ON (county, state, update)
                        county,
                        state,
                        SUM(outages) as outage,
                        update as time,
                        SUM(served) as served
                    FROM countyoutages
                    WHERE update > $2
                        AND update < $3
                        AND cwa = $1
                    GROUP BY county, state, update
                ) as subquery
                GROUP BY county, state
            ";

            $result = pg_query_params(
                $dbconn,
                $query,
                ['RLX', $starttime, $endtime]
            );

            if ($result === false) {
                throw new Exception('Query failed: ' . pg_last_error());
            }

            $results = [];
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $results[] = $line;
            }

            header('Content-Type: application/json');
            echo json_encode($results);

            pg_free_result($result);
        } catch (Exception $e) {
            header('Content-Type: application/json');
            http_response_code(500);
            echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
            exit;
        }
    } else {
        header('Content-Type: application/json');
        http_response_code(400);
        echo json_encode(['error' => 'Both start and end parameters are required']);
    }
}

//county current
//"SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
//if($_GET['county'] ?? null) {
//$result = pg_query_params($dbconn,
//"SELECT DISTINCT ON (county, state) county, state, SUM(outages) as outage, update as time, SUM(served) as served, round((SUM(outages) / SUM(served))*100,2) as perout FROM countyoutages WHERE update = (SELECT MAX(update) FROM countyoutages) AND cwa = $1 GROUP BY county, state, update",
//array('RLX')) or die('Query failed: ' . pg_last_error());

//while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
//$array[] = $line;
//}
//echo json_encode($array ?? null);
//pg_free_result($result);
//}

if (isset($_GET['county'])) {
    try {
        $query = "
            SELECT DISTINCT ON (county, state)
                county,
                state,
                SUM(outages) as outage,
                update as time,
                SUM(served) as served,
                ROUND(CAST((SUM(outages)::FLOAT / SUM(served)) * 100 AS NUMERIC), 2) as perout
            FROM countyoutages
            WHERE update = (SELECT MAX(update) FROM countyoutages)
                AND cwa = $1
            GROUP BY county, state, update
        ";

        $result = pg_query_params($dbconn, $query, ['RLX']);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = [];
        while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $results[] = $line;
        }

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
        exit;
    }
}

//county archive delete after testing
if($_GET['countyarchiveold'] ?? null) {

    if($_GET['start'] ?? null) {
        $starttime = pg_escape_string($_GET['start']);
        if($_GET['end'] ?? null) {
            $endtime = pg_escape_string($_GET['end']);

            $result = pg_query_params($dbconn,
                "select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update",
                array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());

            $array = []; // initialize so json_encode() is valid when no rows match
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $array[] = $line;
            }
            echo json_encode($array);
        }
    }
    pg_free_result($result);
}

if (isset($_GET['countyarchive'])) {
    if (isset($_GET['start']) && isset($_GET['end'])) {
        try {
            $starttime = pg_escape_string($_GET['start']);
            $endtime = pg_escape_string($_GET['end']);

            $query = "
                SELECT DISTINCT ON (county, state, update)
                    county,
                    state,
                    SUM(outages) as outage,
                    update as time,
                    SUM(served) as served
                FROM countyoutages
                WHERE update > $2
                    AND update < $3
                    AND cwa = $1
                GROUP BY county, state, update
            ";

            $result = pg_query_params($dbconn, $query, ['RLX', $starttime, $endtime]);
            if ($result === false) {
                throw new Exception('Query failed: ' . pg_last_error());
            }

            $results = [];
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $results[] = $line;
            }

            header('Content-Type: application/json');
            echo json_encode($results);

            pg_free_result($result);
        } catch (Exception $e) {
            header('Content-Type: application/json');
            http_response_code(500);
            echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
            if (isset($result)) {
                pg_free_result($result);
            }
            exit;
        }
    } else {
        header('Content-Type: application/json');
        http_response_code(400);
        echo json_encode(['error' => 'Both start and end parameters are required']);
    }
}

//Archive point data
if($_GET['archivepointold'] ?? null) {
    $starttime = pg_escape_string($_GET['start']);
    $endtime = pg_escape_string($_GET['end']);
    $result = pg_query_params($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
        array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
    pg_free_result($result);
}

if (isset($_GET['archivepoint'])) {
    try {
        if (!isset($_GET['start']) || !isset($_GET['end'])) {
            throw new Exception('Both start and end parameters are required');
        }

        $starttime = pg_escape_string($_GET['start']);
        $endtime = pg_escape_string($_GET['end']);

        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'time', startguess,
                            'county', county,
                            'state', state,
                            'outage', outagen,
                            'lastchange', lastchange,
                            'cause', cause
                        )
                    )
                    ORDER BY startguess ASC
                )
            )
            FROM power
            WHERE cwa = $1
              AND startguess > $2
              AND lastchange < $3
        ";

        $result = pg_query_params($dbconn, $query, ['RLX', $starttime, $endtime]);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'required') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        // Only free a real result handle; $result is false after a failed query
        if (isset($result) && $result !== false) {
            pg_free_result($result);
        }
        exit;
    }
}
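
// Example request for the handler above (illustrative URL; the script name and
// timestamp format are whatever this API is deployed as):
//   /power.php?archivepoint=1&start=2023-04-01%2000:00&end=2023-04-02%2000:00
// The FeatureCollection is assembled entirely inside Postgres by
// json_build_object()/json_agg(), so PHP only echoes the single-row result.
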
//if($_GET['svr']=='current') {
//$result = pg_query_params($dbconn,
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
//$resultArray = pg_fetch_all($result);
//echo($resultArray[0]['json_build_object']);
//}

if (($_GET['svr'] ?? null) == 'current') {
    $result = pg_query($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
    pg_free_result($result);
}

if (($_GET['svr'] ?? null) == 'archiveold') {
    if ($_GET['start'] ?? null) {
        $starttime = pg_escape_string($_GET['start']);
        if ($_GET['end'] ?? null) {
            $endtime = pg_escape_string($_GET['end']);

            $result = pg_query_params($dbconn,
                "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2",
                array($starttime, $endtime)) or die('Query failed: ' . pg_last_error());
            $resultArray = pg_fetch_all($result);
            //echo '<pre>'; print_r($resultArray); echo '</pre>';
            echo($resultArray[0]['json_build_object']);
        }
    }

    if (!isset($_GET['start']) && !isset($_GET['end'])) {
        $result = pg_query($dbconn,
            "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
        $resultArray = pg_fetch_all($result);
        echo($resultArray[0]['json_build_object']);
    }
    if (isset($result)) {
        pg_free_result($result);
    }
}

if (isset($_GET['svr']) && $_GET['svr'] === 'archive') {
    try {
        $result = null;

        if (isset($_GET['start']) && isset($_GET['end'])) {
            $starttime = pg_escape_string($_GET['start']);
            $endtime = pg_escape_string($_GET['end']);

            $query = "
                SELECT json_build_object(
                    'type', 'FeatureCollection',
                    'features', json_agg(
                        json_build_object(
                            'type', 'Feature',
                            'geometry', ST_AsGeoJSON(nwspoly)::json,
                            'properties', json_build_object(
                                'issue', issue,
                                'end', endtime,
                                'vtec', vtec,
                                'type', warntype
                            )
                        )
                    )
                )
                FROM svr
                WHERE issue > $1
                  AND endtime < $2
            ";

            $result = pg_query_params($dbconn, $query, [$starttime, $endtime]);
        } elseif (!isset($_GET['start']) && !isset($_GET['end'])) {
            $query = "
                SELECT json_build_object(
                    'type', 'FeatureCollection',
                    'features', json_agg(
                        json_build_object(
                            'type', 'Feature',
                            'geometry', ST_AsGeoJSON(nwspoly)::json,
                            'properties', json_build_object(
                                'issue', issue,
                                'end', endtime,
                                'vtec', vtec,
                                'type', warntype
                            )
                        )
                    )
                )
                FROM svr
                WHERE issue < NOW() - INTERVAL '24 hours'
                  AND endtime > NOW() - INTERVAL '24 hours'
            ";

            $result = pg_query($dbconn, $query);
        } else {
            throw new Exception('Both start and end parameters are required together');
        }

        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result) && $result !== false) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'required') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}

if ($_GET['svrpolysold'] ?? null) {
    $query = "select vtec,outagesvalid,polygonpop,outagesbuffer,lsrids from svr where EXTRACT(EPOCH FROM (current_timestamp - endtime))/60/60/24 < 60";
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error());
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}

if (isset($_GET['svrpolys'])) {
    try {
        $query = "
            SELECT
                vtec,
                outagesvalid,
                polygonpop,
                outagesbuffer,
                lsrids
            FROM svr
            WHERE EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - endtime)) / 60 / 60 / 24 < 60
        ";

        $result = pg_query($dbconn, $query);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = [];
        while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $results[] = $line;
        }

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result) && $result !== false) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}
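
// The EXTRACT(EPOCH ...) / 60 / 60 / 24 < 60 predicate above keeps warnings
// whose endtime falls within the last 60 days; an equivalent (and
// index-friendlier) form would be:
//   WHERE endtime > CURRENT_TIMESTAMP - INTERVAL '60 days'
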
if (isset($_GET['poweridsold'])) {
    $powerids = $_GET['powerids'];

    // Convert the comma-separated string to an array
    $poweridArray = explode(',', $powerids);

    // Sanitize and prepare array values for SQL query
    $sanitizedIds = array_map('intval', $poweridArray);

    // Prepare placeholders for the query
    $placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($sanitizedIds))));

    // Prepare and execute the query with pg_query_params
    $sql = "SELECT lat,lon,lastchange,startguess,peakoutage,cause,lsrtime,lsrref,(lsrtime AT TIME ZONE 'America/New_York')::timestamp as lsrlocal FROM power WHERE id IN ($placeholders)";
    $result = pg_query_params($dbconn, $sql, $sanitizedIds);

    if (!$result) {
        echo 'Query failed: ' . pg_last_error();
        exit;
    }

    // Fetch and output the results
    $results = pg_fetch_all($result);
    echo json_encode($results);

    // Free resultset
    pg_free_result($result);
}
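
// How the $placeholders string above is built, step by step (illustrative,
// for three IDs): range(1, 3) gives [1, 2, 3], the array_map produces
// ['$1', '$2', '$3'], and implode joins them into "$1,$2,$3", which
// pg_query_params then pairs positionally with $sanitizedIds.
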
if (isset($_GET['powerids'])) {
    try {
        $powerids = $_GET['powerids'];

        // Validate input exists and isn't empty
        if (empty($powerids)) {
            throw new Exception('No power IDs provided');
        }

        // Convert comma-separated string to array and sanitize
        $poweridArray = explode(',', $powerids);
        $sanitizedIds = array_filter(array_map('intval', $poweridArray));

        if (empty($sanitizedIds)) {
            throw new Exception('Invalid power ID format');
        }

        // Prepare placeholders for the query
        $placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($sanitizedIds))));

        $query = "
            SELECT
                lat,
                lon,
                lastchange,
                startguess,
                peakoutage,
                cause,
                lsrtime,
                lsrref,
                (lsrtime AT TIME ZONE 'America/New_York')::timestamp as lsrlocal
            FROM power
            WHERE id IN ($placeholders)
        ";

        $result = pg_query_params($dbconn, $query, $sanitizedIds);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = pg_fetch_all($result) ?: [];

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result) && $result !== false) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'Invalid') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}

if (isset($_GET['poweridsgeojsonold'])) {
    // Read the IDs from the same parameter that triggered this handler
    $powerids = $_GET['poweridsgeojsonold'];

    $poweridArray = explode(',', $powerids);

    $sanitizedIds = array_map('intval', $poweridArray);

    $placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($sanitizedIds))));

    $sql = "
        SELECT json_build_object(
            'type', 'FeatureCollection',
            'features', json_agg(
                json_build_object(
                    'type', 'Feature',
                    'geometry', ST_AsGeoJSON(realgeom)::json,
                    'properties', json_build_object(
                        'id', id,
                        'time', (startguess AT TIME ZONE 'UTC')::timestamp,
                        'county', county,
                        'state', state,
                        'cause', cause,
                        'outage', peakoutage,
                        'lsrtime', (lsrtime AT TIME ZONE 'UTC')::timestamp
                    )
                ) ORDER BY startguess ASC
            )
        )
        FROM power
        WHERE id IN ($placeholders);";

    // $sql = "SELECT lat,lon,lastchange,startguess,peakoutage,cause,lsrtime,lsrref,(lsrtime AT TIME ZONE 'America/New_York')::timestamp as lsrlocal FROM power WHERE id IN ($placeholders)";
    $result = pg_query_params($dbconn, $sql, $sanitizedIds);

    if (!$result) {
        echo 'Query failed: ' . pg_last_error();
        exit;
    }

    $resultArray = pg_fetch_all($result);

    // Output the JSON object
    echo($resultArray[0]['json_build_object']);

    pg_free_result($result);
}

if (isset($_GET['poweridsgeojson'])) {
    try {
        $powerids = $_GET['poweridsgeojson'];

        if (empty($powerids)) {
            throw new Exception('No power IDs provided');
        }

        // Convert and sanitize power IDs
        $poweridArray = explode(',', $powerids);
        $sanitizedIds = array_filter(array_map('intval', $poweridArray));

        if (empty($sanitizedIds)) {
            throw new Exception('Invalid power ID format');
        }

        // Prepare placeholders
        $placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($sanitizedIds))));

        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'id', id,
                            'time', (startguess AT TIME ZONE 'UTC')::timestamp,
                            'county', county,
                            'state', state,
                            'cause', cause,
                            'outage', peakoutage,
                            'lsrtime', (lsrtime AT TIME ZONE 'UTC')::timestamp
                        )
                    ) ORDER BY startguess ASC
                )
            )
            FROM power
            WHERE id IN ($placeholders)
        ";

        $result = pg_query_params($dbconn, $query, $sanitizedIds);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result) && $result !== false) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'Invalid') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}
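
// Note on the query above: the ORDER BY lives *inside* json_agg(), which is
// what orders the features array; an outer ORDER BY would do nothing here,
// since the statement returns a single aggregated row.
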
// Assume $dbconn is your established PostgreSQL connection handle
// Example: $dbconn = pg_connect("host=localhost dbname=yourdb user=youruser password=yourpass");
// if (!$dbconn) { die("Connection failed"); }

if (isset($_GET['polygongeojson'])) {
    $result = null; // Initialize result to null for catch block safety
    try {
        $polygonGeoJsonString = $_GET['polygongeojson'];

        if (empty($polygonGeoJsonString)) {
            throw new Exception('No GeoJSON polygon provided', 400); // Use exception code for status
        }

        // 1. Validate that the input is valid JSON.
        // We decode here primarily to check JSON validity; the *original string*
        // is passed to PostGIS's ST_GeomFromGeoJSON for robustness.
        $polygonGeoJson = json_decode($polygonGeoJsonString);
        if (json_last_error() !== JSON_ERROR_NONE) {
            throw new Exception('Invalid JSON format: ' . json_last_error_msg(), 400);
        }

        // 2. Optional: basic structural validation (PostGIS does the full validation).
        // Both Polygon and MultiPolygon are accepted, since ST_Within works with either;
        // tighten the check if you strictly need MultiPolygon only.
        if (!is_object($polygonGeoJson) || !isset($polygonGeoJson->type) || !in_array($polygonGeoJson->type, ['MultiPolygon', 'Polygon'])) {
            throw new Exception('Input GeoJSON must be of type Polygon or MultiPolygon.', 400);
        }
        if (!isset($polygonGeoJson->coordinates) || !is_array($polygonGeoJson->coordinates)) {
            throw new Exception('Input GeoJSON must have a coordinates array.', 400);
        }

        // 3. Prepare the PostgreSQL query using PostGIS functions:
        //    - ST_GeomFromGeoJSON($1): parses the input GeoJSON string.
        //    - ST_SetSRID(..., 4326): assigns the WGS84 SRID (standard for GeoJSON); adjust if your data uses a different SRID.
        //    - ST_Within(realgeom, ...): checks if the outage geometry is within the provided polygon.
        //    - Ensure the 'realgeom' column has a spatial index for performance!
        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'id', id,
                            'time', (startguess AT TIME ZONE 'UTC')::timestamp,
                            'county', county,
                            'state', state,
                            'cause', cause,
                            'outage', peakoutage,
                            'lsrtime', (lsrtime AT TIME ZONE 'UTC')::timestamp
                        )
                    ) ORDER BY startguess ASC -- Optional ordering
                )
            )
            FROM power
            WHERE ST_Within(realgeom, ST_SetSRID(ST_GeomFromGeoJSON($1), 4326))
        ";
        // Note: if 'realgeom' might be NULL, you might add "AND realgeom IS NOT NULL"

        // 4. Execute the query with the GeoJSON string as a parameter
        $params = [$polygonGeoJsonString];
        $result = pg_query_params($dbconn, $query, $params);

        if ($result === false) {
            // Check for specific PostGIS errors related to invalid GeoJSON input
            $pgError = pg_last_error($dbconn);
            if (strpos($pgError, 'invalid GeoJSON representation') !== false || strpos($pgError, 'ParseException') !== false || strpos($pgError, 'Invalid polygon') !== false) {
                throw new Exception('Invalid GeoJSON geometry data provided: ' . $pgError, 400);
            } else {
                // Throw a generic server error for other query failures
                throw new Exception('Query failed: ' . $pgError, 500);
            }
        }

        // 5. Fetch and output results
        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            // Ensure a null result from json_agg (no features found) becomes an empty array.
            // array_key_exists() is required here: isset() reports false for null values,
            // so the original isset() check could never fire.
            $outputJson = $resultArray[0]['json_build_object'];
            $outputData = json_decode($outputJson, true);
            if (array_key_exists('features', $outputData) && $outputData['features'] === null) {
                $outputData['features'] = [];
                echo json_encode($outputData);
            } else {
                echo $outputJson; // Output the JSON directly from Postgres
            }
        } else {
            // Should ideally be handled by the check above, but as a fallback
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);

    } catch (Exception $e) {
        // 6. Error handling
        // (is_resource() no longer works on PHP >= 8.1, where pg results are PgSql\Result objects)
        if (isset($result) && $result !== false) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        // Use exception code for status if provided (>= 400), default to 500
        $statusCode = ($e->getCode() >= 400 && $e->getCode() < 600) ? $e->getCode() : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit; // Stop script execution after error
    }
}

// Add else block if needed for when the parameter is not set
// else {
//     // Handle case where $_GET['polygongeojson'] is not present
//     header('Content-Type: application/json');
//     http_response_code(400); // Bad Request
//     echo json_encode(['error' => 'Required parameter "polygongeojson" is missing.']);
//     exit;
// }
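
// Example call for the handler above (illustrative; a real request would
// URL-encode the parameter value):
//   ?polygongeojson={"type":"Polygon","coordinates":[[[-82,38],[-80,38],[-80,39],[-82,39],[-82,38]]]}
// Only outages whose realgeom lies within that ring are returned, courtesy of
// the ST_Within() filter.
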
pg_close($dbconn);
?>
172
powerapitest.php
Normal file
172
powerapitest.php
Normal file
@@ -0,0 +1,172 @@
<?php
// Connecting, selecting database
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
    or die('Could not connect: ' . pg_last_error());

//no gets: current point outage info
if (empty($_GET)) {
    $result = pg_query_params($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause)) order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
        array('RLX')) or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
}

//county/state max
if ($_GET['max'] ?? null) {
    if ($_GET['start'] ?? null) {
        $starttime = pg_escape_string($_GET['start']);
        if ($_GET['end'] ?? null) {
            $endtime = pg_escape_string($_GET['end']);

            $result = pg_query_params($dbconn,
                //select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
                "select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
                array('RLX', $starttime, $endtime)) or die('Query failed: ' . pg_last_error());

            $array = [];
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $array[] = $line;
            }
            echo json_encode($array);
        }
    }
}
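
// Reading the nested query above: the inner SELECT collapses each
// (county, state, update) snapshot into one summed outage row, and the outer
// DISTINCT ON (county, state) with max(outage) keeps each county's peak over
// the requested window. "potato" is just the mandatory subquery alias.
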
if (isset($_GET['county'])) {
    try {
        $query = "
            SELECT DISTINCT ON (county, state)
                county,
                state,
                SUM(outages) as outage,
                update as time,
                SUM(served) as served,
                ROUND(
                    CAST(
                        CASE
                            WHEN SUM(served) = 0 THEN NULL
                            ELSE (SUM(outages)::FLOAT / SUM(served)) * 100
                        END AS NUMERIC
                    ), 2
                ) as perout
            FROM countyoutages
            WHERE update = (SELECT MAX(update) FROM countyoutages)
              AND (cwa = $1 OR cwa = $2 OR cwa = $3 OR cwa = $4 OR cwa = $5 OR cwa = $6 OR cwa = $7)
            GROUP BY county, state, update
        ";

        $result = pg_query_params($dbconn, $query, ['RLX', 'JKL', 'ILN', 'PBZ', 'MRX', 'LWX', 'RNK']);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = [];
        while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $results[] = $line;
        }

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
        exit;
    }
}
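
// perout above is the percent of served customers currently out. The CASE
// guards against division by zero (SUM(served) = 0 yields NULL), and the CAST
// to NUMERIC matters because Postgres only defines the two-argument
// ROUND(value, places) for NUMERIC, not for FLOAT.
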
//county archive
if ($_GET['countyarchive'] ?? null) {
    if ($_GET['start'] ?? null) {
        $starttime = pg_escape_string($_GET['start']);
        if ($_GET['end'] ?? null) {
            $endtime = pg_escape_string($_GET['end']);

            $result = pg_query_params($dbconn,
                //"SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
                "select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $9 and update < $10 and (cwa = $1 or cwa = $2 or cwa = $3 or cwa = $4 or cwa = $5 or cwa = $6 or cwa = $7 or cwa = $8) group by county,state,update",
                array('RLX', 'JKL', 'ILN', 'PBZ', 'MRX', 'LWX', 'RNK', 'CTP', $starttime, $endtime)) or die('Query failed: ' . pg_last_error());

            $array = [];
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $array[] = $line;
            }
            echo json_encode($array);
        }
    }
}

//Archive point data
if ($_GET['archivepoint'] ?? null) {
    $starttime = pg_escape_string($_GET['start']);
    $endtime = pg_escape_string($_GET['end']);
    $result = pg_query_params($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause)) order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
        array('RLX', $starttime, $endtime)) or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
}

//if($_GET['svr']=='current') {
//$result = pg_query_params($dbconn,
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
//$resultArray = pg_fetch_all($result);
//echo($resultArray[0]['json_build_object']);
//}

// Parenthesized: without parentheses, `$_GET['svr'] ?? null == 'current'`
// parses as `$_GET['svr'] ?? (null == 'current')`, since == binds tighter than ??.
if (($_GET['svr'] ?? null) == 'current') {
    $result = pg_query($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
}

if (($_GET['svr'] ?? null) == 'archive') {
    if ($_GET['start'] ?? null) {
        $starttime = pg_escape_string($_GET['start']);
        if ($_GET['end'] ?? null) {
            $endtime = pg_escape_string($_GET['end']);

            $result = pg_query_params($dbconn,
                "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2",
                array($starttime, $endtime)) or die('Query failed: ' . pg_last_error());
            $resultArray = pg_fetch_all($result);
            echo($resultArray[0]['json_build_object']);
        }
    }

    if (!isset($_GET['start']) && !isset($_GET['end'])) {
        $result = pg_query($dbconn,
            "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
        $resultArray = pg_fetch_all($result);
        echo($resultArray[0]['json_build_object']);
    }
}

if (isset($result)) {
    pg_free_result($result);
}
pg_close($dbconn);
?>
593
powercounty.py
Normal file
593
powercounty.py
Normal file
@@ -0,0 +1,593 @@
# powercounty.py

import logging
import requests
import json
import psycopg2
import re
from datetime import datetime

# Set up logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('powercounty.log'),
        logging.StreamHandler()
    ]
)

# Set up a logger for this module
logger = logging.getLogger(__name__)

# Database connection parameters
DB_PARAMS = {
    'host': 'localhost',
    'database': 'nws',
    'user': 'nws',
    'password': 'nws'
}

# Set up a requests session
S = requests.Session()

# Power company metadata and URLs (from power3.py)
AEP_OH_META = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
AEP_WV_META = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
AEP_KY_META = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
WV_FE_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
AEP_WV_KUBRA_META = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
AEP_OH_KUBRA_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
AEP_KY_KUBRA_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'

AEP_WV_BASE = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
AEP_OH_BASE = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
AEP_KY_BASE = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
GRAYSON_COUNTY = 'https://outages.graysonrecc.com/data/boundaries.json'

# Additional URLs from power3.py
flemingjson = 'https://outage.fme.coop/data/boundaries.json'
bigsandy_url = 'http://outagemap.bigsandyrecc.com/data/boundaries.json'
southcentralpower_url = 'https://outage.southcentralpower.com/data/boundaries.json'

# Global list to collect all outage data
allcountyoutages = []

|
||||
# This function will try to get a URL and log any errors
|
||||
def safe_request(url, description="Fetching data"):
|
||||
try:
|
||||
logger.info(f"{description}: {url}")
|
||||
response = S.get(url)
|
||||
response.raise_for_status() # Raise an exception for bad status codes
|
||||
logger.info(f"Successfully fetched data from {url}")
|
||||
return response
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Failed to {description} from {url}: {e}")
|
||||
return None
|
||||
|
||||
# This function will parse a JSON response and log errors
|
||||
def safe_json_load(response, description="Parsing JSON"):
|
||||
try:
|
||||
logger.info(f"{description}")
|
||||
data = json.loads(response.text)
|
||||
logger.info("Successfully parsed JSON data")
|
||||
return data
|
||||
except (json.JSONDecodeError, AttributeError) as e:
|
||||
logger.error(f"Failed to {description}: {e}")
|
||||
return None
|
||||
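
# Usage pattern for the two helpers above (illustrative): every fetcher below
# chains them and treats None as "skip this provider" rather than raising:
#   resp = safe_request(flemingjson, "fetching Fleming data")
#   data = safe_json_load(resp, "parsing Fleming JSON") if resp else None
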
|
||||
# Ported functions from power3.py with enhanced logging
|
||||
|
||||
def fleming():
|
||||
"""Fetch outage data for Fleming County, KY"""
|
||||
logger.info("Fetching Fleming County outage data")
|
||||
state = 'KY'
|
||||
company = 'FLEM'
|
||||
temp = safe_request(flemingjson, "fetching Fleming data")
|
||||
if temp is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(temp, "parsing Fleming JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} Fleming County boundaries")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing Fleming County data: {e}")
|
||||
|
||||
def bigsandy():
|
||||
"""Fetch outage data for Big Sandy RECC"""
|
||||
logger.info("Fetching Big Sandy RECC outage data")
|
||||
state = 'OH'
|
||||
company = 'BS'
|
||||
temp = safe_request(bigsandy_url, "fetching Big Sandy data")
|
||||
if temp is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(temp, "parsing Big Sandy JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} Big Sandy boundaries")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing Big Sandy data: {e}")
|
||||
|
||||
def southcentralpower():
|
||||
"""Fetch outage data for South Central Power"""
|
||||
logger.info("Fetching South Central Power outage data")
|
||||
company = 'SCP'
|
||||
url = southcentralpower_url
|
||||
temp = safe_request(url, "fetching South Central Power data")
|
||||
if temp is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(temp, "parsing South Central Power JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
state = 'OH'
|
||||
try:
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} South Central Power boundaries")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing South Central Power data: {e}")
|
||||
|
||||
def ku_get_url():
    """Get KU outage data URL"""
    logger.info("Getting KU outage data URL")
    url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
    r = safe_request(url, "fetching KU report page")
    if r is None:
        return None

    try:
        x = re.search(r"instanceId: '(.*?)',", r.text)
        if not x:
            logger.error("Could not extract instanceId from KU report page")
            return None

        urlcom = x.group(1)
        urlcom = 'https://kubra.io/stormcenter/api/v1/stormcenters/' + urlcom + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false'
        stuff = safe_request(urlcom, "fetching KU stormcenter data")
        if stuff is None:
            return None

        jsonstuff = safe_json_load(stuff, "parsing KU stormcenter JSON")
        if jsonstuff is None:
            return None

        interval_data = jsonstuff.get('data').get('interval_generation_data')
        urlcom = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
        logger.info(f"Successfully constructed KU data URL: {urlcom}")
        return urlcom
    except Exception as e:
        logger.error(f"Error getting KU URL: {e}")
        return None


def county_json(meta, url, jsonname):
    """Generic function to get county JSON data"""
    metainfo_response = safe_request(meta, "fetching metadata for county JSON")
    if metainfo_response is None:
        return None

    metainfo = safe_json_load(metainfo_response, "parsing metadata for county JSON")
    if metainfo is None:
        return None

    try:
        metadir = metainfo['directory']
        url = url + metadir + jsonname
        outage_response = safe_request(url, "fetching county JSON data")
        return outage_response
    except KeyError as e:
        logger.error(f"Error accessing metadata directory: {e}")
        return None
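
# county_json() exists because the AEP-style maps never serve the report from
# a fixed path: metadata.json carries a 'directory' field naming the current
# interval_generation_data snapshot, and the report URL is base + directory +
# report name, resolved fresh on every poll.
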
def ku():
    """Fetch KU outage data"""
    logger.info("Fetching KU outage data")
    ku_list = []
    url = ku_get_url()
    if url is None:
        return

    data_response = safe_request(url, "fetching KU data")
    if data_response is None:
        return

    tempdata = safe_json_load(data_response, "parsing KU data JSON")
    if tempdata is None:
        return

    try:
        temp = tempdata['file_data']['areas'][2]['areas'][0]['areas']
        temp1 = tempdata['file_data']['areas'][2]['areas'][1]['areas']
        temp2 = tempdata['file_data']['areas'][1]['areas'][0]['areas']

        for i in temp:
            ku_list.append(i)
        for i in temp1:
            ku_list.append(i)
        for i in temp2:
            ku_list.append(i)

        for o in ku_list:
            outageinfo = o['cust_a']['val'], o['cust_s'], o['name'].capitalize(), o['state'], o['utility']
            allcountyoutages.append(outageinfo)

        logger.info(f"Successfully processed {len(ku_list)} KU outage records")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing KU data: {e}")

def grayson():
    """Fetch Grayson County outage data"""
    logger.info("Fetching Grayson County outage data")
    company = 'GRE'
    outage_response = safe_request(GRAYSON_COUNTY, "fetching Grayson County data")
    if outage_response is None:
        return

    if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
        logger.error(f"Unexpected content type from Grayson County: {outage_response.headers.get('Content-Type')}")
        return

    tempdata = safe_json_load(outage_response, "parsing Grayson County JSON")
    if tempdata is None:
        return

    state = 'KY'
    try:
        for j in tempdata[0]['boundaries']:
            outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
            allcountyoutages.append(outageinfo)
        logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} Grayson County boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Grayson County data: {e}")

def aep_county_vawv(meta, url, jsonname):
    """Fetch AEP county data for VA and WV"""
    logger.info("Fetching AEP county data for VA and WV")
    company = 'AEP'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return

    if not outage_response.headers.get('Content-Type', '').startswith('application/octet-stream'):
        logger.error(f"Unexpected content type from AEP VA/WV: {outage_response.headers.get('Content-Type')}")
        return

    tempdata = safe_json_load(outage_response, "parsing AEP VA/WV JSON")
    if tempdata is None:
        return

    try:
        # WV data
        state = 'WV'
        for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
            allcountyoutages.append(outageinfo)

        # VA data
        state = 'VA'
        for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)

        logger.info("Successfully processed AEP VA/WV county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing AEP VA/WV data: {e}")


def aep_county_oh(meta, url, jsonname):
    """Fetch AEP county data for Ohio"""
    logger.info("Fetching AEP county data for Ohio")
    company = 'AEP'
    state = 'OH'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return

    tempdata = safe_json_load(outage_response, "parsing AEP OH JSON")
    if tempdata is None:
        return

    try:
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
        logger.info("Successfully processed AEP OH county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing AEP OH data: {e}")


def aep_county_ky(meta, url, jsonname):
    """Fetch AEP county data for Kentucky"""
    logger.info("Fetching AEP county data for Kentucky")
    company = 'AEP'
    state = 'KY'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return

    tempdata = safe_json_load(outage_response, "parsing AEP KY JSON")
    if tempdata is None:
        return

    try:
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
        logger.info("Successfully processed AEP KY county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing AEP KY data: {e}")


def firstenergy_county(meta, url, jsonname):
    """Fetch First Energy county data"""
    logger.info("Fetching First Energy county data")
    company = 'FE'
    state = 'WV'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return

    if not outage_response.headers.get('Content-Type', '').startswith('application/octet-stream'):
        logger.error(f"Unexpected content type from First Energy: {outage_response.headers.get('Content-Type')}")
        return

    tempdata = safe_json_load(outage_response, "parsing First Energy JSON")
    if tempdata is None:
        return

    try:
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
        logger.info("Successfully processed First Energy county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing First Energy data: {e}")
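
# Note: aep_county_vawv/oh/ky and firstenergy_county above are the same
# traversal with different hard-coded index paths into file_data['areas']
# (each provider nests its regions differently) and different (state, company)
# labels attached to every row.
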
def get_kubra_hexes(url):
    """Get Kubra hex data"""
    outage_response = safe_request(url, "fetching Kubra hex data")
    if outage_response is None:
        return None, None

    if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra: {outage_response.headers.get('Content-Type')}")
        return None, None

    tempdata = safe_json_load(outage_response, "parsing Kubra hex JSON")
    if tempdata is None:
        return None, None

    try:
        bothhex = tempdata.get('data').get('cluster_interval_generation_data')
        hexes = bothhex.split('/')
        logger.info(f"Successfully extracted Kubra hexes: {hexes}")
        return hexes[2], hexes[3]
    except (KeyError, AttributeError) as e:
        logger.error(f"Error extracting Kubra hexes: {e}")
        return None, None
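
# The split above assumes cluster_interval_generation_data is a '/'-separated
# path, so elements [2] and [3] are two path tokens; the callers below
# currently splice only the second one into their report URLs, as
# baseurl1 + hex2[1] + baseurl2.
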
def kubra_fe(baseurl1, baseurl2, meta):
    """Fetch Kubra First Energy data"""
    logger.info("Fetching Kubra First Energy data")
    hex2 = get_kubra_hexes(meta)
    if hex2[0] is None:
        return

    url = baseurl1 + hex2[1] + baseurl2
    company = 'FE'
    state = 'WV'
    outage_response = safe_request(url, "fetching Kubra FE data")
    if outage_response is None:
        return

    if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra FE: {outage_response.headers.get('Content-Type')}")
        return

    tempdata = safe_json_load(outage_response, "parsing Kubra FE JSON")
    if tempdata is None:
        return

    try:
        for j in tempdata['file_data']['areas']:
            if j.get('key') == "county":
                outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('name').capitalize(), state, company
                allcountyoutages.append(outageinfo)
        logger.info("Successfully processed Kubra FE county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Kubra FE data: {e}")


def kubra_aep(baseurl1, baseurl2, meta, company='AEP'):
    """Fetch Kubra AEP data"""
    logger.info(f"Fetching Kubra AEP data for company: {company}")
    hex2 = get_kubra_hexes(meta)
    if hex2[0] is None:
        return

    url = baseurl1 + hex2[1] + baseurl2
    outage_response = safe_request(url, "fetching Kubra AEP data")
    if outage_response is None:
        return

    if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra AEP: {outage_response.headers.get('Content-Type')}")
        return

    tempdata = safe_json_load(outage_response, "parsing Kubra AEP JSON")
    if tempdata is None:
        return

    process_outage_data(tempdata, company)

def process_outage_data(data, company):
    """Process outage data with enhanced error handling"""
    try:
        # Navigate to the primary list of areas
        primary_areas = data.get("file_data", {}).get("areas", [])

        # If the list is empty, there's nothing to process
        if not primary_areas:
            logger.warning("No 'areas' data found in outage data.")
            return

        # Check the key of the first item to determine the format
        first_item_key = primary_areas[0].get("key")

        if first_item_key == "state":
            # Format 1: Loop through each state object
            for state_area in primary_areas:
                state_name = state_area.get("name", "Unknown State")
                # Get the nested list of counties for this state
                county_list = state_area.get("areas", [])
                for county in county_list:
                    # We are now at the county level
                    if county.get("key") == "county":
                        outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(), county.get('state'), company
                        allcountyoutages.append(outageinfo)
        elif first_item_key == "county":
            # Format 2: The primary list is already the county list
            for county in primary_areas:
                # We are now at the county level
                if county.get("key") == "county":
                    outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(), county.get('state'), company
                    allcountyoutages.append(outageinfo)
        else:
            logger.warning(f"Unknown data format. Could not find 'state' or 'county' key. Found: {first_item_key}")

    except (KeyError, IndexError) as e:
        logger.error(f"Error processing outage data: {e}")

def insert_outage_data(cursor, outage_data, current_timestamp):
    """Insert outage data into the new table"""
    if not outage_data:
        logger.info("No outage data to insert into the database.")
        return

    sql = 'INSERT INTO newcountyoutages (outages, served, county, state, update, company) VALUES (%s, %s, %s, %s, %s, %s)'
    # Each collected tuple is (outages, served, county, state, company); splice
    # the shared timestamp into the fifth slot to match the six-column list
    # (same ordering powersum.py uses for countyoutages).
    rows = [(o[0], o[1], o[2], o[3], current_timestamp, o[4]) for o in outage_data]
    try:
        logger.info(f"Inserting {len(rows)} rows into the database.")
        cursor.executemany(sql, rows)
        logger.info("Successfully inserted data into the database.")
    except Exception as e:
        logger.error(f"Failed to insert data into the database: {e}")
        raise

def main():
    """Main function to collect and insert outage data"""
    conn = None
    try:
        conn = psycopg2.connect(**DB_PARAMS)
        cursor = conn.cursor()
        logger.info("Successfully connected to the database.")

        # Clear the global list at the start
        global allcountyoutages
        allcountyoutages = []

        # Collect outage data for each provider
        logger.info("Starting data collection.")

        # --- Kubra First Energy ---
        try:
            kubra_fe('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', WV_FE_META)
        except Exception as e:
            logger.error(f"Error collecting Kubra FE data: {e}")

        # --- Kubra AEP WV ---
        try:
            kubra_aep('https://kubra.io/data/', '/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json', AEP_WV_KUBRA_META)
        except Exception as e:
            logger.error(f"Error collecting Kubra AEP WV data: {e}")

        # --- Kubra AEP OH ---
        try:
            kubra_aep('https://kubra.io/data/', '/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json', AEP_OH_KUBRA_META)
        except Exception as e:
            logger.error(f"Error collecting Kubra AEP OH data: {e}")

        # --- Kubra AEP KY ---
        try:
            kubra_aep('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', AEP_KY_KUBRA_META)
        except Exception as e:
            logger.error(f"Error collecting Kubra AEP KY data: {e}")

        # --- Grayson County ---
        try:
            grayson()
        except Exception as e:
            logger.error(f"Error collecting Grayson County data: {e}")

        # --- KU ---
        try:
            ku()
        except Exception as e:
            logger.error(f"Error collecting KU data: {e}")

        # --- South Central Power ---
        try:
            southcentralpower()
        except Exception as e:
            logger.error(f"Error collecting South Central Power data: {e}")

        # --- Big Sandy ---
        try:
            bigsandy()
        except Exception as e:
            logger.error(f"Error collecting Big Sandy data: {e}")

        # --- AEP Direct (OH, WV, KY) ---
        try:
            aep_county_oh(AEP_OH_META, AEP_OH_BASE, "metadata.json")
        except Exception as e:
            logger.error(f"Error collecting AEP OH data: {e}")

        try:
            aep_county_vawv(AEP_WV_META, AEP_WV_BASE, "metadata.json")
        except Exception as e:
            logger.error(f"Error collecting AEP WV/VA data: {e}")

        try:
            aep_county_ky(AEP_KY_META, AEP_KY_BASE, "metadata.json")
        except Exception as e:
            logger.error(f"Error collecting AEP KY data: {e}")

        # --- First Energy Direct ---
        try:
            firstenergy_county(WV_FE_META, 'https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/', "metadata.json")
        except Exception as e:
            logger.error(f"Error collecting First Energy data: {e}")

        # Insert collected data into the new table
        current_timestamp = str(datetime.utcnow())
        insert_outage_data(cursor, allcountyoutages, current_timestamp)
        conn.commit()
        logger.info("Data collection and database insert completed successfully.")

    except Exception as e:
        logger.exception("An error occurred during the main execution.")
        if conn:
            conn.rollback()
    finally:
        if conn:
            cursor.close()
            conn.close()
            logger.info("Database connection closed.")


if __name__ == '__main__':
    main()
50
powersum.py
Normal file
50
powersum.py
Normal file
@@ -0,0 +1,50 @@
import requests
import polyline
import json
import psycopg2
import psycopg2.extensions
from datetime import datetime, timezone
from geojson import Point, Feature, FeatureCollection, dump

conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()

allcountyoutages = []

S = requests.Session()

#select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update
#select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
# The execute below was leftover test code: psycopg2 uses %s placeholders, not
# $1/$2/$3, and no parameter values were supplied, so it is kept commented out.
#cursor.execute("select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > %s and update < %s and cwa = %s group by county,state,update", (starttime, endtime, 'RLX'))

current_timestamp = str(datetime.utcnow())
for i in allcountyoutages:
    sql = 'insert into countyoutages (outages, served, county, state, update, company) values (%s, %s, %s, %s, %s, %s)'
    val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
    cursor.execute(sql, val)
conn.commit()

# Backfill each row's CWA (forecast office) from the county lookup table,
# then prune everything outside RLX and anything older than 30 days.
cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')

conn.commit()

cursor.execute("delete from countyoutages where cwa != 'RLX'")
cursor.execute("delete from countyoutages where cwa is null")
cursor.execute("delete from countyoutages where update < now() - interval '30 days'")
conn.commit()

cursor.close()
conn.close()
51
powersummary.py
Normal file
51
powersummary.py
Normal file
@@ -0,0 +1,51 @@
from tabulate import tabulate
import requests
import polyline
import json
import psycopg2
import psycopg2.extensions
from datetime import datetime, timezone
from geojson import Point, Feature, FeatureCollection, dump

conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()

# No parameters are passed to execute(), so plain % wildcards are correct here
# (%% escaping is only needed when psycopg2 interpolates parameters).
cursor.execute("""
    SELECT
        startguess::timestamp(0),
        lastchange::timestamp(0),
        (lastchange-startguess)::interval(0),
        peakoutage,
        cause,
        lat,
        lon,
        county,
        state
    FROM
        power
    WHERE
        (cause ILIKE '%tree%' OR cause ILIKE '%weather%')
        AND cwa = 'RLX'
        AND startguess BETWEEN now() - interval '120 hours' AND now()
    ORDER BY
        startguess DESC
""")
allweather = cursor.fetchall()
cleanprint = []
#print(allweather)

# The query window is 120 hours, though the output file keeps its 24hrpower name.
if len(allweather) == 0:
    outage = ("No Tree Damage or Weather Reports In The Last 120 Hours")
else:
    outage = tabulate(allweather, headers=['Start Time UTC', 'End Time UTC', 'Duration', 'Max Out', 'Cause', 'Lat', 'Lon', 'County', 'State'])

with open("/var/www/html/work/24hrpower.txt", "w") as outfile:
    outfile.write(outage)

cursor.close()
conn.close()