clean
This commit is contained in:
File diff suppressed because one or more lines are too long
562
power2.py
562
power2.py
@@ -1,562 +0,0 @@
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
import pandas as pd
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
||||
|
||||
# TODO
|
||||
# Add Buckeye REC https://buckeyerec.outagemap.coop/
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
proxies = {"http":"http://nws:nws@localhost:9000"}
|
||||
|
||||
aepwvnew = ['0320001','0320003','0320010','0320011','0320012','0320013','0320021','0320030','0320031','0320100','0320102','0320120']
|
||||
aepohnew = ['0320013','0320010','0320011','0320012','0320003','0320001','0302322','0302233','0302232','0302223','0320102','0320100']
|
||||
aepkynew = ['0320031','0320030','0320021','0320013','0320012','0320011','0320010','0320003','0320001']
|
||||
firstenergy = ['030223','030232','032001','032003','032010','032012']
|
||||
dominionva = ['0320121','0320120','0300103','0320102','0320101','0320100','0320031','0320013','0320011']
|
||||
baltimore = ['0320011','0320100','0320101','0320013','0320102','0320103']
|
||||
pepco = ['03201002','03201003','03201020','03201021']
|
||||
|
||||
|
||||
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
aepwvkubra = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
|
||||
aepohkubra = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
|
||||
aepkykubra = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
|
||||
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/pa/interval_generation_data/metadata.json"
|
||||
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
|
||||
domvameta = 'https://outagemap.dominionenergy.com/resources/data/external/interval_generation_data/metadata.json'
|
||||
wvfemeta = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
|
||||
|
||||
aepwvcluster = 'cluster-2'
|
||||
aepohcluster = 'cluster-1'
|
||||
aepkycluster = 'cluster-2'
|
||||
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
aepbasewv = 'https://kubra.io/cluster-data/'
|
||||
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
|
||||
|
||||
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
|
||||
domvabase = 'https://outagemap.dominionenergy.com/resources/data/external/interval_generation_data/'
|
||||
graysonrecc = 'https://outages.graysonrecc.com/data/outages.json'
|
||||
|
||||
kubrabase = 'https://kubra.io/cluster-data/'
|
||||
firstenergybase = 'https://kubra.io/cluster-data/'
|
||||
firstenergycluster = 'cluster-4'
|
||||
firstenergyhex1 = 'f5f94943-5df4-4752-a0a7-8ef4baded880'
|
||||
firstenergyhex2 = 'e2986f8a-5a69-4d2f-821c-e5db03932b68'
|
||||
|
||||
southcentraljson = 'https://outage.southcentralpower.com/data/outages.json'
|
||||
|
||||
allcountyoutages = []
|
||||
allkubraoutages = []
|
||||
allaepkubracoutages = []
|
||||
|
||||
def remove_external_curly_braces(s):
    """Return the first element of *s*, or *s* unchanged if it is empty.

    NOTE(review): despite the name, this only returns s[0] — presumably the
    usable encoded-polyline payload is the first element of the area
    geometry list; confirm against the Kubra feed format.
    """
    try:
        return s[0]
    except (IndexError, TypeError) as e:
        # The original caught an undefined name `Error` (NameError on any
        # failure) and concatenated the exception object to a str (TypeError).
        print('error in curly ' + str(e))
        return s
|
||||
|
||||
|
||||
|
||||
def get_kubra_hexes(url):
    """Fetch a Kubra stormcenter currentState document and return the two
    hex path components used to build cluster-data tile URLs.

    Returns a (hex1, hex2) tuple, or None on any failure (callers unpack
    the result inside their own try blocks, so None surfaces there).
    """
    try:
        # The request lives inside the try so network errors are handled
        # the same way as parse errors (originally they escaped uncaught).
        outage = S.get(url)
        tempdata = json.loads(outage.text)
        bothhex = tempdata.get('data').get('cluster_interval_generation_data')
        # Path looks like ".../<hex1>/<hex2>/..."; components 2 and 3.
        hexes = bothhex.split('/')
        return (hexes[2], hexes[3])
    except Exception as e:
        print(e)
|
||||
|
||||
|
||||
|
||||
def kubra(baseurl, cluster, namearray, meta):
    """Recursively walk Kubra quadkey outage tiles, appending leaf outage
    records to the module-level ``allkubraoutages`` list.

    Tiles flagged as clusters are subdivided into four children and fetched
    on the next recursion level; leaf records are accumulated.
    """
    try:
        # Fetch the hex path components once (the original fetched the
        # metadata twice, discarding the first result).
        hex1, hex2 = get_kubra_hexes(meta)
        pending = []
        for quadkey in namearray:
            # Directory shard = last three digits of the quadkey, reversed.
            shard = str(quadkey)[-3:][::-1]
            tile_url = (baseurl + shard + '/' + hex1 + '/' + hex2 +
                        '/public/' + cluster + '/' + quadkey + '.json')
            resp = S.get(tile_url)
            if not resp.headers.get('Content-Type').startswith('application/json'):
                continue
            payload = json.loads(resp.text)
            for record in payload['file_data']:
                try:
                    is_cluster = record.get('desc').get('cluster')
                except Exception:
                    continue
                if is_cluster == True:
                    for child in range(4):
                        pending.append(str(quadkey) + str(child))
                if is_cluster == False:
                    allkubraoutages.append(record)
        # Order-preserving dedupe, then recurse into subdivided tiles.
        pending = list(dict.fromkeys(pending))
        if len(pending) > 0:
            kubra(baseurl, cluster, pending, meta)
    except Exception as e:
        print(e)
|
||||
|
||||
|
||||
def kubra_aep(baseurl, cluster, namearray, meta):
    """AEP variant of ``kubra``: recursively walk Kubra quadkey tiles,
    appending leaf outage records to ``allaepkubracoutages``.

    Wrapped in try/except for consistency with ``kubra`` (the original let
    exceptions propagate; callers caught and printed them anyway).
    """
    try:
        # Single metadata fetch (the original fetched it twice, discarding
        # the first result).
        hex1, hex2 = get_kubra_hexes(meta)
        pending = []
        for quadkey in namearray:
            # Directory shard = last three digits of the quadkey, reversed.
            shard = str(quadkey)[-3:][::-1]
            tile_url = (baseurl + shard + '/' + hex1 + '/' + hex2 +
                        '/public/' + cluster + '/' + quadkey + '.json')
            resp = S.get(tile_url)
            if not resp.headers.get('Content-Type').startswith('application/json'):
                continue
            payload = json.loads(resp.text)
            for record in payload['file_data']:
                try:
                    is_cluster = record.get('desc').get('cluster')
                except Exception:
                    continue
                if is_cluster == True:
                    for child in range(4):
                        pending.append(str(quadkey) + str(child))
                if is_cluster == False:
                    allaepkubracoutages.append(record)
        pending = list(dict.fromkeys(pending))
        if len(pending) > 0:
            kubra_aep(baseurl, cluster, pending, meta)
    except Exception as e:
        print(e)
|
||||
|
||||
def insertkubra(data):
    """Upsert Kubra (FirstEnergy) outage records into the power table.

    For each record: decode the single encoded point into lat/lon,
    normalize the area geometry, parse start/etr timestamps (two formats
    seen in the feed), then upsert keyed on pointgeom. Records without a
    customer count or with multi-point geometry are skipped.
    """
    for j in data:
        try:
            custa = j.get('desc').get('cust_a').get('val')
        except Exception:
            continue
        pointgeom = j.get('geom').get('p')
        if len(pointgeom) != 1:
            continue
        pointlatlon = polyline.decode(pointgeom[0])
        lat = pointlatlon[0][0]
        lon = pointlatlon[0][1]
        areageom = j.get('geom').get('a')
        if areageom is not None:
            areageom = remove_external_curly_braces(areageom)
        # Guard the cause lookup like insert_kubra_aep does: 'cause' may be
        # absent, and the original's .get('EN-US') on None raised.
        cause = "Pending Investigation"
        cause_dict = j.get('desc').get('cause')
        if cause_dict:
            cause = cause_dict.get('EN-US')
        start = j.get('desc').get('start_time')
        if start is not None:
            try:
                start = datetime.strptime(start, "%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                # Seconds are sometimes omitted by the feed.
                start = datetime.strptime(start, "%Y-%m-%dT%H:%M%z")
        etr = j.get('desc').get('etr')
        if etr == 'ETR-NULL' or etr == 'ETR-EXP':
            etr = None
        if etr is not None:
            try:
                etr = datetime.strptime(etr, "%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                etr = datetime.strptime(etr, "%Y-%m-%dT%H:%M%z")
        incid = j.get('desc').get('inc_id')
        # NOTE(review): crew_status could in principle also be absent, in
        # which case .get('EN-US') on None would raise — confirm with feed.
        crew_status = j.get('desc').get('crew_status').get('EN-US')
        current_timestamp = str(datetime.utcnow())
        sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
        vals = (lat, lon, pointgeom, areageom, start, cause, custa, crew_status, incid, custa, etr, current_timestamp, current_timestamp, 'True', custa, cause, start, etr, crew_status, current_timestamp)
        cursor.execute(sql, vals)
        conn.commit()
|
||||
|
||||
def insert_kubra_aep(data):
    """Upsert AEP Kubra outage records into the power table (keyed on
    pointgeom). Records lacking a customer count or having multi-point
    geometry are skipped."""
    for record in data:
        try:
            customers_out = record.get('desc').get('cust_a').get('val')
        except:
            continue
        pointgeom = record.get('geom').get('p')
        if len(pointgeom) != 1:
            continue
        decoded = polyline.decode(pointgeom[0])
        lat, lon = decoded[0][0], decoded[0][1]
        areageom = record.get('geom').get('a')
        if areageom is not None:
            areageom = remove_external_curly_braces(areageom)
        # Default when the feed omits the cause block entirely.
        cause = "Pending Investigation"
        cause_dict = record.get('desc').get('cause')
        if cause_dict:
            cause = cause_dict.get('EN-US')
        start = record.get('desc').get('start_time')
        if start is not None:
            try:
                start = datetime.strptime(start, "%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                # Seconds field is sometimes absent.
                start = datetime.strptime(start, "%Y-%m-%dT%H:%M%z")
        etr = record.get('desc').get('etr')
        if etr == 'ETR-NULL' or etr == 'ETR-EXP':
            etr = None
        if etr is not None:
            try:
                etr = datetime.strptime(etr, "%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                etr = datetime.strptime(etr, "%Y-%m-%dT%H:%M%z")
        incid = record.get('desc').get('inc_id')
        crew_status = record.get('desc').get('crew_status').get('EN-US')
        current_timestamp = str(datetime.utcnow())
        sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
        vals = (lat, lon, pointgeom, areageom, start, cause, customers_out, crew_status, incid, customers_out, etr, current_timestamp, current_timestamp, 'True', customers_out, cause, start, etr, crew_status, current_timestamp)
        cursor.execute(sql, vals)
        conn.commit()
|
||||
|
||||
|
||||
def remove_dupes(l):
    """Return a copy of *l* with duplicates removed, keeping the LAST
    occurrence of each repeated item.

    Quadratic scan on purpose: items may be unhashable (dicts from the
    Kubra feeds), so a set-based dedupe is not an option.
    """
    unique = []
    for idx, item in enumerate(l):
        if item not in l[idx + 1:]:
            unique.append(item)
    return unique
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def newaep(meta, namearray, baseurl):
    """Recursively walk the legacy AEP S3 outage tiles, collecting
    'Outage Information' leaf records into ``allkubraoutages``.

    'Area Outage' tiles are subdivided into four children and fetched on
    the next recursion level.
    """
    children = []
    metainfo = json.loads(S.get(meta).text)
    metadir = metainfo['directory']
    for tile in namearray:
        resp = S.get(baseurl + metadir + '/outages/' + tile + '.json')
        if not resp.headers.get('Content-Type').startswith('application/octet-stream'):
            continue
        payload = json.loads(resp.text)
        for record in payload['file_data']:
            try:
                title = record.get('title')
            except:
                continue
            if title == 'Area Outage':
                for child in range(4):
                    children.append(str(tile) + str(child))
            if title == 'Outage Information':
                allkubraoutages.append(record)
    # Order-preserving dedupe, then recurse into subdivided tiles.
    children = list(dict.fromkeys(children))
    if len(children) > 0:
        newaep(meta, children, baseurl)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def check_bad_offset(offset):
    """Normalize a trailing UTC offset like '-05:00' to '-0500' so that
    strptime's %z accepts it.

    Returns the input unchanged when there is no ':' at that position, or
    when the input is not sliceable (e.g. None).
    """
    try:
        if ":" == offset[-3:-2]:
            # Drop the colon between the offset's hours and minutes.
            offset = offset[:-3] + offset[-2:]
        return offset
    except TypeError:
        # The original used a bare except; only non-subscriptable inputs
        # (such as None) can actually fail here.
        return offset
|
||||
|
||||
def fix_bad_timestamp(timestamp):
    """Parse *timestamp* leniently via pandas, which copes with the feeds'
    mixed ISO-8601 variants and odd UTC offsets."""
    return pd.to_datetime(timestamp)
|
||||
|
||||
S = requests.Session()
|
||||
S.verify = False
|
||||
|
||||
|
||||
|
||||
def southcentral():
    """Fetch South Central Power outage points and upsert them into the
    power table, keyed on pointgeom (which stores the outageRecID here)."""
    sess = requests.Session()
    sess.verify = False
    outages = json.loads(sess.get(southcentraljson).text)
    if len(outages) == 0:
        return
    for rec in outages:
        rec_id = rec.get('outageRecID')
        lat = rec.get('outagePoint').get('lat')
        lon = rec.get('outagePoint').get('lng')
        start = rec.get('outageStartTime')
        end = rec.get('outageEndTime')
        cause = rec.get('cause')
        initial = rec.get('customersOutInitially')
        now = rec.get('customersOutNow')
        change = rec.get('outageModifiedTime')
        crew = rec.get('outageWorkStatus')
        # Lenient parsing: the feed mixes timestamp formats.
        if start is not None:
            start = fix_bad_timestamp(start)
        if end is not None:
            end = fix_bad_timestamp(end)
        if change is not None:
            change = fix_bad_timestamp(change)
        current_timestamp = str(datetime.utcnow())
        sql = "INSERT INTO power (lat,lon,start,derivedstart,cause,outagen,crew_status,peakoutage,pointgeom,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s) on conflict (pointgeom) do update set (outagen, cause, start, crew_status,lastchange) = (%s, %s, %s, %s, %s)"
        vals = (lat, lon, start, current_timestamp, cause, now, crew, initial, rec_id, change, 'True', now, cause, start, crew, change)
        cursor.execute(sql, vals)
        conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def grayson():
    """Fetch Grayson RECC outage points and upsert them into the power
    table, keyed on pointgeom (which stores the outageRecID here)."""
    outages = json.loads(S.get(graysonrecc).text)
    if len(outages) == 0:
        return
    for rec in outages:
        rec_id = rec.get('outageRecID')
        lat = rec.get('outagePoint').get('lat')
        lon = rec.get('outagePoint').get('lng')
        start = rec.get('outageStartTime')
        end = rec.get('outageEndTime')
        cause = rec.get('cause')
        initial = rec.get('customersOutInitially')
        now = rec.get('customersOutNow')
        change = rec.get('outageModifiedTime')
        crew = rec.get('outageWorkStatus')
        # Lenient parsing: the feed mixes timestamp formats.
        if start is not None:
            start = fix_bad_timestamp(start)
        if end is not None:
            end = fix_bad_timestamp(end)
        if change is not None:
            change = fix_bad_timestamp(change)
        current_timestamp = str(datetime.utcnow())
        sql = "INSERT INTO power (lat,lon,start,derivedstart,cause,outagen,crew_status,peakoutage,pointgeom,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s) on conflict (pointgeom) do update set (outagen, cause, start, crew_status,lastchange) = (%s, %s, %s, %s, %s)"
        vals = (lat, lon, start, current_timestamp, cause, now, crew, initial, rec_id, change, 'True', now, cause, start, crew, change)
        cursor.execute(sql, vals)
        conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def check_outages(meta, namearray, baseurl):
    """Fetch legacy AEP-style outage tiles named in *namearray* and upsert
    the contained outage records into the power table.

    Unlike the Kubra walkers this does not recurse; *namearray* is the
    final tile list. Commits once after processing all tiles.
    """
    metainfo = json.loads(S.get(meta).text)
    metadir = metainfo['directory']
    for tile in namearray:
        url = baseurl + metadir + '/outages/' + tile + '.json'
        outage = S.get(url)
        if not outage.headers.get('Content-Type').startswith('application/octet-stream'):
            continue
        tempdata = json.loads(outage.text)
        for j in tempdata['file_data']:
            # Renamed from `id`, which shadowed the builtin.
            record_id = j.get('id')
            try:
                custa = j.get('desc').get('cust_a').get('val')
            except Exception:
                continue
            pointgeom = j.get('geom').get('p')
            if len(pointgeom) != 1:
                continue
            pointlatlon = polyline.decode(pointgeom[0])
            lat = pointlatlon[0][0]
            lon = pointlatlon[0][1]
            areageom = j.get('geom').get('a')
            if areageom is not None:
                areageom = remove_external_curly_braces(areageom)
            cause = j.get('desc').get('cause')
            start = j.get('desc').get('start')
            if start is not None:
                # NOTE(review): %f immediately after %S matches fractional
                # digits with no separating dot — confirm the feed format.
                start = datetime.strptime(start, "%Y-%m-%dT%H:%M:%S%f%z")
            etr = j.get('desc').get('etr')
            if etr == 'ETR-NULL' or etr == 'ETR-EXP':
                etr = None
            if etr is not None:
                etr = datetime.strptime(etr, "%Y-%m-%dT%H:%M:%S%f%z")
            incid = j.get('desc').get('inc_id')
            crew_status = j.get('desc').get('crew_status')
            current_timestamp = str(datetime.utcnow())
            sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,genericid,derivedstart,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
            vals = (lat, lon, pointgeom, areageom, start, cause, custa, crew_status, incid, custa, etr, record_id, current_timestamp, current_timestamp, 'True', custa, cause, start, etr, crew_status, current_timestamp)
            cursor.execute(sql, vals)
    conn.commit()
|
||||
|
||||
|
||||
|
||||
try:
|
||||
southcentral()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
try:
|
||||
grayson()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
#try:
|
||||
# newaep(aepwvmeta,aepwvnew,aepwvbase)
|
||||
#except Exception as e:
|
||||
# print(e)
|
||||
#try:
|
||||
# newaep(aepohmeta,aepohnew,aepohbase)
|
||||
#except Exception as e:
|
||||
# print(e)
|
||||
#try:
|
||||
# newaep(aepkymeta,aepkynew,aepkybase)
|
||||
#except Exception as e:
|
||||
# print(e)
|
||||
try:
|
||||
kubra_aep(kubrabase,aepwvcluster,aepwvnew,aepwvkubra)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
try:
|
||||
kubra_aep(kubrabase,aepohcluster,aepohnew,aepohkubra)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
try:
|
||||
kubra_aep(kubrabase,aepkycluster,aepkynew,aepkykubra)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
try:
|
||||
#newaep(firstpowerwvmeta,firstpowerwvnew,firstpowerwvbase)
|
||||
kubra(firstenergybase,firstenergycluster,firstenergy,wvfemeta)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
#try:
|
||||
# newaep(domvameta,dominionva,domvabase)
|
||||
#except Exception as e:
|
||||
# print(e)
|
||||
|
||||
#kubra(kubrabase,aepwvcluster,aepwvnew,aepwvmeta)
|
||||
|
||||
|
||||
nodupe = remove_dupes(allcountyoutages)
|
||||
nodupekubra = remove_dupes(allkubraoutages)
|
||||
nodupeaepkubra = remove_dupes(allaepkubracoutages)
|
||||
#print(nodupe)
|
||||
|
||||
def insertaep(data):
    """Debug-only variant of the insert routines: builds the parameter
    tuple for each legacy AEP outage record and prints it — the database
    write path is intentionally disabled."""
    for rec in data:
        try:
            custa = rec.get('desc').get('cust_a').get('val')
        except:
            continue
        pointgeom = rec.get('geom').get('p')
        if len(pointgeom) != 1:
            continue
        latlon = polyline.decode(pointgeom[0])
        lat, lon = latlon[0][0], latlon[0][1]
        areageom = rec.get('geom').get('a')
        if areageom is not None:
            areageom = remove_external_curly_braces(areageom)
        cause = rec.get('desc').get('cause')
        start = rec.get('desc').get('start')
        if start is not None:
            start = datetime.strptime(start, "%Y-%m-%dT%H:%M:%S%f%z")
        etr = rec.get('desc').get('etr')
        if etr == 'ETR-NULL' or etr == 'ETR-EXP':
            etr = None
        if etr is not None:
            etr = datetime.strptime(etr, "%Y-%m-%dT%H:%M:%S%f%z")
        incid = rec.get('desc').get('inc_id')
        crew_status = rec.get('desc').get('crew_status')
        current_timestamp = str(datetime.utcnow())
        vals = (lat, lon, pointgeom, areageom, start, cause, custa, crew_status, incid, custa, etr, current_timestamp, current_timestamp, 'True', custa, cause, start, etr, crew_status, current_timestamp)
        print(vals)
|
||||
|
||||
|
||||
if len(nodupe) > 0:
|
||||
insertaep(nodupe)
|
||||
if len(nodupekubra) > 0:
|
||||
insertkubra(nodupekubra)
|
||||
if len(nodupeaepkubra) > 0:
|
||||
insert_kubra_aep(nodupeaepkubra)
|
||||
|
||||
cursor.execute('UPDATE public.power SET realgeom = ST_SetSRID(ST_MakePoint(lon, lat), 4326) where (lat is not null and lon is not null and realgeom is null)')
|
||||
cursor.execute('UPDATE public.power SET peakoutage = outagen where outagen > peakoutage')
|
||||
cursor.execute('update public.power set county = county.countyname from public.county where ST_contains(county.geom,power.realgeom) and power.county is null')
|
||||
cursor.execute('update public.power set cwa = fzone.cwa from public.fzone where ST_contains(fzone.geom,power.realgeom) and power.cwa is null')
|
||||
cursor.execute('update public.power set state = county.state from public.county where ST_contains(county.geom,power.realgeom) and power.state is null')
|
||||
cursor.execute('update public.power set startguess = least(start,derivedstart)')
|
||||
cursor.execute('update public.power set realareageom = st_linefromencodedpolyline(areageom) where areageom is not null and realareageom is null')
|
||||
cursor.execute("update power set pointgeom = NULL where lastchange < now() - interval '2 hours'")
|
||||
conn.commit()
|
||||
|
||||
|
||||
cursor.execute("update power set active = true where lastchange > now() - interval '30 minutes'")
|
||||
cursor.execute("update power set active = false where lastchange < now() - interval '30 minutes'")
|
||||
conn.commit()
|
||||
#cursor.execute("delete from power where cwa != 'RLX'")
|
||||
cursor.execute("delete from power where lastchange < now () - interval '365 days'")
|
||||
conn.commit()
|
||||
|
||||
#print(allkubraoutages)
|
||||
cursor.close()
|
||||
conn.close()
|
||||
447
power3.py
447
power3.py
@@ -1,447 +0,0 @@
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
import re
|
||||
import logging
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s',
|
||||
handlers=[
|
||||
logging.FileHandler('power3.log'),
|
||||
logging.StreamHandler()
|
||||
]
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
proxies = {"http":"http://nws:nws@localhost:9000"}
|
||||
|
||||
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json"
|
||||
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
|
||||
wvfemeta= 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
|
||||
aepwvkubrameta = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
|
||||
aepohkubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
|
||||
aepkykubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
|
||||
|
||||
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
|
||||
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
|
||||
graysoncounty = 'https://outages.graysonrecc.com/data/boundaries.json'
|
||||
flemingjson = 'https://outage.fme.coop/data/boundaries.json'
|
||||
|
||||
#buckeye rec
|
||||
#https://outage.buckeyerec.coop/maps/OutageWebMap/maps/GWT.rpc
|
||||
#washington
|
||||
#https://weci.ebill.coop/woViewer/MapWiseWeb/GWT.rpc
|
||||
allcountyoutages = []
|
||||
|
||||
S = requests.Session()
|
||||
|
||||
|
||||
|
||||
|
||||
def fleming():
    """Collect per-county outage counts from Fleming-Mason Energy and
    append (out_now, served, county, state, company) tuples to
    ``allcountyoutages``."""
    logger.info("Starting fleming()")
    state = 'KY'
    company = 'FLEM'
    try:
        resp = S.get(flemingjson)
        resp.raise_for_status()
        boundaries = json.loads(resp.text)[0]['boundaries']
        for county in boundaries:
            allcountyoutages.append(
                (county.get('customersOutNow'), county.get('customersServed'),
                 county.get('name'), state, company))
        logger.info(f"Successfully processed {len(boundaries)} boundaries from fleming")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for fleming: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for fleming: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in fleming: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def bigsandy():
    """Collect per-county outage counts from Big Sandy RECC and append
    (out_now, served, county, state, company) tuples to
    ``allcountyoutages``."""
    logger.info("Starting bigsandy()")
    state = 'OH'
    company = 'BS'
    try:
        resp = S.get('https://outagemap.bigsandyrecc.com/data/boundaries.json')
        resp.raise_for_status()
        boundaries = json.loads(resp.text)[0]['boundaries']
        for county in boundaries:
            allcountyoutages.append(
                (county.get('customersOutNow'), county.get('customersServed'),
                 county.get('name'), state, company))
        logger.info(f"Successfully processed {len(boundaries)} boundaries from bigsandy")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for bigsandy: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for bigsandy: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in bigsandy: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
def southcentralpower():
    """Collect per-county outage counts from South Central Power and append
    (out_now, served, county, state, company) tuples to
    ``allcountyoutages``. Uses its own session (proxy hookup disabled)."""
    logger.info("Starting southcentralpower()")
    company = 'SCP'
    url = 'https://outage.southcentralpower.com/data/boundaries.json'
    sess = requests.Session()
    try:
        response = sess.get(url)
        response.raise_for_status()
        state = 'OH'
        boundaries = json.loads(response.text)[0]['boundaries']
        for county in boundaries:
            allcountyoutages.append(
                (county.get('customersOutNow'), county.get('customersServed'),
                 county.get('name'), state, company))
        logger.info(f"Successfully processed {len(boundaries)} boundaries from southcentralpower")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for southcentralpower: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for southcentralpower: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in southcentralpower: {e}")
|
||||
|
||||
#wv https://kubra.io/data/e2ae0326-9912-436a-9355-eb2687e798b1/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json
|
||||
def ku_get_url():
    """Scrape the LG&E/KU stormcenter page for its instanceId and build the
    URL of the county-level outage report JSON.

    Returns the report URL string, or None on any failure (callers must
    check for None).
    """
    logger.info("Starting ku_get_url()")
    try:
        page = requests.get('https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40')
        page.raise_for_status()
        # The instance id is embedded in inline JS on the report page.
        match = re.search(r"instanceId: '(.*?)',", page.text)
        if not match:
            logger.error("Could not find instanceId in ku_get_url")
            return None
        state_url = ('https://kubra.io/stormcenter/api/v1/stormcenters/' + match.group(1)
                     + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false')
        state_resp = S.get(state_url)
        state_resp.raise_for_status()
        state_json = json.loads(state_resp.text)
        interval_data = state_json.get('data').get('interval_generation_data')
        if not interval_data:
            logger.error("Could not find interval_generation_data in ku_get_url")
            return None
        report_url = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
        logger.info("Successfully generated URL for ku")
        return report_url
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed in ku_get_url: {e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error in ku_get_url: {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error in ku_get_url: {e}")
        return None
|
||||
|
||||
|
||||
def county_json(meta, url, jsonname):
    """Resolve the dated directory from *meta* and fetch the county-level
    JSON at url + directory + jsonname.

    Returns the requests Response, or None on any failure — callers are
    responsible for checking for None.
    """
    logger.info(f"Starting county_json for meta: {meta}")
    try:
        meta_resp = S.get(meta)
        meta_resp.raise_for_status()
        directory = json.loads(meta_resp.text)['directory']
        url = url + directory + jsonname
        outage = S.get(url)
        outage.raise_for_status()
        logger.info(f"Successfully fetched county JSON from {url}")
        return outage
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed in county_json: {e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error in county_json: {e}")
        return None
    except KeyError as e:
        logger.error(f"Key error in county_json (missing 'directory'): {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error in county_json: {e}")
        return None
|
||||
|
||||
|
||||
|
||||
def ku():
    """Collect KU/LG&E per-county outage rows into ``allcountyoutages``.

    Walks three fixed subtrees of the report's nested 'areas' structure;
    each leaf contributes (out_now, served, County, state, utility).
    """
    report_url = ku_get_url()
    if report_url is None:
        # ku_get_url already logged the failure; the original would have
        # passed None straight to S.get and crashed.
        return
    tempdata = json.loads(S.get(report_url).text)
    # Renamed from `ku`, which shadowed the function itself.
    counties = []
    # NOTE(review): these fixed indices mirror the report layout observed
    # when this was written — verify if the feed structure changes.
    counties.extend(tempdata['file_data']['areas'][2]['areas'][0]['areas'])
    counties.extend(tempdata['file_data']['areas'][2]['areas'][1]['areas'])
    counties.extend(tempdata['file_data']['areas'][1]['areas'][0]['areas'])
    for o in counties:
        outageinfo = o['cust_a']['val'], o['cust_s'], o['name'].capitalize(), o['state'], o['utility']
        allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def grayson():
    """Append Grayson RECC per-county outage tuples to
    ``allcountyoutages``."""
    company = 'GRE'
    resp = S.get(graysoncounty)
    if not resp.headers.get('Content-Type').startswith('application/json'):
        return
    boundaries = json.loads(resp.text)[0]['boundaries']
    state = 'KY'
    for county in boundaries:
        allcountyoutages.append(
            (county.get('customersOutNow'), county.get('customersServed'),
             county.get('name'), state, company))
|
||||
|
||||
|
||||
def aep_county_vawv(meta, url, jsonname):
    """Collect AEP county-level outage counts for WV and VA.

    Args:
        meta: metadata.json URL giving the current report directory.
        url: base URL for the report data.
        jsonname: report file name appended after the directory.

    Appends ``(outages, served, county, state, company)`` tuples to the
    module-level ``allcountyoutages`` list.
    """
    company = 'AEP'
    outage = county_json(meta, url, jsonname)
    # county_json returns None on any request/parse failure; the original
    # dereferenced .headers unguarded and would raise AttributeError here.
    if outage is None:
        logger.error("aep_county_vawv: no outage data available")
        return
    # Default '' so a missing Content-Type header cannot raise.
    if outage.headers.get('Content-Type', '').startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        state = 'WV'
        # NOTE(review): WV names are not .capitalize()d while VA names are —
        # preserved as-is; confirm whether that asymmetry is intentional.
        for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
            allcountyoutages.append(outageinfo)
        state = 'VA'
        for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
|
||||
|
||||
def aep_county_oh(meta, url, jsonname):
    """Collect AEP Ohio county-level outage counts.

    Args:
        meta: metadata.json URL giving the current report directory.
        url: base URL for the report data.
        jsonname: report file name appended after the directory.

    Appends ``(outages, served, county, state, company)`` tuples to the
    module-level ``allcountyoutages`` list.
    """
    company = 'AEP'
    state = 'OH'
    outage = county_json(meta, url, jsonname)
    # county_json returns None on any request/parse failure; the original
    # passed None straight into json.loads(outage.text) and crashed.
    if outage is None:
        logger.error("aep_county_oh: no outage data available")
        return
    tempdata = json.loads(outage.text)
    for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
        outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
        allcountyoutages.append(outageinfo)
|
||||
|
||||
def aep_county_ky(meta, url, jsonname):
    """Collect AEP Kentucky county-level outage counts.

    Args:
        meta: metadata.json URL giving the current report directory.
        url: base URL for the report data.
        jsonname: report file name appended after the directory.

    Appends ``(outages, served, county, state, company)`` tuples to the
    module-level ``allcountyoutages`` list.
    """
    company = 'AEP'
    state = 'KY'
    outage = county_json(meta, url, jsonname)
    # county_json returns None on any request/parse failure; the original
    # passed None straight into json.loads(outage.text) and crashed.
    if outage is None:
        logger.error("aep_county_ky: no outage data available")
        return
    tempdata = json.loads(outage.text)
    for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
        outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
        allcountyoutages.append(outageinfo)
|
||||
|
||||
def firstenergy_county(meta, url, jsonname):
    """Collect FirstEnergy (WV) county-level outage counts.

    Args:
        meta: metadata.json URL giving the current report directory.
        url: base URL for the report data.
        jsonname: report file name appended after the directory.

    Appends ``(outages, served, county, state, company)`` tuples to the
    module-level ``allcountyoutages`` list.
    """
    company = 'FE'
    state = 'WV'
    outage = county_json(meta, url, jsonname)
    # county_json returns None on any request/parse failure; the original
    # dereferenced .headers unguarded and would raise AttributeError here.
    if outage is None:
        logger.error("firstenergy_county: no outage data available")
        return
    # Default '' so a missing Content-Type header cannot raise.
    if outage.headers.get('Content-Type', '').startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
def get_kubra_hexes(url):
    """Resolve the two hash path segments of a kubra storm-center feed.

    Fetches the stormcenter "currentState" JSON at *url* and splits its
    ``data.cluster_interval_generation_data`` path, returning segments
    2 and 3 as a tuple. Returns None (and logs) when the response is not
    JSON — the original fell through to an implicit, silent None.
    """
    outage = S.get(url)
    # Default '' so a missing Content-Type header cannot raise.
    if outage.headers.get('Content-Type', '').startswith('application/json'):
        tempdata = json.loads(outage.text)
        bothhex = tempdata.get('data').get('cluster_interval_generation_data')
        hexes = bothhex.split('/')
        return hexes[2], hexes[3]
    logger.error(f"get_kubra_hexes: unexpected response from {url}")
    return None
|
||||
|
||||
|
||||
|
||||
|
||||
def kubra_fe(baseurl1, baseurl2, meta):
    """Collect FirstEnergy WV county outages from a kubra-hosted report.

    Args:
        baseurl1: leading part of the report URL (before the hash segment).
        baseurl2: trailing part of the report URL (after the hash segment).
        meta: stormcenter currentState URL used to resolve the hash segment.

    Appends ``(outages, served, county, state, company)`` tuples to the
    module-level ``allcountyoutages`` list.
    """
    company = 'FE'
    state = 'WV'
    hexes = get_kubra_hexes(meta)
    # get_kubra_hexes returns None on failure; the original subscripted it
    # unguarded and would raise TypeError.
    if hexes is None:
        logger.error("kubra_fe: could not resolve kubra data directory")
        return
    url = baseurl1 + hexes[1] + baseurl2
    outage = S.get(url)
    # Default '' so a missing Content-Type header cannot raise.
    if outage.headers.get('Content-Type', '').startswith('application/json'):
        tempdata = json.loads(outage.text)
        for j in tempdata['file_data']['areas']:
            if j.get('key') == "county":
                outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('name').capitalize(), state, company
                allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
def kubra_aep(baseurl1, baseurl2, meta, company='AEP'):
    """Collect AEP county outages from a kubra-hosted report.

    Args:
        baseurl1: leading part of the report URL (before the hash segment).
        baseurl2: trailing part of the report URL (after the hash segment).
        meta: stormcenter currentState URL used to resolve the hash segment.
        company: utility identifier stored with each record.

    Delegates parsing to ``process_outage_data``, which handles both known
    report layouts.
    """
    hexes = get_kubra_hexes(meta)
    # get_kubra_hexes returns None on failure; the original subscripted it
    # unguarded and would raise TypeError.
    if hexes is None:
        logger.error("kubra_aep: could not resolve kubra data directory")
        return
    url = baseurl1 + hexes[1] + baseurl2
    outage = S.get(url)
    # Default '' so a missing Content-Type header cannot raise.
    if outage.headers.get('Content-Type', '').startswith('application/json'):
        tempdata = json.loads(outage.text)
        process_outage_data(tempdata, company)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def process_outage_data(data, company):
    """Append county-level outage records from a kubra report payload.

    Handles both known feed layouts:
      * top-level areas keyed ``"state"``, each holding a nested county list;
      * top-level areas that are already the county list (keyed ``"county"``).

    Args:
        data (dict): the parsed JSON report.
        company (str): utility identifier stored with each record.

    Appends ``(outages, served, county, state, company)`` tuples to the
    module-level ``allcountyoutages`` list.
    """
    primary_areas = data.get("file_data", {}).get("areas", [])
    if not primary_areas:
        # Replaced print() with the module logger for consistency.
        logger.warning("process_outage_data: no 'areas' data found")
        return

    def _append_counties(counties):
        # Shared county handling — the original duplicated this loop in
        # both branches below.
        for county in counties:
            if county.get("key") == "county":
                outageinfo = (county.get('cust_a').get('val'),
                              county.get('cust_s'),
                              county.get('name').capitalize(),
                              county.get('state'),
                              company)
                allcountyoutages.append(outageinfo)

    # The key of the first item tells us which layout this report uses.
    first_item_key = primary_areas[0].get("key")
    if first_item_key == "state":
        for state_area in primary_areas:
            _append_counties(state_area.get("areas", []))
    elif first_item_key == "county":
        _append_counties(primary_areas)
    else:
        logger.warning("process_outage_data: unknown format; expected 'state' or 'county' key")
|
||||
|
||||
|
||||
# Kick off the kubra-hosted feeds. Each utility's feed is independent, so a
# failure in one must not stop the others — every call gets its own
# try/except with the same log messages as before.
_kubra_jobs = (
    ('kubra_fe for FE WV', kubra_fe,
     'https://kubra.io/data/',
     '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json',
     wvfemeta),
    ('kubra_aep for AEP WV', kubra_aep,
     'https://kubra.io/data/',
     '/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json',
     aepwvkubrameta),
    ('kubra_aep for AEP OH', kubra_aep,
     'https://kubra.io/data/',
     '/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json',
     aepohkubrameta),
    ('kubra_aep for AEP KY', kubra_aep,
     'https://kubra.io/data/',
     '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json',
     aepkykubrameta),
)
for _label, _fetch, _base1, _base2, _meta in _kubra_jobs:
    try:
        logger.info(f"Attempting {_label}")
        _fetch(_base1, _base2, _meta)
    except Exception as e:
        logger.error(f"Error in {_label}: {e}")
|
||||
|
||||
|
||||
# Co-op feeds: run each scraper independently so one failure doesn't abort
# the rest of the collection pass. __name__ reproduces the original
# per-function log messages exactly.
for _coop in (grayson, ku, southcentralpower, bigsandy, fleming):
    try:
        logger.info(f"Attempting {_coop.__name__}")
        _coop()
    except Exception as e:
        logger.error(f"Error in {_coop.__name__}: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# One shared timestamp so every row from this collection pass matches.
# datetime.utcnow() is deprecated; build the same naive-UTC value from an
# aware datetime instead — the resulting string is unchanged.
current_timestamp = str(datetime.now(timezone.utc).replace(tzinfo=None))

# Re-order each collected record to match the SQL column order below.
all_values = [(i[0], i[1], i[2], i[3], current_timestamp, i[4])
              for i in allcountyoutages]

sql = 'INSERT INTO countyoutages (outages, served, county, state, update, company) VALUES (%s, %s, %s, %s, %s, %s)'

if all_values:  # Only hit the database when there is something to insert.
    try:
        # Single batched round of inserts instead of one execute per row.
        cursor.executemany(sql, all_values)
        conn.commit()
        logger.info(f"Successfully inserted {len(all_values)} records into the database")
    except Exception as e:
        logger.error(f"Database error during insert: {e}")
        conn.rollback()  # Leave the connection usable for the cleanup below.
else:
    logger.warning("No data to insert into the database")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Post-processing: tag each new row with its NWS forecast office (CWA) and
# discard rows that don't match any known county. Wrapped in try/except to
# match the insert block above, with finally guaranteeing the connection is
# released even on failure (the original leaked it on any exception).
try:
    # Backfill the CWA from the county lookup table for newly inserted rows.
    cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')
    conn.commit()
    # Rows with no CWA match are outside the coverage area; drop them.
    cursor.execute("delete from countyoutages where cwa is null")
    conn.commit()
except Exception as e:
    logger.error(f"Database error during post-processing: {e}")
    conn.rollback()
finally:
    # Always release the cursor and connection.
    cursor.close()
    conn.close()
|
||||
Reference in New Issue
Block a user