# powercounty.py
|
|
|
|
import logging
|
|
import requests
|
|
import json
|
|
import psycopg2
|
|
from datetime import datetime
|
|
import re
|
|
from collections import defaultdict
|
|
import threading
|
|
|
|
|
|
# Set up logging
|
|
# Configure root logging: everything at INFO and above goes both to a
# rotatingless local file and to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('powercounty.log'),
        logging.StreamHandler()
    ]
)

# Set up a logger for this module
logger = logging.getLogger(__name__)

# Database connection parameters.
# NOTE(review): credentials are hardcoded here; consider loading them from
# the environment or a config file before deploying more widely.
DB_PARAMS = {
    'host': 'localhost',
    'database': 'nws',
    'user': 'nws',
    'password': 'nws'
}

# Shared requests session so all fetches reuse one connection pool.
S = requests.Session()

# Power company metadata and URLs (from power3.py).
# The *_META URLs return JSON describing the current data "directory"
# (AEP S3 buckets) or stormcenter state (Kubra); the *_BASE URLs are the
# prefixes the directory name is appended to.
AEP_OH_META = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
AEP_WV_META = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
AEP_KY_META = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
WV_FE_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
AEP_WV_KUBRA_META = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
AEP_OH_KUBRA_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
AEP_KY_KUBRA_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'

AEP_WV_BASE = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
AEP_OH_BASE = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
AEP_KY_BASE = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
GRAYSON_COUNTY = 'https://outages.graysonrecc.com/data/boundaries.json'

# Additional co-op boundary feeds from power3.py
flemingjson = 'https://outage.fme.coop/data/boundaries.json'
bigsandy_url = 'http://outagemap.bigsandyrecc.com/data/boundaries.json'
southcentralpower_url = 'https://outage.southcentralpower.com/data/boundaries.json'

# Global list to collect all outage data. Each entry is a 5-tuple:
# (customers_out, customers_served, county_name, state, company).
allcountyoutages = []
|
|
|
|
|
|
# This function will try to get a URL and log any errors
|
|
def safe_request(url, description="Fetching data", timeout=30):
    """GET *url* with the shared session, logging any failure.

    Args:
        url: URL to fetch.
        description: Human-readable label used in the log messages.
        timeout: Seconds to wait for connect/read before aborting.
            BUG FIX: the original call had no timeout, so a stalled
            server could hang this script indefinitely.

    Returns:
        The ``requests.Response`` on success, or ``None`` on any request
        error (connection failure, timeout, or non-2xx status).
    """
    try:
        logger.info(f"{description}: {url}")
        response = S.get(url, timeout=timeout)
        response.raise_for_status()  # Raise an exception for bad status codes
        logger.info(f"Successfully fetched data from {url}")
        return response
    except requests.exceptions.RequestException as e:
        # Timeout and HTTPError are both RequestException subclasses.
        logger.error(f"Failed to {description} from {url}: {e}")
        return None
|
|
|
|
# This function will parse a JSON response and log errors
|
|
def safe_json_load(response, description="Parsing JSON"):
    """Decode a response body as JSON, logging instead of raising.

    Returns the decoded object, or ``None`` when the body is not valid
    JSON or *response* is ``None`` (the ``.text`` access then raises
    ``AttributeError``, which is handled here too).
    """
    logger.info(f"{description}")
    try:
        parsed = json.loads(response.text)
    except (json.JSONDecodeError, AttributeError) as err:
        logger.error(f"Failed to {description}: {err}")
        return None
    logger.info("Successfully parsed JSON data")
    return parsed
|
|
|
|
# Ported functions from power3.py with enhanced logging
|
|
|
|
def fleming():
    """Fetch outage data for Fleming County, KY"""
    logger.info("Fetching Fleming County outage data")
    state, company = 'KY', 'FLEM'

    response = safe_request(flemingjson, "fetching Fleming data")
    if response is None:
        return
    data = safe_json_load(response, "parsing Fleming JSON")
    if data is None:
        return

    try:
        boundaries = data[0]['boundaries']
        for boundary in boundaries:
            allcountyoutages.append((
                boundary.get('customersOutNow'),
                boundary.get('customersServed'),
                boundary.get('name'),
                state,
                company,
            ))
        logger.info(f"Successfully processed {len(boundaries)} Fleming County boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Fleming County data: {e}")
|
|
|
|
def bigsandy():
    """Fetch outage data for Big Sandy RECC"""
    logger.info("Fetching Big Sandy RECC outage data")
    # NOTE(review): state is recorded as 'OH' here even though the feed is
    # Big Sandy RECC -- confirm this matches the intended DB convention.
    state, company = 'OH', 'BS'

    response = safe_request(bigsandy_url, "fetching Big Sandy data")
    if response is None:
        return
    data = safe_json_load(response, "parsing Big Sandy JSON")
    if data is None:
        return

    try:
        boundaries = data[0]['boundaries']
        for boundary in boundaries:
            allcountyoutages.append((
                boundary.get('customersOutNow'),
                boundary.get('customersServed'),
                boundary.get('name'),
                state,
                company,
            ))
        logger.info(f"Successfully processed {len(boundaries)} Big Sandy boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Big Sandy data: {e}")
|
|
|
|
def southcentralpower():
    """Fetch outage data for South Central Power"""
    logger.info("Fetching South Central Power outage data")
    company = 'SCP'
    state = 'OH'

    response = safe_request(southcentralpower_url, "fetching South Central Power data")
    if response is None:
        return
    data = safe_json_load(response, "parsing South Central Power JSON")
    if data is None:
        return

    try:
        boundaries = data[0]['boundaries']
        for boundary in boundaries:
            allcountyoutages.append((
                boundary.get('customersOutNow'),
                boundary.get('customersServed'),
                boundary.get('name'),
                state,
                company,
            ))
        logger.info(f"Successfully processed {len(boundaries)} South Central Power boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing South Central Power data: {e}")
|
|
|
|
def ku_get_url():
    """Build the URL of the current KU county-outage report JSON.

    Scrapes the stormcenter report page for its instanceId, resolves the
    current interval-generation path from the stormcenter state API, and
    returns the full report URL -- or ``None`` at any failed step.
    """
    logger.info("Getting KU outage data URL")
    report_page = safe_request(
        'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40',
        "fetching KU report page")
    if report_page is None:
        return None

    try:
        match = re.search(r"instanceId: '(.*?)',", report_page.text)
        if not match:
            logger.error("Could not extract instanceId from KU report page")
            return None

        state_url = ('https://kubra.io/stormcenter/api/v1/stormcenters/'
                     + match.group(1)
                     + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false')
        state_response = safe_request(state_url, "fetching KU stormcenter data")
        if state_response is None:
            return None

        state_json = safe_json_load(state_response, "parsing KU stormcenter JSON")
        if state_json is None:
            return None

        interval_path = state_json.get('data').get('interval_generation_data')
        report_url = ('https://kubra.io/' + interval_path
                      + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json')
        logger.info(f"Successfully constructed KU data URL: {report_url}")
        return report_url
    except Exception as e:
        # Broad catch: any surprise in scraping/parsing just yields None.
        logger.error(f"Error getting KU URL: {e}")
        return None
|
|
|
|
def county_json(meta, url, jsonname):
    """Resolve a metadata 'directory' and fetch the county JSON inside it.

    Args:
        meta: URL of the metadata JSON containing the current directory.
        url: Base URL the directory name is appended to.
        jsonname: File name of the county data within the directory.

    Returns:
        The response from the data URL, or ``None`` at any failed step.
    """
    meta_response = safe_request(meta, "fetching metadata for county JSON")
    if meta_response is None:
        return None

    metadata = safe_json_load(meta_response, "parsing metadata for county JSON")
    if metadata is None:
        return None

    try:
        data_url = url + metadata['directory'] + jsonname
    except KeyError as e:
        logger.error(f"Error accessing metadata directory: {e}")
        return None
    return safe_request(data_url, "fetching county JSON data")
|
|
|
|
def ku():
    """Fetch KU outage data and append per-county tuples to the global list."""
    logger.info("Fetching KU outage data")
    url = ku_get_url()
    if url is None:
        return

    data_response = safe_request(url, "fetching KU data")
    if data_response is None:
        return
    tempdata = safe_json_load(data_response, "parsing KU data JSON")
    if tempdata is None:
        return

    try:
        areas = tempdata['file_data']['areas']
        # County records are spread across three nested sub-areas of the
        # report; flatten them into one list.
        ku_list = []
        for group in (areas[2]['areas'][0]['areas'],
                      areas[2]['areas'][1]['areas'],
                      areas[1]['areas'][0]['areas']):
            ku_list.extend(group)

        for record in ku_list:
            allcountyoutages.append((
                record['cust_a']['val'],
                record['cust_s'],
                record['name'].capitalize(),
                record['state'],
                record['utility'],
            ))

        logger.info(f"Successfully processed {len(ku_list)} KU outage records")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing KU data: {e}")
|
|
|
|
def grayson():
    """Fetch Grayson County outage data"""
    logger.info("Fetching Grayson County outage data")
    company = 'GRE'
    state = 'KY'

    response = safe_request(GRAYSON_COUNTY, "fetching Grayson County data")
    if response is None:
        return

    # Guard against HTML error pages served with a 200 status.
    content_type = response.headers.get('Content-Type', '')
    if not content_type.startswith('application/json'):
        logger.error(f"Unexpected content type from Grayson County: {response.headers.get('Content-Type')}")
        return

    data = safe_json_load(response, "parsing Grayson County JSON")
    if data is None:
        return

    try:
        boundaries = data[0]['boundaries']
        for boundary in boundaries:
            allcountyoutages.append((
                boundary.get('customersOutNow'),
                boundary.get('customersServed'),
                boundary.get('name'),
                state,
                company,
            ))
        logger.info(f"Successfully processed {len(boundaries)} Grayson County boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Grayson County data: {e}")
|
|
|
|
def aep_county_vawv(meta, url, jsonname):
    """Fetch AEP county outage data for VA and WV and append it to the
    global list.

    Args:
        meta: Metadata URL passed through to county_json().
        url: Base data URL passed through to county_json().
        jsonname: JSON file name passed through to county_json().
    """
    logger.info("Fetching AEP county data for VA and WV")
    company = 'AEP'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return

    if not outage_response.headers.get('Content-Type', '').startswith('application/octet-stream'):
        logger.error(f"Unexpected content type from AEP VA/WV: {outage_response.headers.get('Content-Type')}")
        return

    tempdata = safe_json_load(outage_response, "parsing AEP VA/WV JSON")
    if tempdata is None:
        return

    try:
        # WV data
        state = 'WV'
        for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
            allcountyoutages.append(outageinfo)

        # VA data
        state = 'VA'
        for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)

        logger.info("Successfully processed AEP VA/WV county data")
    # BUG FIX: j.get('cust_a') / j.get('area_name') return None when the key
    # is absent, so the chained .get()/.capitalize() raises AttributeError
    # (or TypeError), which the original (KeyError, IndexError) clause let
    # escape. Catch those too so malformed records are logged, not fatal.
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        logger.error(f"Error processing AEP VA/WV data: {e}")
|
|
|
|
def aep_county_oh(meta, url, jsonname):
    """Fetch AEP county outage data for Ohio and append it to the global list.

    Args:
        meta: Metadata URL passed through to county_json().
        url: Base data URL passed through to county_json().
        jsonname: JSON file name passed through to county_json().
    """
    logger.info("Fetching AEP county data for Ohio")
    company = 'AEP'
    state = 'OH'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return

    tempdata = safe_json_load(outage_response, "parsing AEP OH JSON")
    if tempdata is None:
        return

    try:
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
        logger.info("Successfully processed AEP OH county data")
    # BUG FIX: chained .get() on a missing 'cust_a'/'area_name' raises
    # AttributeError, which the original except tuple did not catch.
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        logger.error(f"Error processing AEP OH data: {e}")
|
|
|
|
def aep_county_ky(meta, url, jsonname):
    """Fetch AEP county outage data for Kentucky and append it to the
    global list.

    Args:
        meta: Metadata URL passed through to county_json().
        url: Base data URL passed through to county_json().
        jsonname: JSON file name passed through to county_json().
    """
    logger.info("Fetching AEP county data for Kentucky")
    company = 'AEP'
    state = 'KY'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return

    tempdata = safe_json_load(outage_response, "parsing AEP KY JSON")
    if tempdata is None:
        return

    try:
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
        logger.info("Successfully processed AEP KY county data")
    # BUG FIX: chained .get() on a missing 'cust_a'/'area_name' raises
    # AttributeError, which the original except tuple did not catch.
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        logger.error(f"Error processing AEP KY data: {e}")
|
|
|
|
def firstenergy_county(meta, url, jsonname):
    """Fetch First Energy county outage data (WV) and append it to the
    global list.

    Args:
        meta: Metadata URL passed through to county_json().
        url: Base data URL passed through to county_json().
        jsonname: JSON file name passed through to county_json().
    """
    logger.info("Fetching First Energy county data")
    company = 'FE'
    state = 'WV'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return

    # The S3 feed serves the JSON as an octet-stream; anything else is an
    # error page.
    if not outage_response.headers.get('Content-Type', '').startswith('application/octet-stream'):
        logger.error(f"Unexpected content type from First Energy: {outage_response.headers.get('Content-Type')}")
        return

    tempdata = safe_json_load(outage_response, "parsing First Energy JSON")
    if tempdata is None:
        return

    try:
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
        logger.info("Successfully processed First Energy county data")
    # BUG FIX: chained .get() on a missing 'cust_a'/'area_name' raises
    # AttributeError, which the original except tuple did not catch.
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        logger.error(f"Error processing First Energy data: {e}")
|
|
|
|
def get_kubra_hexes(url):
    """Extract the two hex path components from a Kubra stormcenter state URL.

    Args:
        url: A Kubra currentState URL returning JSON with a
            'cluster_interval_generation_data' path under 'data'.

    Returns:
        A 2-tuple of path components (elements 2 and 3 of the
        slash-separated path), or ``(None, None)`` on any failure.
    """
    outage_response = safe_request(url, "fetching Kubra hex data")
    if outage_response is None:
        return None, None

    if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra: {outage_response.headers.get('Content-Type')}")
        return None, None

    tempdata = safe_json_load(outage_response, "parsing Kubra hex JSON")
    if tempdata is None:
        return None, None

    try:
        bothhex = tempdata.get('data').get('cluster_interval_generation_data')
        hexes = bothhex.split('/')
        logger.info(f"Successfully extracted Kubra hexes: {hexes}")
        return hexes[2], hexes[3]
    # BUG FIX: a too-short path makes hexes[2]/hexes[3] raise IndexError,
    # which the original (KeyError, AttributeError) clause let escape.
    except (KeyError, AttributeError, IndexError) as e:
        logger.error(f"Error extracting Kubra hexes: {e}")
        return None, None
|
|
|
|
def kubra_fe(baseurl1, baseurl2, meta):
    """Fetch First Energy county data from the Kubra report endpoint.

    Args:
        baseurl1: URL prefix placed before the Kubra hex component.
        baseurl2: URL suffix (report path) placed after the hex component.
        meta: Kubra currentState URL used to resolve the hex components.
    """
    logger.info("Fetching Kubra First Energy data")
    hex2 = get_kubra_hexes(meta)
    if hex2[0] is None:
        return

    url = baseurl1 + hex2[1] + baseurl2
    company = 'FE'
    state = 'WV'
    outage_response = safe_request(url, "fetching Kubra FE data")
    if outage_response is None:
        return

    if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra FE: {outage_response.headers.get('Content-Type')}")
        return

    tempdata = safe_json_load(outage_response, "parsing Kubra FE JSON")
    if tempdata is None:
        return

    try:
        for j in tempdata['file_data']['areas']:
            # Only county-level records; the feed mixes area granularities.
            if j.get('key') == "county":
                outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('name').capitalize(), state, company
                allcountyoutages.append(outageinfo)
        logger.info("Successfully processed Kubra FE county data")
    # BUG FIX: chained .get() on a missing 'cust_a'/'name' raises
    # AttributeError, which the original except tuple did not catch.
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        logger.error(f"Error processing Kubra FE data: {e}")
|
|
|
|
def kubra_aep(baseurl1, baseurl2, meta, company='AEP'):
    """Fetch AEP outage data from a Kubra report endpoint and hand the
    parsed payload to process_outage_data().

    Args:
        baseurl1: URL prefix placed before the Kubra hex component.
        baseurl2: URL suffix (report path) placed after the hex component.
        meta: Kubra currentState URL used to resolve the hex components.
        company: Company code recorded with each outage row.
    """
    logger.info(f"Fetching Kubra AEP data for company: {company}")
    hexes = get_kubra_hexes(meta)
    if hexes[0] is None:
        return

    report_url = baseurl1 + hexes[1] + baseurl2
    response = safe_request(report_url, "fetching Kubra AEP data")
    if response is None:
        return

    content_type = response.headers.get('Content-Type', '')
    if not content_type.startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra AEP: {response.headers.get('Content-Type')}")
        return

    payload = safe_json_load(response, "parsing Kubra AEP JSON")
    if payload is None:
        return

    process_outage_data(payload, company)
|
|
|
|
def process_outage_data(data, company):
    """Append county outage tuples from a Kubra report payload to the
    global list, handling both known report layouts.

    Args:
        data: Parsed report JSON with a 'file_data' -> 'areas' structure.
        company: Company code recorded with each outage row.
    """
    try:
        # Navigate to the primary list of areas
        primary_areas = data.get("file_data", {}).get("areas", [])

        # If the list is empty, there's nothing to process
        if not primary_areas:
            logger.warning("No 'areas' data found in outage data.")
            return

        # Check the key of the first item to determine the format
        first_item_key = primary_areas[0].get("key")

        if first_item_key == "state":
            # Format 1: states at the top level, counties nested inside
            for state_area in primary_areas:
                for county in state_area.get("areas", []):
                    if county.get("key") == "county":
                        outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(), county.get('state'), company
                        allcountyoutages.append(outageinfo)
        elif first_item_key == "county":
            # Format 2: the primary list is already the county list
            for county in primary_areas:
                if county.get("key") == "county":
                    outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(), county.get('state'), company
                    allcountyoutages.append(outageinfo)
        else:
            logger.warning(f"Unknown data format. Could not find 'state' or 'county' key. Found: {first_item_key}")

    # BUG FIX: county.get('cust_a') / county.get('name') return None when
    # the key is absent, so the chained .get()/.capitalize() raises
    # AttributeError (or TypeError), which the original (KeyError,
    # IndexError) clause let escape. Catch those too.
    except (KeyError, IndexError, AttributeError, TypeError) as e:
        logger.error(f"Error processing outage data: {e}")
|
|
|
|
def insert_outage_data(cursor, outage_data, current_timestamp):
    """Insert collected outage rows into the newcountyoutages table.

    Args:
        cursor: An open psycopg2 cursor (transaction committed by caller).
        outage_data: Iterable of 5-tuples
            (outages, served, county, state, company).
        current_timestamp: Timestamp string stored in the ``update``
            column of every row.

    Raises:
        Re-raises any database error after logging it, so the caller can
        roll back the transaction.
    """
    if not outage_data:
        logger.info("No outage data to insert into the database.")
        return

    # BUG FIX: the SQL has six placeholders but the collected tuples carry
    # only five values -- current_timestamp was accepted and never used, so
    # executemany() failed with a parameter-count mismatch. Splice the
    # timestamp into each row as the 'update' column.
    rows = [
        (outages, served, county, state, current_timestamp, company)
        for outages, served, county, state, company in outage_data
    ]

    sql = 'INSERT INTO newcountyoutages (outages, served, county, state, update, company) VALUES (%s, %s, %s, %s, %s, %s)'
    try:
        logger.info(f"Inserting {len(rows)} rows into the database.")
        cursor.executemany(sql, rows)
        logger.info("Successfully inserted data into the database.")
    except Exception as e:
        logger.error(f"Failed to insert data into the database: {e}")
        raise
|
|
|
|
def main():
    """Collect outage data from every provider and store it in Postgres.

    Each provider is fetched inside its own try/except so one bad feed
    cannot abort the whole run; the collected rows are inserted in a
    single transaction that is rolled back on failure.
    """
    conn = None
    # BUG FIX: cursor must be pre-bound; if conn.cursor() raised, the
    # original finally block hit an unbound 'cursor' and masked the real
    # error with a NameError.
    cursor = None
    try:
        conn = psycopg2.connect(**DB_PARAMS)
        cursor = conn.cursor()
        logger.info("Successfully connected to the database.")

        # Clear the global list at the start
        global allcountyoutages
        allcountyoutages = []

        logger.info("Starting data collection.")

        # (label, callable, args) for each provider; labels match the
        # original per-provider error messages.
        providers = [
            ("Kubra FE", kubra_fe,
             ('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', WV_FE_META)),
            ("Kubra AEP WV", kubra_aep,
             ('https://kubra.io/data/', '/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json', AEP_WV_KUBRA_META)),
            ("Kubra AEP OH", kubra_aep,
             ('https://kubra.io/data/', '/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json', AEP_OH_KUBRA_META)),
            ("Kubra AEP KY", kubra_aep,
             ('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', AEP_KY_KUBRA_META)),
            ("Grayson County", grayson, ()),
            ("KU", ku, ()),
            ("South Central Power", southcentralpower, ()),
            ("Big Sandy", bigsandy, ()),
            ("AEP OH", aep_county_oh, (AEP_OH_META, AEP_OH_BASE, "metadata.json")),
            ("AEP WV/VA", aep_county_vawv, (AEP_WV_META, AEP_WV_BASE, "metadata.json")),
            ("AEP KY", aep_county_ky, (AEP_KY_META, AEP_KY_BASE, "metadata.json")),
            ("First Energy", firstenergy_county,
             (WV_FE_META, 'https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/', "metadata.json")),
        ]
        for label, collect, args in providers:
            try:
                collect(*args)
            except Exception as e:
                logger.error(f"Error collecting {label} data: {e}")

        # Insert collected data into the new table.
        # NOTE(review): datetime.utcnow() is naive (and deprecated in 3.12);
        # kept as-is so the stored string format does not change.
        current_timestamp = str(datetime.utcnow())
        insert_outage_data(cursor, allcountyoutages, current_timestamp)
        conn.commit()
        logger.info("Data collection and database insert completed successfully.")

    except Exception:
        logger.exception("An error occurred during the main execution.")
        if conn:
            conn.rollback()
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
            logger.info("Database connection closed.")
|
|
|
|
|
|
# Run the collector only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|