This commit is contained in:
2025-12-08 01:48:50 +00:00
parent 80896ea3fb
commit c50b70bda0
10 changed files with 0 additions and 2002 deletions

255
one2.php
View File

@@ -1,255 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<script src="js/jsani/lib/jquery-1.11.1.min.js"></script>
<script src="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.js"></script>
<link href="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.css" rel="stylesheet" type="text/css">
<script src="js/jsani/jquery.jsani.min.js"></script>
<link href="js/jsani/jquery.jsani.min.css" rel="stylesheet" type="text/css">
</head>
<style type="text/css">
html, body {
height: 100%;
}
#caminfo {
height: 100%;
}
table {
border-collapse: collapse;
}
table,th,td {
border: 1px solid black;
padding-left: 15px; padding-right: 15px; /* 'padding-horizontal' is not a CSS property */
}
</style>
<body>
<div style="width: auto; height: 150px; margin: auto;">
<div id="caminfo">
<table>
<tbody id="camstats"></tbody>
</table>
</div>
</div>
<div style="width: auto; margin: auto;">
<div id="animation_1"></div>
</div>
<div id="archive">
<label>Current archive length is 15 days</label>
<form onsubmit="return false;" method="post" name="archive">
<label for="archive">Enter Ending Date Time For Archive Data (YYYY-MM-DD HH:MM) UTC</label>
<input type="text" id="dtg" name="dtg">
<input type="submit" name="submit" value="Submit" onclick="archiveshow()" />
</form>
<br>
<br>
<form onsubmit="return false;" method="post" name="gifit">
<label for="gifit">Enter frame delay and last frame dwell to make an animated gif from loaded frames.</label>
<br>
<label for="delay">Delay</label>
<input type="text" id="delay" name="delay" value="20" size="6">
<label for="delay">Last Frame Delay</label>
<input type="text" id="lastdelay" name="lastdelay" value="200" size="6">
<label for="maxh">Max Horizontal Size</label>
<input type="text" id="maxh" name="maxh" value="500" size="6">
<label for="maxv">Max Vertical Size</label>
<input type="text" id="maxv" name="maxv" value="400" size="6">
<label for="delay">Last Frame Delay</label>
<input type="submit" name="gifit" value="Make GIF" onclick="makemp4()"/>
</form>
</div>
<script>
<?php
// Emit the request parameters as JavaScript globals for the viewer script.
// SECURITY FIX: the raw $_GET values were previously echoed verbatim into
// the <script> block (reflected XSS); values are now passed through
// json_encode() so they are emitted as safe JS literals, and isset() guards
// suppress undefined-index warnings.
$camid = pg_escape_string($_GET['camid']);
echo('var camid = ' . json_encode($_GET["camid"]) . ';');
if(isset($_GET['dtg']) && $_GET['dtg']){
    $endtime = pg_escape_string($_GET['dtg']);
}
if(isset($_GET['camimages']) && $_GET['camimages']){
    $camimages = $_GET['camimages'];
    echo('var camimages = ' . json_encode($_GET["camimages"]) . ';');
} else {
    // Default number of frames when none was requested.
    echo('var camimages = 20;');
}
if(isset($_GET['dtg']) && $_GET['dtg']){
    echo('var camtime = ' . json_encode($_GET["dtg"]) . ';');
}
?>
externalcam = [];
function archiveshow() {
    // Open the archive view for the date/time typed into the form.
    // FIX: 'archive' was assigned without var/let, leaking an implicit
    // global (and shadowing the window.archive named-form reference).
    var archive = document.getElementById("dtg").value.trim().replace(/ /g, '+');
    window.open("one.php?camid=" + camid + "&camimages=" + camimages + "&dtg=" + archive);
}
function doTheInsert(label1, value1, label2, value2) {
    // Append one four-cell row to the camera-stats table body.
    // FIX: cells are now wrapped in an explicit <tr> (the original relied
    // on the HTML parser implying a row), and newRow is declared with var
    // instead of leaking an implicit global.
    var newRow = "<tr><td>" + label1 + "</td><td>" + value1 + "</td><td>" + label2 + "</td><td>" + value2 + "</td></tr>";
    document.getElementById('camstats').innerHTML += newRow;
}
function updatetable() {
    // Rebuild the camera-info table from single.php for the current camid.
    $.getJSON("single.php?camid=" + camid, function(caminfo){
        document.getElementById('camstats').innerHTML = '';
        doTheInsert('State',caminfo[0]['state'],'County',caminfo[0]['county']);
        doTheInsert('Public Zone', caminfo[0]['pzone'], 'CWA', caminfo[0]['cwa']);
        doTheInsert('Elevation',caminfo[0]['elevation'] + ' feet', 'Direct link to feed (all may not be accessible from RLX)','<a href="' + caminfo[0]['url'] + '">' + 'link' + '</a>');
        // FIX: target attribute used a curly quote (target=”_blank") which
        // broke the attribute; replaced with a straight double quote.
        doTheInsert('Description','<a href="' + googleMap(caminfo[0]['lat'],caminfo[0]['lon']) + '" target="_blank">' + caminfo[0]['description'] + '</a>', '','');
    });
}
function googleMap(lat, lon) {
    // Build a Google Maps satellite-view link centred on the coordinates.
    var base = "http://maps.google.com/maps?t=k&q=loc:";
    return base + lat + "+" + lon + "&basemap=satellite";
}
function getcam() {
// Fetch the frame list for this camera (optionally ending at the archived
// time 'camtime', which only exists when a dtg was requested) and build
// the jsani animation widget, then refresh the info table.
if (typeof camtime !== 'undefined') {
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages + '&dtg=' + camtime;
} else {
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages;
}
//$.getJSON("individualcam.php?camid=" + camid + '&camimages=' + camimages, function(data){
$.getJSON(reqstring, function(data){
var camlist = [];
var filetimes = [];
// Collect relative frame paths (for jsani), per-frame HTML labels, and
// full paths (for the GIF-export feature) in parallel.
for(var i in data){
camlist.push(data[i].filepath);
filetimes.push('<a href="./camdata/' + data[i].filepath + '">Pause Loop and click this to view image directly</a> ' + 'Image Retrieved: ' + data[i].dateutc + ' UTC');
externalcam.push("./camdata/" + data[i].filepath);
}
// Reverse all three so the loop plays in the opposite order to the feed
// (presumably the feed returns newest-first — confirm against individualcam.php).
camlist.reverse();
filetimes.reverse();
externalcam.reverse();
var animation = $("#animation_1").jsani({
baseDir: './camdata',
imageSrcs: camlist,
aniWidth: 800,
aniHeight: 600,
//initdwell: 200,
frameLabels: filetimes,
controls: ['framelabel', 'stopplay', 'firstframe', 'previous', 'next', 'lastframe', 'looprock', 'slow', 'fast', 'zoom'],
last_frame_pause: 8,
//first_frame_pause: 1,
//frame_pause: '0:5, 3:6'
});
updatetable();
});
}
getcam();
window.setInterval('refresh()', 60000);
function refresh() {
    // Reload the whole page so newly captured frames are picked up.
    window.location.reload();
}
function makemp4() {
    // POST the loaded frame paths plus user-chosen delays/size limits to
    // mp4.php; on success, show the returned base64 GIF wrapped in a
    // click-to-download link.
    var delay = document.getElementById("delay").value;
    var lastdelay = document.getElementById("lastdelay").value;
    var maxh = document.getElementById("maxh").value;
    var maxv = document.getElementById("maxv").value;
    $.ajax({
        type: "POST",
        url: 'mp4.php',
        data: {data: externalcam, images: camimages, delay: delay, lastdelay: lastdelay, maxh: maxh, maxv: maxv},
        success: function(data) {
            var giffy = document.createElement('img');
            giffy.src = `data:image/gif;base64,${data}`;
            var outputImg = document.createElement('a');
            outputImg.innerHTML = "Click to save image <br>";
            outputImg.href = giffy.src;
            // FIX: download filename was the typo "loof.gif".
            outputImg.download = "loop.gif";
            // FIX: giffy was appended twice (the second appendChild was a
            // no-op move) and href/download were set on the <img>, where
            // they have no effect; a single append on the anchor suffices.
            outputImg.appendChild(giffy);
            document.body.appendChild(outputImg);
        }
    });
}
</script>
<div id='hidden' style="display: none;">
</div>
</body>
</html>

View File

@@ -1,265 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<script src="js/jsani/lib/jquery-1.11.1.min.js"></script>
<script src="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.js"></script>
<link href="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.css" rel="stylesheet" type="text/css">
<script src="js/jsani/jquery.jsani.min.js"></script>
<link href="js/jsani/jquery.jsani.min.css" rel="stylesheet" type="text/css">
</head>
<style type="text/css">
html, body {
height: 100%;
}
#caminfo {
height: 100%;
}
table {
border-collapse: collapse;
}
table,th,td {
border: 1px solid black;
padding-left: 15px; padding-right: 15px; /* 'padding-horizontal' is not a CSS property */
}
</style>
<body>
<div style="width: auto; height: 150px; margin: auto;">
<div id="caminfo">
<table>
<tbody id="camstats"></tbody>
<form onsubmit="return false;" method="post" name="gifit">
<input type="submit" name="stopit" value="Stop Auto Refresh" onclick="stoptimer()"/>
</form>
</table>
</div>
</div>
<div style="width: auto; margin: auto;">
<div id="animation_1"></div>
</div>
<div id="archive">
<label>Current archive length is 15 days</label>
<form onsubmit="return false;" method="post" name="archive">
<label for="archive">Enter Ending Date Time For Archive Data (YYYY-MM-DD HH:MM) UTC</label>
<input type="text" id="dtg" name="dtg">
<input type="submit" name="submit" value="Submit" onclick="archiveshow()" />
</form>
<br>
<form onsubmit="return false;" method="post" name="gifit">
<label for="gifit">Enter frame delay and last frame delay (ms) to make an animated gif from loaded frames (will take a couple seconds).</label>
<br>
<label for="delay">Delay</label>
<input type="text" id="delay" name="delay" value="20" size="6">
<label for="delay">Last Frame Delay</label>
<input type="text" id="lastdelay" name="lastdelay" value="200" size="6">
<label for="maxh">Constrain Horizontal Size</label>
<input type="text" id="maxh" name="maxh" value="500" size="6">
<label for="maxv">Constrain Vertical Size</label>
<input type="text" id="maxv" name="maxv" value="400" size="6">
<label for="delay">Last Frame Delay</label>
<input type="submit" name="gifit" value="Make GIF" onclick="makemp4()"/>
</form>
</div>
<script>
<?php
// Emit the request parameters as JavaScript globals for the viewer script.
// SECURITY FIX: raw $_GET values were echoed verbatim into the <script>
// block (reflected XSS); they are now emitted via json_encode().
// BUG FIX: "!$_GET['camimages'] ?? null" applied ! before ?? (operator
// precedence), so the undefined index was still read; replaced by an
// else branch on the positive test.
$camid = pg_escape_string($_GET['camid']);
echo('var camid = ' . json_encode($_GET["camid"]) . ';');
if($_GET['dtg'] ?? null){
    $endtime = pg_escape_string($_GET['dtg']);
}
if($_GET['camimages'] ?? null){
    $camimages = $_GET['camimages'];
    echo('var camimages = ' . json_encode($_GET["camimages"]) . ';');
} else {
    // Default number of frames when none was requested.
    echo('var camimages = 20;');
}
if($_GET['dtg'] ?? null){
    echo('var camtime = ' . json_encode($_GET["dtg"]) . ';');
}
?>
externalcam = [];
function archiveshow() {
    // Open the archive view for the date/time typed into the form.
    // FIX: 'archive' was assigned without var/let, leaking an implicit
    // global (and shadowing the window.archive named-form reference).
    var archive = document.getElementById("dtg").value.trim().replace(/ /g, '+');
    window.open("one.php?camid=" + camid + "&camimages=" + camimages + "&dtg=" + archive);
}
function doTheInsert(label1, value1, label2, value2) {
    // Append one four-cell row to the camera-stats table body.
    // FIX: cells are now wrapped in an explicit <tr> (the original relied
    // on the HTML parser implying a row), and newRow is declared with var
    // instead of leaking an implicit global.
    var newRow = "<tr><td>" + label1 + "</td><td>" + value1 + "</td><td>" + label2 + "</td><td>" + value2 + "</td></tr>";
    document.getElementById('camstats').innerHTML += newRow;
}
function updatetable() {
    // Rebuild the camera-info table from single.php and retitle the page.
    $.getJSON("single.php?camid=" + camid, function(caminfo){
        document.getElementById('camstats').innerHTML = '';
        doTheInsert('State',caminfo[0]['state'],'County',caminfo[0]['county']);
        doTheInsert('Public Zone', caminfo[0]['pzone'], 'CWA', caminfo[0]['cwa']);
        doTheInsert('Elevation',caminfo[0]['elevation'] + ' feet', 'Direct link to feed (all may not be accessible from RLX)','<a href="' + caminfo[0]['url'] + '">' + 'link' + '</a>');
        // FIX: the two mutually-exclusive ifs are now a single if/else, and
        // the target attributes used a curly quote (target=”_blank") which
        // broke the attribute; replaced with straight double quotes.
        if (caminfo[0]['source'] == null) {
            doTheInsert('Source for sharing','none', 'Permission to share (if null, use judgement)',caminfo[0]['permission']);
        } else {
            doTheInsert('Source for sharing','<a href="' + caminfo[0]['source'] + '" target="_blank">' + 'link' + '</a>', 'Permission to share (if null, use judgement)',caminfo[0]['permission']);
        }
        doTheInsert('Description','<a href="' + googleMap(caminfo[0]['lat'],caminfo[0]['lon']) + '" target="_blank">' + caminfo[0]['description'] + '</a>', '','');
        document.title = 'Cam: ' + caminfo[0]['description'];
    });
}
function googleMap(lat, lon) {
    // Build a Google Maps satellite-view link centred on the coordinates.
    var base = "http://maps.google.com/maps?t=k&q=loc:";
    return base + lat + "+" + lon + "&basemap=satellite";
}
function getcam() {
// Fetch the frame list for this camera (optionally ending at the archived
// time 'camtime', which only exists when a dtg was requested) and build
// the jsani animation widget, then refresh the info table.
if (typeof camtime !== 'undefined') {
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages + '&dtg=' + camtime;
} else {
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages;
}
//$.getJSON("individualcam.php?camid=" + camid + '&camimages=' + camimages, function(data){
$.getJSON(reqstring, function(data){
var camlist = [];
var filetimes = [];
// Collect relative frame paths (for jsani), per-frame HTML labels, and
// full paths (for the GIF-export feature) in parallel.
for(var i in data){
camlist.push(data[i].filepath);
filetimes.push('<a href="./camdata/' + data[i].filepath + '">Pause Loop and click this to view image directly</a> ' + 'Image Retrieved: ' + data[i].dateutc + ' UTC');
externalcam.push("./camdata/" + data[i].filepath);
}
// Reverse all three so the loop plays in the opposite order to the feed
// (presumably the feed returns newest-first — confirm against individualcam.php).
camlist.reverse();
filetimes.reverse();
externalcam.reverse();
var animation = $("#animation_1").jsani({
baseDir: './camdata',
imageSrcs: camlist,
aniWidth: 800,
aniHeight: 600,
//initdwell: 200,
frameLabels: filetimes,
controls: ['framelabel', 'stopplay', 'firstframe', 'previous', 'next', 'lastframe', 'looprock', 'slow', 'fast', 'zoom'],
last_frame_pause: 8,
//first_frame_pause: 1,
//frame_pause: '0:5, 3:6'
});
updatetable();
});
}
getcam();
// window.setInterval('refresh()', 60000);
const myInterval = setInterval('refresh()', 60000);
function refresh() {
    // Reload the whole page so newly captured frames are picked up.
    window.location.reload();
}
function stoptimer() {
    // Cancel the auto-refresh interval started at page load.
    window.clearInterval(myInterval);
}
function makemp4() {
    // POST the loaded frame paths plus user-chosen delays/size limits to
    // mp4.php; on success, show the returned base64 GIF wrapped in a
    // click-to-download link.
    var delay = document.getElementById("delay").value;
    var lastdelay = document.getElementById("lastdelay").value;
    var maxh = document.getElementById("maxh").value;
    var maxv = document.getElementById("maxv").value;
    $.ajax({
        type: "POST",
        url: 'mp4.php',
        data: {data: externalcam, images: camimages, delay: delay, lastdelay: lastdelay, maxh: maxh, maxv: maxv},
        success: function(data) {
            var giffy = document.createElement('img');
            giffy.src = `data:image/gif;base64,${data}`;
            var outputImg = document.createElement('a');
            outputImg.innerHTML = "Click here or on the image to save gif <br>";
            outputImg.href = giffy.src;
            // FIX: download filename was the typo "loof.gif".
            outputImg.download = "loop.gif";
            // FIX: giffy was appended twice (the second appendChild was a
            // no-op move) and href/download were set on the <img>, where
            // they have no effect; a single append on the anchor suffices.
            outputImg.appendChild(giffy);
            document.body.appendChild(outputImg);
        }
    });
}
</script>
<div id='hidden' style="display: none;">
</div>
</body>
</html>

View File

@@ -1,209 +0,0 @@
import requests
import polyline
import json
import psycopg2
import psycopg2.extensions
from datetime import datetime, timezone
from geojson import Point, Feature, FeatureCollection, dump
import re
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()
proxies = {"http":"http://nws:nws@localhost:9000"}
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json"
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
graysoncounty = 'https://outages.graysonrecc.com/data/boundaries.json'
#flemingjson = 'https://outage.fme.coop/data/boundaries.json'
#buckeye rec
#https://outage.buckeyerec.coop/maps/OutageWebMap/maps/GWT.rpc
#washington
#https://weci.ebill.coop/woViewer/MapWiseWeb/GWT.rpc
allcountyoutages = []
S = requests.Session()
def fleming():
    """Collect per-county outage counts from Fleming-Mason Energy (KY).

    Appends (out, served, county, state, company) tuples to the module-level
    ``allcountyoutages`` list.

    FIX: the module-level ``flemingjson`` URL is commented out above, so
    calling this raised NameError; the URL is now a local constant.
    """
    state = 'KY'
    company = 'FLEM'
    flemingjson = 'https://outage.fme.coop/data/boundaries.json'
    temp = S.get(flemingjson)
    tempdata = json.loads(temp.text)
    for j in tempdata[0]['boundaries']:
        outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
        allcountyoutages.append(outageinfo)
def bigsandy():
    """Collect per-county outage counts from Big Sandy RECC (OH)."""
    company = 'BS'
    state = 'OH'
    resp = S.get('http://outagemap.bigsandyrecc.com/data/boundaries.json')
    payload = json.loads(resp.text)
    for county in payload[0]['boundaries']:
        allcountyoutages.append(
            (county.get('customersOutNow'), county.get('customersServed'),
             county.get('name'), state, company))
def southcentralpower():
    """Collect per-county outage counts from South Central Power (OH)."""
    company = 'SCP'
    state = 'OH'
    endpoint = 'http://outage.southcentralpower.com/data/boundaries.json'
    session = requests.Session()
    # session.proxies.update(proxies)  # proxy deliberately left disabled
    payload = json.loads(session.get(endpoint).text)
    for county in payload[0]['boundaries']:
        allcountyoutages.append(
            (county.get('customersOutNow'), county.get('customersServed'),
             county.get('name'), state, company))
def ku_get_url():
    """Resolve the current KUbra report URL for the LG&E/KU storm center.

    The storm-center page embeds an ``instanceId`` in inline JavaScript;
    that id plus a fixed view id yields a ``currentState`` document whose
    ``interval_generation_data`` path points at the current report
    directory, from which the final report URL is built.
    """
    url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
    r = requests.get(url).text
    # Scrape the instanceId out of the page's inline JavaScript.
    x = re.search(r"instanceId: '(.*?)',",r)
    urlcom = x.group(1)
    urlcom = 'https://kubra.io/stormcenter/api/v1/stormcenters/' + urlcom + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false'
    stuff = S.get(urlcom)
    jsonstuff = json.loads(stuff.text)
    interval_data = jsonstuff.get('data').get('interval_generation_data')
    urlcom = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
    return urlcom
def county_json(meta, url, jsonname):
    """Fetch an outage report whose directory name comes from a metadata file.

    ``meta`` is the metadata.json URL, ``url`` the report base URL, and
    ``jsonname`` the report filename; returns the requests Response.
    """
    directory = json.loads(S.get(meta).text)['directory']
    return S.get(url + directory + jsonname)
def ku():
    """Collect per-county outage counts from Kentucky Utilities / LG&E.

    Flattens the three nested 'areas' groups of the KUbra report and appends
    (out, served, county, state, utility) tuples to ``allcountyoutages``.

    CLEANUP: the accumulator list was named ``ku``, shadowing this function,
    and was filled by three separate append loops; flattened once instead.
    """
    url = ku_get_url()
    tempdata = json.loads(S.get(url).text)
    areas = tempdata['file_data']['areas']
    # The report nests counties under three different area groups.
    counties = (areas[2]['areas'][0]['areas']
                + areas[2]['areas'][1]['areas']
                + areas[1]['areas'][0]['areas'])
    for o in counties:
        allcountyoutages.append(
            (o['cust_a']['val'], o['cust_s'], o['name'].capitalize(),
             o['state'], o['utility']))
def grayson():
    """Collect per-county outage counts from Grayson RECC (KY)."""
    company = 'GRE'
    state = 'KY'
    resp = S.get(graysoncounty)
    # Only parse when the endpoint actually returned JSON.
    if resp.headers.get('Content-Type').startswith('application/json'):
        payload = json.loads(resp.text)
        for county in payload[0]['boundaries']:
            allcountyoutages.append(
                (county.get('customersOutNow'), county.get('customersServed'),
                 county.get('name'), state, company))
def aep_county_vawv(meta,url,jsonname):
    # Collect AEP Appalachian Power county outage counts for WV and VA.
    # The report nests counties per state under file_data.areas[0].areas[N]:
    # index 2 holds WV counties, index 1 VA counties (index layout observed
    # from the feed — confirm if the provider changes the report shape).
    company = 'AEP'
    outage = county_json(meta,url,jsonname)
    # S3 serves the report as application/octet-stream; skip anything else.
    if outage.headers.get('Content-Type').startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        state = 'WV'
        for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
            allcountyoutages.append(outageinfo)
        state = 'VA'
        for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
def aep_county_oh(meta, url, jsonname):
    """Collect AEP Ohio per-county outage counts."""
    company = 'AEP'
    state = 'OH'
    report = county_json(meta, url, jsonname)
    counties = json.loads(report.text)['file_data']['areas'][0]['areas'][0]['areas']
    for county in counties:
        allcountyoutages.append(
            (county.get('cust_a').get('val'), county.get('cust_s'),
             county.get('area_name').capitalize(), state, company))
def aep_county_ky(meta, url, jsonname):
    """Collect AEP Kentucky Power per-county outage counts."""
    company = 'AEP'
    state = 'KY'
    report = county_json(meta, url, jsonname)
    counties = json.loads(report.text)['file_data']['areas'][0]['areas'][0]['areas']
    for county in counties:
        allcountyoutages.append(
            (county.get('cust_a').get('val'), county.get('cust_s'),
             county.get('area_name').capitalize(), state, company))
def firstenergy_county(meta, url, jsonname):
    """Collect FirstEnergy (Mon Power) WV per-county outage counts."""
    company = 'FE'
    state = 'WV'
    report = county_json(meta, url, jsonname)
    # S3 serves the report as application/octet-stream; skip anything else.
    if report.headers.get('Content-Type').startswith('application/octet-stream'):
        payload = json.loads(report.text)
        for county in payload['file_data']['areas'][0]['areas'][0]['areas']:
            allcountyoutages.append(
                (county.get('cust_a').get('val'), county.get('cust_s'),
                 county.get('area_name').capitalize(), state, company))
# --- Script body: collect every provider, then persist to Postgres. ---
aep_county_vawv(aepwvmeta,'https://d2oclp3li76tyy.cloudfront.net/resources/data/external/interval_generation_data/','/report_county.json')
firstenergy_county('https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json','https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/','/report_county_ctv_wv.json')
aep_county_oh(aepohmeta,'http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/','/report_county.json')
aep_county_ky(aepkymeta, aepkybase, '/report_county.json')
grayson()
ku()
southcentralpower()
bigsandy()
# One timestamp for the whole batch.
# NOTE(review): datetime.utcnow() is naive and deprecated since Python 3.12;
# datetime.now(timezone.utc) would change the stored string format — confirm
# before switching.
current_timestamp = str(datetime.utcnow())
# Insert one row per (county, provider) sample collected above.
for i in allcountyoutages:
    sql = 'insert into countyoutages (outages, served, county, state, update, company) values (%s, %s, %s, %s, %s, %s)'
    val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
    cursor.execute(sql,val)
conn.commit()
# Attach the forecast-office (CWA) id by joining against the county table.
cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')
conn.commit()
#cursor.execute("delete from countyoutages where cwa != 'RLX'")
# Drop rows that could not be mapped to a CWA, and samples older than a year.
cursor.execute("delete from countyoutages where cwa is null")
cursor.execute("delete from countyoutages where update < now () - interval '365 days'")
conn.commit()
#print(allcountyoutages)

View File

@@ -1,82 +0,0 @@
Color: 255 0 0
Font: 1, 11, 1, "Arial"
Threshold: 9999
Place: -82.33573 37.99161, icon=2, label="Outage #4
Cause: None
Last Update: 2025-04-06 21:25: UTC"
Place: -81.61944 38.92038, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -81.72974 39.15171, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -81.53436 39.20742, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -80.25851 39.16533, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -80.57541 39.34415, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -81.248 38.77818, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -80.52457 38.39864, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -80.41218 38.46986, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -80.45523 38.45674, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:25: UTC"
Place: -81.46448 38.43202, icon=2, label="Outage #163
Cause: Tree Contact
Last Update: 2025-04-06 21:25: UTC"
Place: -81.48033 38.70811, icon=2, label="Outage #108
Cause: None
Last Update: 2025-04-06 21:25: UTC"
Place: -81.08233 38.33495, icon=2, label="Outage #14
Cause: Tree Contact
Last Update: 2025-04-06 21:25: UTC"
Place: -80.64911 38.42277, icon=2, label="Outage #19
Cause: Tree Damage
Last Update: 2025-04-06 21:00: UTC"
Place: -80.64508 38.42421, icon=2, label="Outage #19
Cause: Emergency Equipment Repair
Last Update: 2025-04-06 21:00: UTC"
Place: -80.64925 38.42389, icon=2, label="Outage #19
Cause: Pending Investigation
Last Update: 2025-04-06 21:00: UTC"
Place: -81.16478 38.32286, icon=2, label="Outage #4
Cause: None
Last Update: 2025-04-06 21:10: UTC"
Place: -81.54974 38.53972, icon=2, label="Outage #7
Cause: None
Last Update: 2025-04-06 21:25: UTC"
Place: -81.13474 38.01172, icon=2, label="Outage #23
Cause: None
Last Update: 2025-04-06 21:25: UTC"
Place: -82.47047 37.15274, icon=2, label="Outage #46
Cause: None
Last Update: 2025-04-06 21:25: UTC"
Place: -82.39272 37.04287, icon=2, label="Outage #147
Cause: Tree Contact
Last Update: 2025-04-06 21:25: UTC"
Place: -83.059326 38.44866, icon=2, label="Outage #9
Cause: None
Last Update: 2025-04-06 21:11: UTC"
Place: -81.7956 37.56702, icon=2, label="Outage #64
Cause: Tree Contact
Last Update: 2025-04-06 21:25: UTC"
Place: -81.80097 37.5298, icon=2, label="Outage #4
Cause: Equipment Related
Last Update: 2025-04-06 21:25: UTC"
Place: -81.91268 39.47965, icon=2, label="Outage #4
Cause: None
Last Update: 2025-04-06 21:25: UTC"
Place: -81.44512 39.42116, icon=2, label="Outage #4
Cause: None
Last Update: 2025-04-06 21:25: UTC"
End:

View File

@@ -1,188 +0,0 @@
Refresh: 1
Threshold: 999 nautical_miles
Title: Power Outages (RLX CWA) - Triangles
Font: 1, 11, 0, "Courier New"
Color: 255 0 0
Triangles:
37.15803, -82.2399
37.19803, -82.2399
37.17803, -82.1799
End:
Color: 0 0 255
Text: 37.17803, -82.2099, 1, "Outage #4", "Outage #4\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
37.13274, -82.50047
37.172740000000005, -82.50047
37.15274, -82.44047
End:
Color: 0 0 255
Text: 37.15274, -82.47047, 1, "Outage #46", "Outage #46\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
37.02287, -82.42272
37.062870000000004, -82.42272
37.04287, -82.36272
End:
Color: 0 0 255
Text: 37.04287, -82.39272, 1, "Outage #147", "Outage #147\nCause: Tree Contact\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.31505, -81.11213000000001
38.355050000000006, -81.11213000000001
38.33505, -81.05213
End:
Color: 0 0 255
Text: 38.33505, -81.08213, 1, "Outage #13", "Outage #13\nCause: Tree Contact\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.40376, -81.50807
38.443760000000005, -81.50807
38.42376, -81.44807
End:
Color: 0 0 255
Text: 38.42376, -81.47807, 1, "Outage #8", "Outage #8\nCause: Tree Contact\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.51972, -81.57974
38.559720000000006, -81.57974
38.53972, -81.51974
End:
Color: 0 0 255
Text: 38.53972, -81.54974, 1, "Outage #7", "Outage #7\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.688109999999995, -81.51033
38.72811, -81.51033
38.70811, -81.45033
End:
Color: 0 0 255
Text: 38.70811, -81.48033, 1, "Outage #108", "Outage #108\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
39.459649999999996, -81.94268
39.49965, -81.94268
39.47965, -81.88268
End:
Color: 0 0 255
Text: 39.47965, -81.91268, 1, "Outage #4", "Outage #4\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.90038, -81.64944
38.940380000000005, -81.64944
38.92038, -81.58944
End:
Color: 0 0 255
Text: 38.92038, -81.61944, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
39.13171, -81.75974000000001
39.171710000000004, -81.75974000000001
39.15171, -81.69974
End:
Color: 0 0 255
Text: 39.15171, -81.72974, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
39.37441999999999, -80.6919
39.41442, -80.6919
39.39442, -80.6319
End:
Color: 0 0 255
Text: 39.39442, -80.6619, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.828689999999995, -81.20764
38.86869, -81.20764
38.84869, -81.14764
End:
Color: 0 0 255
Text: 38.84869, -81.17764, 1, "Outage #30", "Outage #30\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
39.187419999999996, -81.56436000000001
39.22742, -81.56436000000001
39.20742, -81.50436
End:
Color: 0 0 255
Text: 39.20742, -81.53436, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
39.145329999999994, -80.28851
39.18533, -80.28851
39.16533, -80.22851
End:
Color: 0 0 255
Text: 39.16533, -80.25851, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
39.324149999999996, -80.60541
39.36415, -80.60541
39.34415, -80.54541
End:
Color: 0 0 255
Text: 39.34415, -80.57541, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.758179999999996, -81.278
38.79818, -81.278
38.77818, -81.218
End:
Color: 0 0 255
Text: 38.77818, -81.248, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.449859999999994, -80.44218000000001
38.48986, -80.44218000000001
38.46986, -80.38218
End:
Color: 0 0 255
Text: 38.46986, -80.41218, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.43674, -80.48523
38.47674000000001, -80.48523
38.45674, -80.42523
End:
Color: 0 0 255
Text: 38.45674, -80.45523, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
37.5098, -81.83097000000001
37.549800000000005, -81.83097000000001
37.5298, -81.77097
End:
Color: 0 0 255
Text: 37.5298, -81.80097, 1, "Outage #4", "Outage #4\nCause: Equipment Related\nLast Update: 2025-04-06 21:50: UTC"
Triangles:
38.314949999999996, -81.11233
38.35495, -81.11233
38.33495, -81.05233
End:
Color: 0 0 255
Text: 38.33495, -81.08233, 1, "Outage #14", "Outage #14\nCause: Tree Contact\nLast Update: 2025-04-06 21:40: UTC"
Triangles:
39.40116, -81.47512
39.44116, -81.47512
39.42116, -81.41512
End:
Color: 0 0 255
Text: 39.42116, -81.44512, 1, "Outage #4", "Outage #4\nCause: None\nLast Update: 2025-04-06 21:40: UTC"
Triangles:
38.37864, -80.55457
38.41864, -80.55457
38.39864, -80.49457
End:
Color: 0 0 255
Text: 38.39864, -80.52457, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:30: UTC"
Triangles:
37.547019999999996, -81.8256
37.58702, -81.8256
37.56702, -81.76559999999999
End:
Color: 0 0 255
Text: 37.56702, -81.7956, 1, "Outage #64", "Outage #64\nCause: Tree Contact\nLast Update: 2025-04-06 21:25: UTC"
Triangles:
38.41202, -81.49448
38.452020000000005, -81.49448
38.43202, -81.43448
End:
Color: 0 0 255
Text: 38.43202, -81.46448, 1, "Outage #163", "Outage #163\nCause: Tree Contact\nLast Update: 2025-04-06 21:25: UTC"
Triangles:
37.991719999999994, -81.16474
38.03172, -81.16474
38.01172, -81.10473999999999
End:
Color: 0 0 255
Text: 38.01172, -81.13474, 1, "Outage #23", "Outage #23\nCause: None\nLast Update: 2025-04-06 21:25: UTC"
Triangles:
37.97161, -82.36573
38.011610000000005, -82.36573
37.99161, -82.30573
End:
Color: 0 0 255
Text: 37.99161, -82.33573, 1, "Outage #4", "Outage #4\nCause: None\nLast Update: 2025-04-06 21:25: UTC"
End:

View File

@@ -1,110 +0,0 @@
<?php
// Connecting, selecting database
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
or die('Could not connect: ' . pg_last_error());
//no gets, curent point outage info
if(empty($_GET)) {
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
array('RLX')) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
//county current
if($_GET['county']) {
$result = pg_query_params($dbconn,
"SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
array('RLX')) or die('Query failed: ' . pg_last_error());
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
$array[] = $line;
}
echo json_encode($array);
}
//county archive
if($_GET['countyarchive']) {
if($_GET['start']) {
$starttime = pg_escape_string($_GET['start']);
if($_GET['end']) {
$endtime = pg_escape_string($_GET['end']);
$result = pg_query_params($dbconn,
"SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
$array[] = $line;
}
echo json_encode($array);
}}}
//Archive point data
if($_GET['archivepoint']) {
if($_GET['start']) {
$starttime = pg_escape_string($_GET['start']);
if($_GET['end']) {
$endtime = pg_escape_string($_GET['end']);
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3"
,array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}}}
//if($_GET['svr']=='current') {
//$result = pg_query_params($dbconn,
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
//$resultArray = pg_fetch_all($result);
//echo($resultArray[0]['json_build_object']);
//}
// Severe weather polygons currently in effect, as a GeoJSON FeatureCollection
// built server-side and echoed verbatim from the single result cell.
if($_GET['svr']=='current') {
$svrSql = "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()";
$result = pg_query($dbconn, $svrSql) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo $resultArray[0]['json_build_object'];
}
// Severe weather polygon archive.
// NOTE(review): the 24-hour fallback below is nested INSIDE this branch, so
// it only runs when svr=archive is supplied without start/end parameters —
// confirm that nesting is intentional.
if($_GET['svr'] == 'archive') {
if($_GET['start']) {
$starttime = pg_escape_string($_GET['start']);
if($_GET['end']) {
$endtime = pg_escape_string($_GET['end']);
// Explicit window: polygons issued after $1 that ended before $2.
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}
// No explicit window: default to polygons from the last 24 hours.
if(!isset($_GET['start']) && !isset($_GET['end'])) {
$result = pg_query($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}
// Clean up: free the last result set and close the connection.
// NOTE(review): if no branch above executed, $result is undefined here and
// pg_free_result will emit a warning.
pg_free_result($result);
pg_close($dbconn);
?>

View File

@@ -1,109 +0,0 @@
<?php
// Power outage API (RLX CWA): returns point-outage GeoJSON and county
// outage summaries as JSON, selected by query-string parameters.
// NOTE(review): the $_GET checks below use direct index access, which raises
// undefined-index notices/warnings when a key is absent; the LWX variant of
// this script guards each access with "?? null".
// Connecting, selecting database
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
or die('Could not connect: ' . pg_last_error());
//no gets, curent point outage info
// No parameters: active point outages for CWA RLX as a GeoJSON FeatureCollection.
if(empty($_GET)) {
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
array('RLX')) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
//county current
//"SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
// Latest snapshot: per-county totals summed across companies at the most
// recent update time.
if($_GET['county']) {
$result = pg_query_params($dbconn,
"select distinct on (county,state) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update = (select max(update) from countyoutages) and cwa = $1 group by county,state,update",
array('RLX')) or die('Query failed: ' . pg_last_error());
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
$array[] = $line;
}
echo json_encode($array);
}
//county archive
// Per-update county totals between start and end for CWA RLX.
if($_GET['countyarchive']) {
if($_GET['start']) {
$starttime = pg_escape_string($_GET['start']);
if($_GET['end']) {
$endtime = pg_escape_string($_GET['end']);
$result = pg_query_params($dbconn,
//"SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
"select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update",
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
$array[] = $line;
}
echo json_encode($array);
}}}
//Archive point data
// GeoJSON of individual outage points within the requested window.
// NOTE(review): start/end are read without isset checks in this branch.
if($_GET['archivepoint']) {
$starttime = pg_escape_string($_GET['start']);
$endtime = pg_escape_string($_GET['end']);
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
//if($_GET['svr']=='current') {
//$result = pg_query_params($dbconn,
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
//$resultArray = pg_fetch_all($result);
//echo($resultArray[0]['json_build_object']);
//}
// Severe weather polygons currently in effect.
if($_GET['svr']=='current') {
$result = pg_query($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
// Severe weather polygon archive. NOTE(review): the 24-hour fallback below
// is nested inside this branch, so it only fires for svr=archive without a
// start/end window — confirm that nesting is intentional.
if($_GET['svr'] == 'archive') {
if($_GET['start']) {
$starttime = pg_escape_string($_GET['start']);
if($_GET['end']) {
$endtime = pg_escape_string($_GET['end']);
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}
if(!isset($_GET['start']) && !isset($_GET['end'])) {
$result = pg_query($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}
// NOTE(review): $result is undefined here when no branch above executed.
pg_free_result($result);
pg_close($dbconn);
?>

View File

@@ -1,141 +0,0 @@
<?php
// Power outage API (LWX CWA): returns point-outage GeoJSON and county
// outage summaries as JSON, selected by query-string parameters.
// Connecting, selecting database
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
or die('Could not connect: ' . pg_last_error());
//no gets, curent point outage info
// No parameters: active point outages for CWA LWX as a GeoJSON FeatureCollection.
if(empty($_GET)) {
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
array('LWX')) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
//county/state max
// Peak simultaneous outage per county within the requested window.
if($_GET['max'] ?? null) {
if($_GET['start'] ?? null) {
$starttime = pg_escape_string($_GET['start']);
if($_GET['end'] ?? null) {
$endtime = pg_escape_string($_GET['end']);
$result = pg_query_params($dbconn,
"select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
array('LWX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
$array[] = $line;
}
echo json_encode($array);
}}}
//county current
// Latest snapshot: per-county totals summed across companies for all CWAs
// served by this endpoint.
if($_GET['county'] ?? null) {
$result = pg_query_params($dbconn,
"select distinct on (county,state) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update = (select max(update) from countyoutages) and (cwa = $1 or cwa = $2 or cwa = $3 or cwa = $4 or cwa = $5 or cwa = $6 or cwa = $7) group by county,state,update",
array('RLX','JKL','ILN','PBZ','MRX','LWX','RNK')) or die('Query failed: ' . pg_last_error());
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
$array[] = $line;
}
echo json_encode($array);
}
//county archive
// Per-update county totals between start and end across the listed CWAs.
if($_GET['countyarchive'] ?? null) {
if($_GET['start'] ?? null) {
$starttime = pg_escape_string($_GET['start']);
if($_GET['end'] ?? null) {
$endtime = pg_escape_string($_GET['end']);
$result = pg_query_params($dbconn,
"select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $8 and update < $9 and (cwa = $1 or cwa = $2 or cwa = $3 or cwa = $4 or cwa = $5 or cwa = $6 or cwa = $7) group by county,state,update",
array('RLX','JKL','ILN','PBZ','MRX','LWX','RNK',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
$array[] = $line;
}
echo json_encode($array);
}}}
//Archive point data
// GeoJSON of individual outage points within the requested window.
if($_GET['archivepoint'] ?? null) {
// Guard with ?? '' so a missing parameter does not pass null to
// pg_escape_string (deprecated since PHP 8.1); '' escapes to '' either way.
$starttime = pg_escape_string($_GET['start'] ?? '');
$endtime = pg_escape_string($_GET['end'] ?? '');
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
array('LWX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
// Severe weather polygons currently in effect.
// Bug fix: "==" binds tighter than "??" in PHP, so the previous
// unparenthesized "$_GET['svr'] ?? null =='current'" evaluated as
// "$_GET['svr'] ?? (null == 'current')" and matched ANY non-empty svr value
// (and likewise for the 'archive' branch below). Parenthesize the coalesce.
if(($_GET['svr'] ?? null) == 'current') {
$result = pg_query($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
// Severe weather polygon archive; same precedence fix as above. The
// 24-hour fallback stays nested inside this branch (svr=archive, no window).
if(($_GET['svr'] ?? null) == 'archive') {
if($_GET['start'] ?? null) {
$starttime = pg_escape_string($_GET['start']);
if($_GET['end'] ?? null) {
$endtime = pg_escape_string($_GET['end']);
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}
// No explicit window: default to polygons from the last 24 hours.
if(!isset($_GET['start']) && !isset($_GET['end'])) {
$result = pg_query($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}
// Robustness: $result is only defined when one of the branches above ran.
if(isset($result)) {
pg_free_result($result);
}
pg_close($dbconn);
?>

View File

@@ -1,593 +0,0 @@
# powercounty.py
# Collects county-level power outage counts from multiple utility providers
# (AEP, FirstEnergy via Kubra, KU, Grayson RECC, Fleming, Big Sandy, South
# Central Power) and inserts them into the newcountyoutages table.
import logging
import requests
import json
import psycopg2
from datetime import datetime
import re
from collections import defaultdict  # NOTE(review): unused in this file
import threading  # NOTE(review): unused in this file
# Set up logging
# Everything at INFO and above goes both to powercounty.log and the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('powercounty.log'),
        logging.StreamHandler()
    ]
)
# Set up a logger for this module
logger = logging.getLogger(__name__)
# Database connection parameters
DB_PARAMS = {
    'host': 'localhost',
    'database': 'nws',
    'user': 'nws',
    'password': 'nws'
}
# Set up a requests session
# A single shared session reuses HTTP connections across provider fetches.
S = requests.Session()
# Power company metadata and URLs (from power3.py)
AEP_OH_META = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
AEP_WV_META = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
AEP_KY_META = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
WV_FE_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
AEP_WV_KUBRA_META = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
AEP_OH_KUBRA_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
AEP_KY_KUBRA_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
AEP_WV_BASE = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
AEP_OH_BASE = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
AEP_KY_BASE = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
GRAYSON_COUNTY = 'https://outages.graysonrecc.com/data/boundaries.json'
# Additional URLs from power3.py
flemingjson = 'https://outage.fme.coop/data/boundaries.json'
bigsandy_url = 'http://outagemap.bigsandyrecc.com/data/boundaries.json'
southcentralpower_url = 'https://outage.southcentralpower.com/data/boundaries.json'
# Global list to collect all outage data
# Each entry is a 5-tuple: (outages, served, county, state, company).
allcountyoutages = []
def safe_request(url, description="Fetching data"):
try:
logger.info(f"{description}: {url}")
response = S.get(url)
response.raise_for_status() # Raise an exception for bad status codes
logger.info(f"Successfully fetched data from {url}")
return response
except requests.exceptions.RequestException as e:
logger.error(f"Failed to {description} from {url}: {e}")
return None
# This function will parse a JSON response and log errors
def safe_json_load(response, description="Parsing JSON"):
    """Decode *response*.text as JSON; return the object, or None on failure."""
    logger.info(f"{description}")
    try:
        parsed = json.loads(response.text)
    except (json.JSONDecodeError, AttributeError) as err:
        # AttributeError covers response being None (no .text attribute).
        logger.error(f"Failed to {description}: {err}")
        return None
    logger.info("Successfully parsed JSON data")
    return parsed
# Ported functions from power3.py with enhanced logging
def fleming():
    """Fetch outage data for Fleming County, KY"""
    logger.info("Fetching Fleming County outage data")
    response = safe_request(flemingjson, "fetching Fleming data")
    if response is None:
        return
    data = safe_json_load(response, "parsing Fleming JSON")
    if data is None:
        return
    try:
        boundaries = data[0]['boundaries']
        for entry in boundaries:
            # (outages, served, county, state, company)
            allcountyoutages.append((
                entry.get('customersOutNow'),
                entry.get('customersServed'),
                entry.get('name'),
                'KY',
                'FLEM',
            ))
        logger.info(f"Successfully processed {len(boundaries)} Fleming County boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Fleming County data: {e}")
def bigsandy():
    """Fetch outage data for Big Sandy RECC"""
    logger.info("Fetching Big Sandy RECC outage data")
    response = safe_request(bigsandy_url, "fetching Big Sandy data")
    if response is None:
        return
    data = safe_json_load(response, "parsing Big Sandy JSON")
    if data is None:
        return
    try:
        boundaries = data[0]['boundaries']
        for entry in boundaries:
            # (outages, served, county, state, company); this file tags Big
            # Sandy rows with state 'OH'.
            allcountyoutages.append((
                entry.get('customersOutNow'),
                entry.get('customersServed'),
                entry.get('name'),
                'OH',
                'BS',
            ))
        logger.info(f"Successfully processed {len(boundaries)} Big Sandy boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Big Sandy data: {e}")
def southcentralpower():
    """Fetch outage data for South Central Power"""
    logger.info("Fetching South Central Power outage data")
    response = safe_request(southcentralpower_url, "fetching South Central Power data")
    if response is None:
        return
    data = safe_json_load(response, "parsing South Central Power JSON")
    if data is None:
        return
    try:
        boundaries = data[0]['boundaries']
        for entry in boundaries:
            # (outages, served, county, state, company)
            allcountyoutages.append((
                entry.get('customersOutNow'),
                entry.get('customersServed'),
                entry.get('name'),
                'OH',
                'SCP',
            ))
        logger.info(f"Successfully processed {len(boundaries)} South Central Power boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing South Central Power data: {e}")
def ku_get_url():
    """Get KU outage data URL

    Scrapes the LG&E/KU stormcenter report page for its embedded instanceId,
    resolves the current interval-data path from the stormcenter API, and
    returns the final report URL, or None on any failure.
    """
    logger.info("Getting KU outage data URL")
    url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
    r = safe_request(url, "fetching KU report page")
    if r is None:
        return None
    try:
        # The instanceId is embedded in inline JavaScript on the report page.
        x = re.search(r"instanceId: '(.*?)',", r.text)
        if not x:
            logger.error("Could not extract instanceId from KU report page")
            return None
        urlcom = x.group(1)
        urlcom = 'https://kubra.io/stormcenter/api/v1/stormcenters/' + urlcom + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false'
        stuff = safe_request(urlcom, "fetching KU stormcenter data")
        if stuff is None:
            return None
        jsonstuff = safe_json_load(stuff, "parsing KU stormcenter JSON")
        if jsonstuff is None:
            return None
        # NOTE(review): .get('data') may return None; the resulting
        # AttributeError is caught by the broad except below.
        interval_data = jsonstuff.get('data').get('interval_generation_data')
        urlcom = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
        logger.info(f"Successfully constructed KU data URL: {urlcom}")
        return urlcom
    except Exception as e:
        logger.error(f"Error getting KU URL: {e}")
        return None
def county_json(meta, url, jsonname):
    """Fetch a provider's county outage JSON via its metadata 'directory' key.

    Returns the requests.Response for the data file, or None on any failure.
    """
    meta_response = safe_request(meta, "fetching metadata for county JSON")
    if meta_response is None:
        return None
    metadata = safe_json_load(meta_response, "parsing metadata for county JSON")
    if metadata is None:
        return None
    try:
        data_url = url + metadata['directory'] + jsonname
    except KeyError as err:
        logger.error(f"Error accessing metadata directory: {err}")
        return None
    return safe_request(data_url, "fetching county JSON data")
def ku():
    """Fetch KU outage data

    Pulls the current KU report, flattens its three fixed area groups into
    one county list, and appends (outages, served, county, state, company)
    tuples to the global allcountyoutages list.
    """
    logger.info("Fetching KU outage data")
    url = ku_get_url()
    if url is None:
        return
    data_response = safe_request(url, "fetching KU data")
    if data_response is None:
        return
    tempdata = safe_json_load(data_response, "parsing KU data JSON")
    if tempdata is None:
        return
    try:
        areas = tempdata['file_data']['areas']
        # The report nests counties under three fixed groups; gather them with
        # extend instead of three element-by-element copy loops.
        ku_list = []
        ku_list.extend(areas[2]['areas'][0]['areas'])
        ku_list.extend(areas[2]['areas'][1]['areas'])
        ku_list.extend(areas[1]['areas'][0]['areas'])
        for o in ku_list:
            outageinfo = o['cust_a']['val'], o['cust_s'], o['name'].capitalize(), o['state'], o['utility']
            allcountyoutages.append(outageinfo)
        logger.info(f"Successfully processed {len(ku_list)} KU outage records")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing KU data: {e}")
def grayson():
    """Fetch Grayson County outage data"""
    logger.info("Fetching Grayson County outage data")
    response = safe_request(GRAYSON_COUNTY, "fetching Grayson County data")
    if response is None:
        return
    content_type = response.headers.get('Content-Type', '')
    if not content_type.startswith('application/json'):
        logger.error(f"Unexpected content type from Grayson County: {response.headers.get('Content-Type')}")
        return
    data = safe_json_load(response, "parsing Grayson County JSON")
    if data is None:
        return
    try:
        boundaries = data[0]['boundaries']
        for entry in boundaries:
            # (outages, served, county, state, company)
            allcountyoutages.append((
                entry.get('customersOutNow'),
                entry.get('customersServed'),
                entry.get('name'),
                'KY',
                'GRE',
            ))
        logger.info(f"Successfully processed {len(boundaries)} Grayson County boundaries")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Grayson County data: {e}")
def aep_county_vawv(meta, url, jsonname):
    """Fetch AEP county data for VA and WV

    Downloads the AEP (Appalachian Power) report via county_json and appends
    WV and VA county rows to the global allcountyoutages list.
    """
    logger.info("Fetching AEP county data for VA and WV")
    company = 'AEP'
    outage_response = county_json(meta, url, jsonname)
    if outage_response is None:
        return
    # The AEP S3 endpoint serves the JSON report as application/octet-stream.
    if not outage_response.headers.get('Content-Type', '').startswith('application/octet-stream'):
        logger.error(f"Unexpected content type from AEP VA/WV: {outage_response.headers.get('Content-Type')}")
        return
    tempdata = safe_json_load(outage_response, "parsing AEP VA/WV JSON")
    if tempdata is None:
        return
    try:
        # WV data
        # NOTE(review): the WV/VA split relies on fixed positions areas[2]
        # and areas[1] in the report — verify against a live payload.
        state = 'WV'
        for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
            allcountyoutages.append(outageinfo)
        # VA data
        state = 'VA'
        for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
        logger.info("Successfully processed AEP VA/WV county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing AEP VA/WV data: {e}")
def aep_county_oh(meta, url, jsonname):
    """Fetch AEP county data for Ohio"""
    logger.info("Fetching AEP county data for Ohio")
    response = county_json(meta, url, jsonname)
    if response is None:
        return
    report = safe_json_load(response, "parsing AEP OH JSON")
    if report is None:
        return
    try:
        for entry in report['file_data']['areas'][0]['areas'][0]['areas']:
            # (outages, served, county, state, company)
            allcountyoutages.append((
                entry.get('cust_a').get('val'),
                entry.get('cust_s'),
                entry.get('area_name').capitalize(),
                'OH',
                'AEP',
            ))
        logger.info("Successfully processed AEP OH county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing AEP OH data: {e}")
def aep_county_ky(meta, url, jsonname):
    """Fetch AEP county data for Kentucky"""
    logger.info("Fetching AEP county data for Kentucky")
    response = county_json(meta, url, jsonname)
    if response is None:
        return
    report = safe_json_load(response, "parsing AEP KY JSON")
    if report is None:
        return
    try:
        for entry in report['file_data']['areas'][0]['areas'][0]['areas']:
            # (outages, served, county, state, company)
            allcountyoutages.append((
                entry.get('cust_a').get('val'),
                entry.get('cust_s'),
                entry.get('area_name').capitalize(),
                'KY',
                'AEP',
            ))
        logger.info("Successfully processed AEP KY county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing AEP KY data: {e}")
def firstenergy_county(meta, url, jsonname):
    """Fetch First Energy county data"""
    logger.info("Fetching First Energy county data")
    response = county_json(meta, url, jsonname)
    if response is None:
        return
    content_type = response.headers.get('Content-Type', '')
    if not content_type.startswith('application/octet-stream'):
        logger.error(f"Unexpected content type from First Energy: {response.headers.get('Content-Type')}")
        return
    report = safe_json_load(response, "parsing First Energy JSON")
    if report is None:
        return
    try:
        for entry in report['file_data']['areas'][0]['areas'][0]['areas']:
            # (outages, served, county, state, company)
            allcountyoutages.append((
                entry.get('cust_a').get('val'),
                entry.get('cust_s'),
                entry.get('area_name').capitalize(),
                'WV',
                'FE',
            ))
        logger.info("Successfully processed First Energy county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing First Energy data: {e}")
def get_kubra_hexes(url):
    """Get Kubra hex data

    Fetches the stormcenter "currentState" document at *url* and returns the
    third and fourth path segments of its cluster_interval_generation_data
    value, or (None, None) on any failure.
    """
    outage_response = safe_request(url, "fetching Kubra hex data")
    if outage_response is None:
        return None, None
    if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra: {outage_response.headers.get('Content-Type')}")
        return None, None
    tempdata = safe_json_load(outage_response, "parsing Kubra hex JSON")
    if tempdata is None:
        return None, None
    try:
        bothhex = tempdata.get('data').get('cluster_interval_generation_data')
        hexes = bothhex.split('/')
        logger.info(f"Successfully extracted Kubra hexes: {hexes}")
        return hexes[2], hexes[3]
    # Bug fix: IndexError added — a malformed path with fewer than four
    # segments previously escaped this handler and crashed the caller instead
    # of honoring the (None, None) failure contract.
    except (KeyError, AttributeError, IndexError) as e:
        logger.error(f"Error extracting Kubra hexes: {e}")
        return None, None
def kubra_fe(baseurl1, baseurl2, meta):
    """Fetch Kubra First Energy data

    Resolves the current Kubra report path hexes from *meta*, downloads the
    report at baseurl1 + hex + baseurl2, and appends county-level rows
    (tagged WV / FE) to the global allcountyoutages list.
    """
    logger.info("Fetching Kubra First Energy data")
    hex2 = get_kubra_hexes(meta)
    if hex2[0] is None:
        return
    # Only the second hex component is used to build the report URL.
    url = baseurl1 + hex2[1] + baseurl2
    company = 'FE'
    state = 'WV'
    outage_response = safe_request(url, "fetching Kubra FE data")
    if outage_response is None:
        return
    if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra FE: {outage_response.headers.get('Content-Type')}")
        return
    tempdata = safe_json_load(outage_response, "parsing Kubra FE JSON")
    if tempdata is None:
        return
    try:
        # Keep only county-level entries; the report mixes aggregation levels.
        for j in tempdata['file_data']['areas']:
            if j.get('key') == "county":
                outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('name').capitalize(), state, company
                allcountyoutages.append(outageinfo)
        logger.info("Successfully processed Kubra FE county data")
    except (KeyError, IndexError) as e:
        logger.error(f"Error processing Kubra FE data: {e}")
def kubra_aep(baseurl1, baseurl2, meta, company='AEP'):
    """Fetch a Kubra-hosted AEP report and hand it to the shared parser."""
    logger.info(f"Fetching Kubra AEP data for company: {company}")
    first_hex, second_hex = get_kubra_hexes(meta)
    if first_hex is None:
        return
    report_url = baseurl1 + second_hex + baseurl2
    response = safe_request(report_url, "fetching Kubra AEP data")
    if response is None:
        return
    content_type = response.headers.get('Content-Type', '')
    if not content_type.startswith('application/json'):
        logger.error(f"Unexpected content type from Kubra AEP: {response.headers.get('Content-Type')}")
        return
    report = safe_json_load(response, "parsing Kubra AEP JSON")
    if report is not None:
        process_outage_data(report, company)
def process_outage_data(data, company):
    """Process outage data with enhanced error handling

    Handles both Kubra report layouts: a top-level list of state objects each
    holding a nested county list, or a top-level list that is already the
    county list. Appends (outages, served, county, state, company) tuples to
    the global allcountyoutages list.
    """
    def _append_county(county):
        # Record one county-level entry; callers have checked key == "county".
        outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(), county.get('state'), company
        allcountyoutages.append(outageinfo)
    try:
        # Navigate to the primary list of areas
        primary_areas = data.get("file_data", {}).get("areas", [])
        # If the list is empty, there's nothing to process
        if not primary_areas:
            logger.warning("No 'areas' data found in outage data.")
            return
        # Check the key of the first item to determine the format
        first_item_key = primary_areas[0].get("key")
        if first_item_key == "state":
            # Format 1: state objects wrapping nested county lists.
            for state_area in primary_areas:
                for county in state_area.get("areas", []):
                    if county.get("key") == "county":
                        _append_county(county)
        elif first_item_key == "county":
            # Format 2: the primary list is already the county list.
            for county in primary_areas:
                if county.get("key") == "county":
                    _append_county(county)
        else:
            logger.warning(f"Unknown data format. Could not find 'state' or 'county' key. Found: {first_item_key}")
    # AttributeError added: .get('cust_a') returns None for a malformed county
    # entry, and the resulting AttributeError previously escaped this handler.
    except (KeyError, IndexError, AttributeError) as e:
        logger.error(f"Error processing outage data: {e}")
def insert_outage_data(cursor, outage_data, current_timestamp):
    """Insert outage data into the new table

    Each element of *outage_data* is a 5-tuple
    (outages, served, county, state, company); *current_timestamp* is stamped
    into the "update" column of every inserted row.

    Raises: re-raises any database error after logging it, so the caller's
    transaction handling (rollback in main) still runs.
    """
    if not outage_data:
        logger.info("No outage data to insert into the database.")
        return
    sql = 'INSERT INTO newcountyoutages (outages, served, county, state, update, company) VALUES (%s, %s, %s, %s, %s, %s)'
    # Bug fix: the statement takes 6 values but the collected tuples hold 5 —
    # current_timestamp was accepted and never used, so executemany was handed
    # short rows. Build full rows with the timestamp in the "update" slot
    # (same column order the legacy countyoutages insert used).
    rows = [
        (outages, served, county, state, current_timestamp, company)
        for outages, served, county, state, company in outage_data
    ]
    try:
        logger.info(f"Inserting {len(outage_data)} rows into the database.")
        cursor.executemany(sql, rows)
        logger.info("Successfully inserted data into the database.")
    except Exception as e:
        logger.error(f"Failed to insert data into the database: {e}")
        raise
def main():
    """Main function to collect and insert outage data

    Connects to the database, runs every provider collector (each wrapped in
    its own try/except so one failure cannot stop the rest), then inserts the
    accumulated rows in a single transaction.
    """
    conn = None
    try:
        conn = psycopg2.connect(**DB_PARAMS)
        cursor = conn.cursor()
        logger.info("Successfully connected to the database.")
        # Clear the global list at the start
        global allcountyoutages
        allcountyoutages = []
        # Collect outage data for each provider
        logger.info("Starting data collection.")
        # --- Kubra First Energy ---
        try:
            kubra_fe('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', WV_FE_META)
        except Exception as e:
            logger.error(f"Error collecting Kubra FE data: {e}")
        # --- Kubra AEP WV ---
        try:
            kubra_aep('https://kubra.io/data/', '/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json', AEP_WV_KUBRA_META)
        except Exception as e:
            logger.error(f"Error collecting Kubra AEP WV data: {e}")
        # --- Kubra AEP OH ---
        try:
            kubra_aep('https://kubra.io/data/', '/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json', AEP_OH_KUBRA_META)
        except Exception as e:
            logger.error(f"Error collecting Kubra AEP OH data: {e}")
        # --- Kubra AEP KY ---
        # NOTE(review): this report GUID is identical to the Kubra FE one
        # above (8c3b0b30-...) — verify the KY report id is correct.
        try:
            kubra_aep('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', AEP_KY_KUBRA_META)
        except Exception as e:
            logger.error(f"Error collecting Kubra AEP KY data: {e}")
        # --- Grayson County ---
        try:
            grayson()
        except Exception as e:
            logger.error(f"Error collecting Grayson County data: {e}")
        # --- KU ---
        try:
            ku()
        except Exception as e:
            logger.error(f"Error collecting KU data: {e}")
        # --- South Central Power ---
        try:
            southcentralpower()
        except Exception as e:
            logger.error(f"Error collecting South Central Power data: {e}")
        # --- Big Sandy ---
        try:
            bigsandy()
        except Exception as e:
            logger.error(f"Error collecting Big Sandy data: {e}")
        # --- AEP Direct (OH, WV, KY) ---
        try:
            aep_county_oh(AEP_OH_META, AEP_OH_BASE, "metadata.json")
        except Exception as e:
            logger.error(f"Error collecting AEP OH data: {e}")
        try:
            aep_county_vawv(AEP_WV_META, AEP_WV_BASE, "metadata.json")
        except Exception as e:
            logger.error(f"Error collecting AEP WV/VA data: {e}")
        try:
            aep_county_ky(AEP_KY_META, AEP_KY_BASE, "metadata.json")
        except Exception as e:
            logger.error(f"Error collecting AEP KY data: {e}")
        # --- First Energy Direct ---
        try:
            firstenergy_county(WV_FE_META, 'https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/', "metadata.json")
        except Exception as e:
            logger.error(f"Error collecting First Energy data: {e}")
        # Insert collected data into the new table
        # NOTE(review): datetime.utcnow() is naive (and deprecated in 3.12);
        # a timezone-aware timestamp would be safer if the column allows it.
        current_timestamp = str(datetime.utcnow())
        insert_outage_data(cursor, allcountyoutages, current_timestamp)
        conn.commit()
        logger.info("Data collection and database insert completed successfully.")
    except Exception as e:
        logger.exception("An error occurred during the main execution.")
        if conn:
            conn.rollback()
    finally:
        if conn:
            cursor.close()
            conn.close()
            logger.info("Database connection closed.")
if __name__ == '__main__':
    main()

View File

@@ -1,50 +0,0 @@
# One-off maintenance script: intended to copy aggregated county outage rows
# into countyoutages, backfill their CWA from the county table, and prune
# rows outside the RLX CWA or older than 30 days.
import requests
import polyline
import json
import psycopg2
import psycopg2.extensions
from datetime import datetime, timezone
from geojson import Point, Feature, FeatureCollection, dump
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()
allcountyoutages = []
S = requests.Session()
#select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update
#select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
# NOTE(review): this execute uses PostgreSQL-style $1/$2/$3 placeholders with
# no parameter tuple; psycopg2 expects %s placeholders plus an args argument,
# so this statement fails as written — and its result is never fetched.
cursor.execute("select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update")
current_timestamp = str(datetime.utcnow())
# NOTE(review): allcountyoutages is never populated in this script, so this
# insert loop is a no-op as written.
for i in allcountyoutages:
    sql = 'insert into countyoutages (outages, served, county, state, update, company) values (%s, %s, %s, %s, %s, %s)'
    val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
    cursor.execute(sql,val)
conn.commit()
# Backfill CWA for rows whose county/state matches the reference table.
cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')
conn.commit()
# Prune: keep only RLX rows with a known CWA from the last 30 days.
cursor.execute("delete from countyoutages where cwa != 'RLX'")
cursor.execute("delete from countyoutages where cwa is null")
cursor.execute("delete from countyoutages where update < now () - interval '30 days'")
conn.commit()
cursor.close()
conn.close()