initial
This commit is contained in:
107
24hrpower.txt
Normal file
@@ -0,0 +1,107 @@
|
||||
Start Time UTC End Time UTC Duration Max Out Cause Lat Lon County State
|
||||
------------------- ------------------- ---------- --------- ------------------------------------------------------------ ------- -------- --------- -------
|
||||
2025-11-27 19:54:00 2025-11-27 21:25:14 1:31:14 101 WEATHER 38.5224 -81.5713 Kanawha WV
|
||||
2025-11-27 16:01:00 2025-11-27 17:20:14 1:19:14 524 TREE CONTACT 37.8594 -81.225 Raleigh WV
|
||||
2025-11-27 13:27:00 2025-11-27 17:20:14 3:53:14 22 TREE CONTACT 37.6263 -81.1044 Raleigh WV
|
||||
2025-11-27 04:52:00 2025-11-27 09:20:21 4:28:21 13 TREE CONTACT 38.373 -82.4103 Cabell WV
|
||||
2025-11-27 04:52:00 2025-11-27 08:50:18 3:58:18 34 TREE CONTACT 38.3747 -82.4084 Cabell WV
|
||||
2025-11-27 03:09:15 2025-11-27 04:55:24 1:46:09 809 TREE CONTACT 38.5091 -81.868 Putnam WV
|
||||
2025-11-27 03:09:15 2025-11-27 04:20:21 1:11:06 1414 TREE CONTACT 38.494 -81.8585 Putnam WV
|
||||
2025-11-27 03:09:15 2025-11-27 07:05:20 3:56:05 422 TREE CONTACT 38.5068 -81.864 Putnam WV
|
||||
2025-11-27 02:59:00 2025-11-27 05:05:23 2:06:23 5 TREE CONTACT 37.6812 -81.5491 Wyoming WV
|
||||
2025-11-27 01:40:35 2025-11-27 21:05:12 19:24:37 6 TREE CONTACT 38.3987 -81.2739 Kanawha WV
|
||||
2025-11-26 23:22:31 2025-11-27 00:20:20 0:57:49 6 TREE CONTACT 37.1947 -82.3321 Dickenson VA
|
||||
2025-11-26 22:33:00 2025-11-27 01:50:27 3:17:27 4 TREE CONTACT 38.0222 -81.84 Boone WV
|
||||
2025-11-26 22:19:00 2025-11-27 02:50:29 4:31:29 9 TREE CONTACT 38.3619 -81.918 Putnam WV
|
||||
2025-11-26 21:44:00 2025-11-27 00:05:20 2:21:20 13 TREE CONTACT 38.9049 -82.6331 Jackson OH
|
||||
2025-11-26 21:25:00 2025-11-27 01:35:26 4:10:26 5 TREE CONTACT 38.945 -82.0455 Mason WV
|
||||
2025-11-26 20:52:00 2025-11-27 04:10:31 7:18:31 36 TREE CONTACT 37.9718 -82.487 Lawrence KY
|
||||
2025-11-26 19:44:00 2025-11-26 21:20:28 1:36:28 7 TREE CONTACT 39.5987 -82.0516 Perry OH
|
||||
2025-11-26 19:35:00 2025-11-26 22:20:30 2:45:30 4 TREE CONTACT 39.6779 -81.959 Morgan OH
|
||||
2025-11-26 19:35:00 2025-11-26 21:05:31 1:30:31 13 TREE CONTACT 39.6832 -81.9634 Morgan OH
|
||||
2025-11-26 19:35:00 2025-11-26 21:35:22 2:00:22 4 TREE CONTACT 39.678 -81.9582 Morgan OH
|
||||
2025-11-26 19:33:00 2025-11-27 02:40:37 7:07:37 4 TREE CONTACT 38.3256 -82.7209 Boyd KY
|
||||
2025-11-26 19:22:00 2025-11-27 01:05:29 5:43:29 4 TREE CONTACT 39.3553 -82.2437 Athens OH
|
||||
2025-11-26 19:20:00 2025-11-26 22:50:33 3:30:33 4 TREE CONTACT 37.6276 -81.4973 Wyoming WV
|
||||
2025-11-26 19:16:00 2025-11-26 22:10:27 2:54:27 4 TREE CONTACT 38.3351 -82.7894 Carter KY
|
||||
2025-11-26 18:50:29 2025-11-26 19:15:27 0:24:58 19 Tree Damage 38.7172 -81.3703 Roane WV
|
||||
2025-11-26 18:47:00 2025-11-26 20:50:30 2:03:30 22 TREE CONTACT 38.9732 -81.8981 Meigs OH
|
||||
2025-11-26 18:33:00 2025-11-26 22:20:29 3:47:29 8 TREE CONTACT 38.151 -82.2664 Lincoln WV
|
||||
2025-11-26 18:29:00 2025-11-26 22:05:31 3:36:31 4 TREE CONTACT 38.4217 -82.2939 Cabell WV
|
||||
2025-11-26 18:12:00 2025-11-26 20:50:30 2:38:30 13 TREE CONTACT 38.1145 -81.9252 Boone WV
|
||||
2025-11-26 18:12:00 2025-11-26 19:50:29 1:38:29 12 TREE CONTACT 38.1121 -81.927 Boone WV
|
||||
2025-11-26 17:58:39 2025-11-26 20:20:26 2:21:47 357 TREE CONTACT 38.5699 -81.5321 Kanawha WV
|
||||
2025-11-26 17:53:00 2025-11-26 19:35:26 1:42:26 33 TREE CONTACT 39.5163 -82.0901 Athens OH
|
||||
2025-11-26 17:43:00 2025-11-26 20:05:24 2:22:24 12 TREE CONTACT 38.4934 -81.4916 Kanawha WV
|
||||
2025-11-26 17:43:00 2025-11-26 20:05:24 2:22:24 73 TREE CONTACT 39.637 -82.1291 Perry OH
|
||||
2025-11-26 17:31:00 2025-11-26 20:05:24 2:34:24 24 TREE CONTACT 38.3094 -81.7744 Kanawha WV
|
||||
2025-11-26 16:20:00 2025-11-26 18:55:42 2:35:42 4 TREE CONTACT 38.0703 -82.6007 Lawrence KY
|
||||
2025-11-26 16:06:00 2025-11-26 21:10:40 5:04:40 4 TREE CONTACT 38.0025 -82.7134 Lawrence KY
|
||||
2025-11-26 16:05:22 2025-11-26 21:15:32 5:10:09 19 Tree Damage 39.2398 -80.3019 Harrison WV
|
||||
2025-11-26 16:01:00 2025-11-26 19:20:28 3:19:28 4 TREE CONTACT 39.0015 -82.0663 Meigs OH
|
||||
2025-11-26 15:35:00 2025-11-26 19:55:41 4:20:41 68 TREE CONTACT 38.2704 -83.1736 Carter KY
|
||||
2025-11-26 15:24:00 2025-11-26 19:20:28 3:56:28 4 TREE CONTACT 38.4541 -81.5469 Kanawha WV
|
||||
2025-11-26 15:21:40 2025-11-26 16:44:08 1:22:28 62 431 Tree Failure-off ROW 38.4498 -82.9776 Greenup KY
|
||||
2025-11-26 13:57:00 2025-11-26 16:35:25 2:38:25 12 TREE CONTACT 38.4535 -82.2759 Cabell WV
|
||||
2025-11-26 13:51:00 2025-11-26 15:50:27 1:59:27 8 WEATHER 38.5198 -82.5185 Lawrence OH
|
||||
2025-11-26 13:25:00 2025-11-26 16:05:23 2:40:23 69 TREE CONTACT 38.4246 -81.5101 Kanawha WV
|
||||
2025-11-26 12:14:00 2025-11-26 16:35:25 4:21:25 4 TREE CONTACT 38.2595 -81.9982 Lincoln WV
|
||||
2025-11-26 12:14:00 2025-11-26 13:35:15 1:21:15 4 TREE CONTACT 38.2604 -81.998 Lincoln WV
|
||||
2025-11-26 11:59:00 2025-11-27 01:35:27 13:36:27 4 TREE CONTACT 38.2722 -82.28 Cabell WV
|
||||
2025-11-26 11:32:00 2025-11-26 15:35:26 4:03:26 25 TREE CONTACT 37.0754 -82.3701 Dickenson VA
|
||||
2025-11-26 10:51:00 2025-11-26 16:35:25 5:44:25 852 TREE CONTACT 39.6226 -82.0824 Perry OH
|
||||
2025-11-26 10:51:00 2025-11-26 16:50:25 5:59:25 342 TREE CONTACT 39.5817 -82.0624 Perry OH
|
||||
2025-11-26 10:05:10 2025-11-26 12:00:12 1:55:02 19 Tree Damage 39.2096 -81.2004 Ritchie WV
|
||||
2025-11-26 08:16:00 2025-11-26 13:35:15 5:19:15 78 TREE CONTACT 38.2826 -81.7308 Kanawha WV
|
||||
2025-11-26 04:20:00 2025-11-26 08:05:12 3:45:12 4 TREE CONTACT 38.487 -81.3294 Kanawha WV
|
||||
2025-11-26 03:19:00 2025-11-26 04:50:13 1:31:13 4 WEATHER 37.2948 -82.0507 Buchanan VA
|
||||
2025-11-26 02:50:44 2025-11-26 03:49:31 0:58:47 13 430 Tree failure from overhang or dead tree without ice/snow 39.7586 -82.3875 Perry OH
|
||||
2025-11-26 01:59:00 2025-11-26 05:35:10 3:36:10 15 TREE CONTACT 38.3854 -81.6286 Kanawha WV
|
||||
2025-11-26 00:30:00 2025-11-26 04:35:13 4:05:13 4 TREE CONTACT 37.2948 -82.0603 Buchanan VA
|
||||
2025-11-25 22:36:00 2025-11-26 03:05:14 4:29:14 7 WEATHER 37.281 -82.0921 Buchanan VA
|
||||
2025-11-25 22:15:07 2025-11-25 22:35:15 0:20:08 259 TREE CONTACT 38.3351 -81.7072 Kanawha WV
|
||||
2025-11-25 21:20:11 2025-11-26 08:15:10 10:54:59 19 Tree Damage 38.5847 -80.9153 Braxton WV
|
||||
2025-11-25 19:27:00 2025-11-25 22:05:17 2:38:17 103 TREE CONTACT 38.3259 -81.7086 Kanawha WV
|
||||
2025-11-25 19:27:00 2025-11-25 20:50:17 1:23:17 362 TREE CONTACT 38.3315 -81.7074 Kanawha WV
|
||||
2025-11-25 17:33:00 2025-11-25 18:35:20 1:02:20 1036 TREE CONTACT 37.9388 -82.0137 Logan WV
|
||||
2025-11-25 17:14:00 2025-11-25 20:35:14 3:21:14 4 TREE CONTACT 37.1658 -82.2542 Dickenson VA
|
||||
2025-11-25 15:58:00 2025-11-25 18:35:20 2:37:20 30 TREE CONTACT 38.1982 -81.003 Fayette WV
|
||||
2025-11-25 14:51:00 2025-11-25 16:50:22 1:59:22 12 TREE CONTACT 39.2672 -82.6058 Vinton OH
|
||||
2025-11-25 13:18:00 2025-11-25 16:20:19 3:02:19 5 TREE CONTACT 38.3351 -81.6425 Kanawha WV
|
||||
2025-11-25 12:14:00 2025-11-25 19:35:17 7:21:17 151 TREE CONTACT 37.0442 -82.3947 Dickenson VA
|
||||
2025-11-25 11:23:00 2025-11-25 13:50:21 2:27:21 27 TREE CONTACT 38.5702 -81.7033 Kanawha WV
|
||||
2025-11-25 10:10:00 2025-11-25 14:35:23 4:25:23 4 TREE CONTACT 38.0477 -82.0809 Lincoln WV
|
||||
2025-11-24 15:44:00 2025-11-24 20:35:24 4:51:24 264 TREE CONTACT 37.5226 -81.8099 Wyoming WV
|
||||
2025-11-24 15:44:00 2025-11-24 21:05:22 5:21:22 27 TREE CONTACT 37.5141 -81.8072 Wyoming WV
|
||||
2025-11-24 12:29:00 2025-11-24 18:40:34 6:11:34 19 TREE CONTACT 38.4707 -82.6995 Boyd KY
|
||||
2025-11-24 12:29:00 2025-11-24 14:55:42 2:26:42 18 TREE CONTACT 38.4706 -82.6993 Boyd KY
|
||||
2025-11-24 08:58:00 2025-11-24 13:35:24 4:37:24 7 TREE CONTACT 39.6278 -82.0292 Morgan OH
|
||||
2025-11-24 08:24:00 2025-11-24 15:05:25 6:41:25 35 TREE CONTACT 38.1802 -81.9185 Lincoln WV
|
||||
2025-11-24 08:15:00 2025-11-24 16:05:28 7:50:28 40 TREE CONTACT 37.3616 -81.7316 McDowell WV
|
||||
2025-11-24 04:39:00 2025-11-24 15:20:26 10:41:26 15 TREE CONTACT 37.9783 -82.3569 Wayne WV
|
||||
2025-11-24 01:20:11 2025-11-24 03:30:10 2:09:59 19 Tree Damage 38.5826 -80.8991 Braxton WV
|
||||
2025-11-24 00:22:00 2025-11-24 04:05:13 3:43:13 13 TREE CONTACT 39.6128 -81.9686 Morgan OH
|
||||
2025-11-23 23:37:00 2025-11-24 08:05:09 8:28:09 7 TREE CONTACT 38.4702 -81.3939 Kanawha WV
|
||||
2025-11-23 17:58:00 2025-11-23 21:40:17 3:42:17 12 TREE CONTACT 37.9447 -82.6866 Lawrence KY
|
||||
2025-11-23 16:05:00 2025-11-23 19:35:15 3:30:15 76 TREE CONTACT 38.5411 -81.416 Kanawha WV
|
||||
2025-11-23 16:05:00 2025-11-23 22:20:16 6:15:16 36 TREE CONTACT 38.5476 -81.4174 Roane WV
|
||||
2025-11-23 16:05:00 2025-11-23 20:20:18 4:15:18 61 TREE CONTACT 38.5417 -81.4191 Kanawha WV
|
||||
2025-11-23 13:34:00 2025-11-23 19:50:18 6:16:18 177 TREE CONTACT 38.9096 -81.7926 Jackson WV
|
||||
2025-11-23 10:42:00 2025-11-23 19:05:18 8:23:18 806 TREE CONTACT 38.2266 -81.413 Kanawha WV
|
||||
2025-11-23 09:21:00 2025-11-23 20:35:18 11:14:18 4 TREE CONTACT 38.0339 -81.4554 Kanawha WV
|
||||
2025-11-23 07:38:00 2025-11-23 15:35:11 7:57:11 195 TREE CONTACT 37.2019 -82.4628 Dickenson VA
|
||||
2025-11-23 06:48:00 2025-11-23 16:05:12 9:17:12 4 TREE CONTACT 38.6302 -81.3645 Roane WV
|
||||
2025-11-23 04:46:00 2025-11-23 08:40:15 3:54:15 58 TREE CONTACT 38.4178 -82.7328 Boyd KY
|
||||
2025-11-23 04:46:00 2025-11-23 11:40:14 6:54:14 40 TREE CONTACT 38.4156 -82.7356 Boyd KY
|
||||
2025-11-23 02:31:14 2025-11-23 10:05:12 7:33:58 623 TREE CONTACT 38.2149 -81.4187 Kanawha WV
|
||||
2025-11-23 02:31:14 2025-11-23 08:35:16 6:04:02 621 TREE CONTACT 38.2146 -81.4188 Kanawha WV
|
||||
2025-11-23 02:31:14 2025-11-23 11:05:13 8:33:59 624 TREE CONTACT 38.2149 -81.4188 Kanawha WV
|
||||
2025-11-23 01:18:00 2025-11-23 17:50:18 16:32:18 42 TREE CONTACT 38.1792 -81.4666 Kanawha WV
|
||||
2025-11-23 01:18:00 2025-11-23 19:50:18 18:32:18 157 TREE CONTACT 38.1814 -81.4555 Kanawha WV
|
||||
2025-11-23 01:18:00 2025-11-23 18:50:19 17:32:19 88 TREE CONTACT 38.1807 -81.4583 Kanawha WV
|
||||
2025-11-23 01:18:00 2025-11-23 18:05:12 16:47:12 67 TREE CONTACT 38.1791 -81.4657 Kanawha WV
|
||||
2025-11-23 00:13:00 2025-11-23 04:50:13 4:37:13 4 TREE CONTACT 38.3593 -82.4886 Wayne WV
|
||||
2025-11-23 00:07:00 2025-11-23 14:20:16 14:13:16 5 TREE CONTACT 38.4733 -81.2042 Clay WV
|
||||
2025-11-22 22:39:00 2025-11-23 05:20:14 6:41:14 6 TREE CONTACT 37.9014 -81.2874 Fayette WV
|
||||
2025-11-22 22:27:00 2025-11-23 00:35:12 2:08:12 10 TREE CONTACT 38.5029 -81.5277 Kanawha WV
|
||||
2025-11-22 22:07:00 2025-11-23 01:35:13 3:28:13 4 TREE CONTACT 38.0685 -82.0728 Lincoln WV
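The Duration column above is simply End Time minus Start Time (for example, 19:54:00 to 21:25:14 gives 1:31:14), and Max Out appears to be the peak number of customers affected by each event. A minimal Python sketch, assuming the row layout shown above (the parsing regex and file path are illustrative, not part of this commit's code), that re-derives Duration as a sanity check:

# Sketch: re-derive the Duration column of 24hrpower.txt as End - Start.
import re
from datetime import datetime

ROW = re.compile(
    r"^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\s+"   # Start Time UTC
    r"(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\s+"    # End Time UTC
    r"(\d+:\d{2}:\d{2})\s+(\d+)\s+"                # Duration, Max Out
)
FMT = "%Y-%m-%d %H:%M:%S"

def check_durations(path="24hrpower.txt"):
    with open(path) as f:
        for line in f:
            m = ROW.match(line)
            if not m:
                continue  # header, separator, or blank line
            start = datetime.strptime(m.group(1), FMT)
            end = datetime.strptime(m.group(2), FMT)
            hh, mm, ss = (int(x) for x in m.group(3).split(":"))
            listed = hh * 3600 + mm * 60 + ss
            derived = int((end - start).total_seconds())
            if listed != derived:
                print(f"Mismatch at {m.group(1)}: listed {listed}s, derived {derived}s")

if __name__ == "__main__":
    check_durations()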
|
||||
260
511.html
Normal file
@@ -0,0 +1,260 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>OHGO/WV511/goKY Tracker</title>
|
||||
<style>
|
||||
.tabulator, .tabulator-header, .tabulator-tableHolder {
|
||||
overflow: visible !important;
|
||||
}
|
||||
.switch {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
width: 40px;
|
||||
height: 24px;
|
||||
}
|
||||
.switch input {
|
||||
opacity: 0;
|
||||
width: 0;
|
||||
height: 0;
|
||||
}
|
||||
.slider {
|
||||
position: absolute;
|
||||
cursor: pointer;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background-color: #ccc;
|
||||
-webkit-transition: .4s;
|
||||
transition: .4s;
|
||||
}
|
||||
.slider:before {
|
||||
position: absolute;
|
||||
content: "";
|
||||
height: 16px;
|
||||
width: 16px;
|
||||
left: 4px;
|
||||
bottom: 4px;
|
||||
background-color: white;
|
||||
-webkit-transition: .4s;
|
||||
transition: .4s;
|
||||
}
|
||||
input:checked + .slider {
|
||||
background-color: #2196F3;
|
||||
}
|
||||
input:focus + .slider {
|
||||
box-shadow: 0 0 1px #2196F3;
|
||||
}
|
||||
input:checked + .slider:before {
|
||||
-webkit-transform: translateX(16px);
|
||||
-ms-transform: translateX(16px);
|
||||
transform: translateX(16px);
|
||||
}
|
||||
.slider.round {
|
||||
border-radius: 24px;
|
||||
}
|
||||
.slider.round:before {
|
||||
border-radius: 50%;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.js" integrity="sha512-WNLxfP/8cVYL9sj8Jnp6et0BkubLP31jhTG9vhL/F5uEZmg5wEzKoXp1kJslzPQWwPT1eyMiSxlKCgzHLOTOTQ==" crossorigin="anonymous"></script>
|
||||
<link href="/tabulator/dist/css/tabulator_midnight.css" rel="stylesheet">
|
||||
<script type="text/javascript" src="https://unpkg.com/tabulator-tables@6.3.0/dist/js/tabulator.min.js"></script>
|
||||
<button id="refreshToggle" onclick="pauseData()">Data autorefreshes every 2 minutes, click here to pause (makes it easier)</button> <br>
|
||||
<a>Note that the start time will be the time the road was closed, not necessarily the time the issue started</a><br>
|
||||
<a>Double click any field to automatically copy to clipboard</a>
|
||||
<div id="controls">
|
||||
<label class="switch">
|
||||
<input type="checkbox" id="hidetoggle" onclick="hider()" checked>
|
||||
<span class="slider round"></span>
|
||||
</label> Show hidden reports
|
||||
</div>
|
||||
<input type="checkbox" id="cwa" name="cwa" value="RLX" onchange="filters()" checked>
|
||||
<label for="cwa">RLX only</label><br>
|
||||
<div id="wunderobs"></div>
|
||||
|
||||
<script>
|
||||
function googleMap(cell, formatterParams) {
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + cell.getData().lat + "+" + cell.getData().lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
var table = new Tabulator("#wunderobs", {
|
||||
virtualDom: true,
|
||||
virtualDomBuffer: 300,
|
||||
ajaxURL: "lsr.php?getCombinedTable=p",
|
||||
ajaxConfig: "GET",
|
||||
autoResize: true,
|
||||
initialSort: [{ column: "start", dir: "desc" }],
|
||||
columns: [
|
||||
{ title: "Id", field: "id", hozAlign: "center", visible: false },
|
||||
{ title: "Source", field: "source", hozAlign: "center", visible: false },
|
||||
{
|
||||
title: "LSR", field: "lsr",
|
||||
cellClick: function(e, cell) { cellClickCallback(e, cell); },
|
||||
formatter: "toggle",
|
||||
formatterParams: { size: 20, onValue: 'true', offValue: 'false', onColor: "green", offColor: "red", clickable: true }
|
||||
},
|
||||
{
|
||||
title: "Hide", field: "hide",
|
||||
cellClick: function(e, cell) { hideClickCallback(e, cell); },
|
||||
formatter: "toggle",
|
||||
formatterParams: { size: 20, onValue: 'true', offValue: 'false', onColor: "green", offColor: "red", clickable: true }
|
||||
},
|
||||
{ title: "Issue Start (Z)", field: "start" },
|
||||
// { title: "Last Update (Z)", field: "lastupdate" },
|
||||
{ title: "Lat", field: "lat" },
|
||||
{ title: "Lon", field: "lon" },
|
||||
{ title: "Category", field: "category" },
|
||||
{ title: "End (Z)", field: "endtime", formatter:function(cell, formatterParams, onRendered){
|
||||
const formattedDate = cell.getValue();
|
||||
if (!formattedDate) return "Ongoing";
|
||||
const inputDate = new Date(`${formattedDate}Z`);
|
||||
const now = new Date();
|
||||
const diffMs = now - inputDate;
|
||||
const diffMins = diffMs / (1000 * 60);
|
||||
|
||||
// An end time within the last 30 minutes is still treated as ongoing
return diffMins < 30 ? "Ongoing" : formattedDate;
|
||||
} },
|
||||
{ title: "County", field: "county" },
|
||||
{ title: "Location", field: "location", formatter: "link", formatterParams: { url: googleMap, target: "_blank" } },
|
||||
{ title: "Description", field: "description" }
|
||||
],
|
||||
cellDblClick: function(e, cell) {
|
||||
copyToClipboard(cell.getValue());
|
||||
},
|
||||
cellClick: function(e, cell) {
|
||||
if (e.ctrlKey) {
|
||||
copyToClipboard(cell.getValue());
|
||||
}
|
||||
},
|
||||
dataLoaded: function(data) {
|
||||
setTimeout(addManualDblClickListeners, 500);
|
||||
}
|
||||
});
|
||||
|
||||
function copyToClipboard(value) {
|
||||
if (value !== null && value !== undefined) {
|
||||
if (navigator.clipboard && navigator.clipboard.writeText) {
|
||||
navigator.clipboard.writeText(value.toString())
|
||||
.then(() => {
|
||||
// alert("Copied: " + value);
|
||||
})
|
||||
.catch(err => {
|
||||
fallbackCopy(value);
|
||||
});
|
||||
} else {
|
||||
fallbackCopy(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function fallbackCopy(text) {
|
||||
var textArea = document.createElement("textarea");
|
||||
textArea.value = text;
|
||||
document.body.appendChild(textArea);
|
||||
textArea.select();
|
||||
try {
|
||||
document.execCommand('copy');
|
||||
// alert("Copied (fallback): " + text);
|
||||
} catch (err) {
|
||||
// alert("Failed to copy text");
|
||||
}
|
||||
document.body.removeChild(textArea);
|
||||
}
|
||||
|
||||
function addManualDblClickListeners() {
|
||||
var cells = document.querySelectorAll(".tabulator-cell");
|
||||
cells.forEach(function(cell) {
|
||||
cell.addEventListener("dblclick", function(e) {
|
||||
copyToClipboard(cell.textContent.trim());
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function cellClickCallback(e, cell) {
|
||||
var row = cell.getRow();
|
||||
lsr(row.getData()['lsr'], row.getData()['id'],row.getData()['source'] );
|
||||
}
|
||||
|
||||
function lsr(lsr, id, source) {
|
||||
$.get({
|
||||
url: 'lsr.php?updater=true&lsr=' + lsr + "&id=" + id + "&table="+source,
|
||||
error: function(xhr, error) {
|
||||
alert('Unable to update, please refresh page');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function hideClickCallback(e, cell) {
|
||||
var row = cell.getRow();
|
||||
hide(row.getData()['hide'], row.getData()['id'], row.getData()['source']);
|
||||
}
|
||||
|
||||
function hide(hide, id, source) {
|
||||
$.get({
|
||||
url: 'lsr.php?updater=true&hide=' + hide + "&id=" + id + "&table="+source,
|
||||
error: function(xhr, error) {
|
||||
alert('Unable to update, please refresh page');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
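// Reload the table data in place, restoring the scroll position and
// re-attaching the manual double-click-to-copy listeners afterwards.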
function reloadData() {
|
||||
var oldscrolly = window.scrollY;
|
||||
var oldscrollx = window.scrollX;
|
||||
table.replaceData()
|
||||
.then(function() {
|
||||
window.scroll(oldscrollx, oldscrolly);
|
||||
setTimeout(addManualDblClickListeners, 500);
|
||||
})
|
||||
.catch(function(error) {
|
||||
// Silent error handling
|
||||
});
|
||||
}
|
||||
|
||||
function hider() {
|
||||
var hideit = document.getElementById("hidetoggle");
|
||||
if (hideit.checked == true) {
|
||||
table.removeFilter("hide", "=", "false");
|
||||
} else {
|
||||
table.addFilter("hide", "=", "false");
|
||||
}
|
||||
}
|
||||
|
||||
function filters() {
|
||||
var y = document.getElementById("cwa").checked;
|
||||
if (y) {
|
||||
table.addFilter("cwa", "=", 'RLX');
|
||||
} else {
|
||||
table.removeFilter("cwa", "=", 'RLX');
|
||||
}
|
||||
}
|
||||
|
||||
var timeout;
|
||||
var isRefreshing = true;
|
||||
timeout = setInterval(reloadData, 120000);
|
||||
|
||||
function pauseData() {
|
||||
var button = document.getElementById("refreshToggle");
|
||||
if (isRefreshing) {
|
||||
clearInterval(timeout);
|
||||
button.textContent = "Resume Autorefresh";
|
||||
isRefreshing = false;
|
||||
} else {
|
||||
timeout = setInterval(reloadData, 120000);
|
||||
button.textContent = "Data autorefreshes every 2 minutes, click here to pause (makes it easier)";
|
||||
isRefreshing = true;
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener("load", function() {
|
||||
setTimeout(addManualDblClickListeners, 1000);
|
||||
});
|
||||
filters();
|
||||
</script>
|
||||
This information is not provided as a direct service to the NWS
|
||||
</body>
|
||||
</html>
|
||||
144
5min.html
Normal file
@@ -0,0 +1,144 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
|
||||
|
||||
|
||||
<style type="text/css">
|
||||
body {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html, body {
|
||||
height: 100%;
|
||||
width: 100%
|
||||
|
||||
}
|
||||
#main {
|
||||
height: 95%;
|
||||
|
||||
}
|
||||
#bottombar {
|
||||
height: 5%;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id ="main">
|
||||
</div>
|
||||
|
||||
<div id="bottombar">
|
||||
<a href="cams.html" class="w3-button w3-black">Cam List</a>
|
||||
<a href="map.html" class="w3-button w3-black">Cam Map</a>
|
||||
<a href="admin.html" class="w3-button w3-black">Add Camera</a>
|
||||
<a href="db.html" class="w3-button w3-black">WU obs</a>
|
||||
</div>
<script>
|
||||
|
||||
function addZero(i) {
|
||||
if (i < 10) {
|
||||
i = "0" + i;
|
||||
}
|
||||
return i;
|
||||
}
|
||||
|
||||
|
||||
//STATIONS KCRW,KHTS,KPKB,KCKB,KEKN,KBKW,KPIT,KAGC,KZZV,KMGW,KHLG,KDUJ,KFKL,KLBE,KBVI,KJKL,KLOZ,KSME,KSJS,KSYM
|
||||
//LOCAL STATIONS KCRW,KHTS,KPKB,KCKB,KEKN,KBKW
|
||||
var icao = ["KCRW","KHTS","KPKB","KCKB","KEKN","KBKW"];
|
||||
var metars = {}
|
||||
var stnid = icao.join();
|
||||
|
||||
|
||||
|
||||
|
||||
for (var m = 0; m < icao.length; m++) {
|
||||
metars[icao[m]] = null;
|
||||
|
||||
}
|
||||
|
||||
|
||||
let url = 'https://api.synopticdata.com/v2/stations/latest?&token=5b49fa1894c04c1a91e621cefa783bfa&within=15&timeformat=%25Y%25m%25d&obtimezone=utc&units=english&output=json&status=active&stid=' + stnid;
|
||||
//let url = 'https://api.synopticdata.com/v2/stations/latest?&token=5b49fa1894c04c1a91e621cefa783bfa&within=15&timeformat=%25Y%25m%25d&obtimezone=utc&units=english&output=json&status=active&stid=KCRW,KHTS,KPKB,KCKB,KEKN,KBKW';
|
||||
grab_update(url);
|
||||
setInterval(grab_update,60000,url);
|
||||
|
||||
function grab_update(url) {
|
||||
|
||||
fetch(url)
|
||||
.then(res => res.json())
|
||||
.then((out) => {
|
||||
|
||||
|
||||
for (var i = 0; i < out["STATION"].length; i++) {
|
||||
//document.write(out["STATION"][i].OBSERVATIONS.metar_value_1.value);
|
||||
//document.write('<br>');
|
||||
metars[out["STATION"][i].STID] = out["STATION"][i].OBSERVATIONS.metar_value_1.value;
|
||||
//$("#main").html($('#main').html() + out["STATION"][i].OBSERVATIONS.metar_value_1.value + '<br>');
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
var currentdate = new Date();
|
||||
var datetime = "Last Update: "
|
||||
+ addZero((currentdate.getUTCMonth()+1)) + "/"
|
||||
+ addZero(currentdate.getUTCDate()) + "/"
|
||||
+ currentdate.getUTCFullYear() + " @ "
|
||||
+ addZero(currentdate.getUTCHours()) + ":"
|
||||
+ addZero(currentdate.getUTCMinutes()) + ":"
|
||||
+ addZero(currentdate.getUTCSeconds()) + "Z";
|
||||
|
||||
|
||||
var div = document.getElementById('main');
|
||||
div.innerHTML = "";
|
||||
for (var l in metars) {
|
||||
if (metars[l] != null) {
|
||||
div.innerHTML += metars[l];
|
||||
div.innerHTML += '<br>';
|
||||
}
|
||||
|
||||
}
|
||||
div.innerHTML += datetime;
|
||||
div.innerHTML += '<br>';
|
||||
div.innerHTML += '<br>';
|
||||
div.innerHTML += "Data courtesy of SynopticLabs (MesoWest) - 5 minute obs will run 10-15 minutes behind real-time, but any METAR/SPECI should update immediately"
|
||||
div.innerHTML += '<br>';
|
||||
div.innerHTML += 'Leave this page open and obs will update automatically';
})}
|
||||
|
||||
|
||||
</script>
</body>
|
||||
</html>
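The script above polls the Synoptic Data stations/latest endpoint once a minute and displays OBSERVATIONS.metar_value_1.value for each station in the icao list. A minimal Python sketch of the same request, assuming the response shape 5min.html already relies on (STATION, STID, metar_value_1); substitute your own Synoptic API token:

# Sketch: fetch the latest METAR per station, mirroring the request in 5min.html.
import requests

TOKEN = "YOUR_SYNOPTIC_TOKEN"  # 5min.html embeds its own token in the URL
STATIONS = ["KCRW", "KHTS", "KPKB", "KCKB", "KEKN", "KBKW"]

def latest_metars():
    params = {
        "token": TOKEN,
        "within": 15,          # only obs from the last 15 minutes
        "obtimezone": "utc",
        "units": "english",
        "output": "json",
        "status": "active",
        "stid": ",".join(STATIONS),
    }
    data = requests.get("https://api.synopticdata.com/v2/stations/latest",
                        params=params, timeout=10).json()
    metars = {}
    for stn in data.get("STATION", []):
        value = stn.get("OBSERVATIONS", {}).get("metar_value_1", {}).get("value")
        if value:
            metars[stn["STID"]] = value
    return metars

if __name__ == "__main__":
    for stid, metar in latest_metars().items():
        print(stid, metar)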
|
||||
221
admin.html
Normal file
@@ -0,0 +1,221 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Add Camera</title>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap.min.css">
|
||||
<link rel="stylesheet" href="//netdna.bootstrapcdn.com/font-awesome/3.2.1/css/font-awesome.min.css">
|
||||
<link rel="stylesheet" href="//d2d3qesrx8xj6s.cloudfront.net/dist/bootsnipp.min.css?ver=872ccd9c6dce18ce6ea4d5106540f089">
|
||||
<style type="text/css">
|
||||
html, body {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
#caminfo {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
table,th,td {
|
||||
border: 1px solid black;
|
||||
padding: 0 15px; /* horizontal cell padding; "padding-horizontal" is not a valid CSS property */
|
||||
|
||||
}
|
||||
|
||||
|
||||
</style>
|
||||
</head>
|
||||
|
||||
|
||||
<body>
|
||||
|
||||
|
||||
|
||||
<div id="caminfo">
|
||||
<form class="form-horizontal" id="newcam">
|
||||
<fieldset>
|
||||
|
||||
<!-- Form Name -->
|
||||
<center><legend>Add Camera</legend>
|
||||
<span class="help-block">This is not a NOAA/NWS system, do not enter any PII gained as a result of your NWS position</span>
|
||||
<span class="help-block">If you are unsure of what to enter, just email the details to Peck</span></center>
|
||||
<!-- Text input-->
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="url">URL</label>
|
||||
<div class="col-md-4">
|
||||
<input id="url" name="url" type="text" placeholder="" onchange="isValidUrl(this.value)" class="form-control input-md" required="">
|
||||
<span class="help-block">Include leading http:// https:// rtsp://</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Text input-->
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="interval">Interval for download (min)</label>
|
||||
<div class="col-md-4">
|
||||
<input id="interval" name="interval" type="text" value="10" class="form-control input-md">
|
||||
<span class="help-block">No quicker than 2 minutes, 10 suggested if you do not have explicit permission</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Text input-->
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="lat">Latitude</label>
|
||||
<div class="col-md-4">
|
||||
<input id="lat" name="lat" type="text" placeholder="" class="form-control input-md" required="">
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Text input-->
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="lon">Longitude</label>
|
||||
<div class="col-md-4">
|
||||
<input id="lon" name="lon" type="text" placeholder="" class="form-control input-md" required="">
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Text input-->
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="description">Description</label>
|
||||
<div class="col-md-4">
|
||||
<input id="description" name="description" type="text" placeholder="" class="form-control input-md" required="">
|
||||
<span class="help-block">Short description of the location</span>
|
||||
<span class="help-block">--------All fields above this line are mandatory--------</span>
|
||||
<span class="help-block">Default and/or blank values below this are probably fine</span>
|
||||
</div>
|
||||
|
||||
|
||||
</div>
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="method">Download Method</label>
|
||||
<div class="col-md-4">
|
||||
<select id="method" name="method" class="form-control">
|
||||
<option value="normal">Static image</option>
|
||||
<option value="rtsp">RTSP/HTTP/HLS (link playlist.m3u8 for HLS) stream</option>
|
||||
<option value="castr">Castr.io stream (link playlist.m3u8)</option>
|
||||
<option value="youtube">Youtube (link url in this format: https://www.youtube.com/@wxstoat/live)</option>
|
||||
<option value="normalproxy">Static image (US based proxy for sites that block my Canadian IP)</option>
|
||||
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="permission">Permission</label>
|
||||
<div class="col-md-4">
|
||||
<div class="radio">
|
||||
<label for="permission-1">
|
||||
<input type="radio" name="permission" id="permission-1" value="t">
|
||||
Yes
|
||||
</label>
|
||||
</div>
|
||||
<div class="radio">
|
||||
<label for="permission-2">
|
||||
<input type="radio" name="permission" id="permission-2" value="n" checked="checked">
|
||||
No
|
||||
</label>
|
||||
<span class="help-block">Permission to redistribute image, if you don't know, leave it no</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Text input-->
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="credit">Cam Owner</label>
|
||||
<div class="col-md-4">
|
||||
<input id="credit" name="credit" type="text" placeholder="" class="form-control input-md">
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Text input-->
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="email">E-mail for owner</label>
|
||||
<div class="col-md-4">
|
||||
<input id="email" name="email" type="text" placeholder="" class="form-control input-md">
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Button -->
|
||||
<div class="form-group">
|
||||
<label class="col-md-4 control-label" for="save"></label>
|
||||
<div class="col-md-4">
|
||||
<button id="save" name="save" class="btn btn-primary">Save</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</fieldset>
|
||||
</form>
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
function isValidUrl(string) {
|
||||
|
||||
$.ajax({
|
||||
url: 'admin.php?action=checkurl',
|
||||
type: 'POST',
|
||||
dataType: "json",
|
||||
data: {url:string},
|
||||
success: function(data) {
|
||||
console.log(data);
|
||||
if (data[0].exists == 't') {
|
||||
document.getElementById('url').style.borderColor = "red";
|
||||
alert("This camera url already exists in the database, if you don't see it displayed, ask Peck to make sure it's active")
|
||||
} else {
|
||||
document.getElementById('url').style.borderColor = "green";
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
$('#newcam').submit(function(e){
|
||||
e.preventDefault();
|
||||
$.ajax({
|
||||
url: 'admin.php?action=newcam',
|
||||
type: 'post',
|
||||
data:$('#newcam').serialize(),
|
||||
success:function(data){
|
||||
if (data == '1') {
|
||||
alert('cam added');
|
||||
document.getElementById("newcam").reset();
|
||||
} else {
|
||||
alert(data);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
</script>
</body>
|
||||
</html>
|
||||
61
admin.php
Normal file
@@ -0,0 +1,61 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
$action = $_GET['action'];
|
||||
|
||||
if ($action == 'checkurl' ) {
|
||||
$url = $_POST['url'];
|
||||
$query = "SELECT exists (SELECT 1 FROM cams WHERE url = '{$url}')";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$array = array();
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
|
||||
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
|
||||
}
|
||||
|
||||
if ($action == 'newcam' ) {
|
||||
$url = $_POST['url'];
|
||||
$lat = $_POST['lat'];
|
||||
$lon = $_POST['lon'];
|
||||
$desc = $_POST['description'];
|
||||
$method = $_POST['method'];
|
||||
$permission = $_POST['permission'];
|
||||
$owner = $_POST['credit']; // the Add Camera form posts the owner name under the "credit" field
|
||||
$email = $_POST['email'];
|
||||
|
||||
|
||||
$query = "INSERT into cams (url,lat,lon,description,interval,method,active,permission,owner,email,keephours) values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)";
|
||||
$result = pg_query_params($query, Array($url,$lat,$lon,$desc,'10',$method,'t',$permission,$owner,$email,'240')) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
|
||||
|
||||
|
||||
$status = pg_result_status($result);
|
||||
echo json_encode($status);
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
|
||||
|
||||
shell_exec('python3 /var/www/html/work/runallgeom.py');
|
||||
|
||||
}
26
cam.php
Normal file
@@ -0,0 +1,26 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
// Performing SQL query
|
||||
$query = "SELECT cwa,lat,lon,lastimage,county,elevation,camid,state,description,hydro,airport FROM cams WHERE active <> false AND lastsuccess IS NOT NULL AND (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) order by elevation desc";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in HTML
|
||||
$array = array();
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
// Ensure hydro is a proper boolean
|
||||
$line['hydro'] = ($line['hydro'] === 't' || $line['hydro'] === true);
|
||||
// Ensure airport is a proper boolean
|
||||
$line['airport'] = ($line['airport'] === 't' || $line['airport'] === true);
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
735
camai.py
Normal file
@@ -0,0 +1,735 @@
|
||||
import sys
|
||||
import hashlib
|
||||
import time
|
||||
import os
|
||||
import requests
|
||||
import cv2
|
||||
import ffmpeg
|
||||
import shutil
|
||||
from mimetypes import guess_extension
|
||||
import psycopg2
|
||||
from psycopg2.extras import Json
|
||||
from yt_dlp import YoutubeDL
|
||||
import numpy as np
|
||||
import json
|
||||
from PIL import Image
|
||||
import subprocess
|
||||
import ephem
|
||||
import datetime
|
||||
import imagehash
|
||||
from io import BytesIO
|
||||
import re
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.chrome.options import Options
|
||||
from selenium.webdriver.chrome.service import Service
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from collections import defaultdict
|
||||
import threading
|
||||
import random
|
||||
import warnings
|
||||
from urllib3.exceptions import InsecureRequestWarning
|
||||
from urllib.parse import unquote
|
||||
import logging
|
||||
|
||||
# Suppress InsecureRequestWarning
|
||||
warnings.filterwarnings('ignore', category=InsecureRequestWarning)
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s',
|
||||
handlers=[
|
||||
logging.FileHandler('/var/www/html/work/camai.log')
|
||||
]
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Database connection
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Constants
|
||||
CAM_DIR = "/var/www/html/work/camdata/"
|
||||
HOST_DELAY = 1.5  # Minimum delay (seconds) between requests to the same host
MAX_WORKERS = 20  # Thread pool size for camera downloads
COMMIT_INTERVAL = 25  # Commit after this many successful camera updates
|
||||
|
||||
# Global host tracking and commit counter
|
||||
host_last_access = defaultdict(float)
|
||||
host_lock = threading.Lock()
|
||||
commit_counter = threading.local()
|
||||
|
||||
# YouTube downloader setup
|
||||
ydl_options = {
|
||||
'quiet': True,
|
||||
'cookiefile': 'cook.txt'
|
||||
}
|
||||
ydl = YoutubeDL(ydl_options)
|
||||
ydl.add_default_info_extractors()
|
||||
|
||||
headers = {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36',
|
||||
'Cache-Control': 'no-cache'
|
||||
}
|
||||
|
||||
proxies = {
|
||||
"http": "http://192.168.11.83:8118",
|
||||
"https": "http://192.168.11.83:8118"
|
||||
}
|
||||
|
||||
def get_host_delay(url):
|
||||
"""Implement delay between requests to same host from any URL type"""
|
||||
hostname_match = re.match(r'(?:https?|rtsp)://([^/:]+)', url)
|
||||
hostname = hostname_match.group(1) if hostname_match else 'unknown'
|
||||
|
||||
with host_lock:
|
||||
current_time = time.time()
|
||||
last_access = host_last_access[hostname]
|
||||
time_since_last = current_time - last_access
|
||||
if time_since_last < HOST_DELAY:
|
||||
time.sleep(HOST_DELAY - time_since_last)
|
||||
host_last_access[hostname] = time.time()
|
||||
|
||||
def image_hash(blob):
|
||||
img = Image.open(BytesIO(blob))
|
||||
return str(imagehash.phash(img, hash_size=10))
|
||||
|
||||
def check_if_daytime(lat='38.4803826', lon='-81.1850195'):
|
||||
now = datetime.datetime.utcnow()
|
||||
o = ephem.Observer()
|
||||
o.lon, o.lat, o.date = lon, lat, now
|
||||
s = ephem.Sun()
|
||||
s.compute(o)
|
||||
# ephem altitudes are in radians, so compare against an explicit -3 degree threshold
return s.alt > ephem.degrees('-3')
|
||||
|
||||
def guess_ext(content_type):
|
||||
ext = guess_extension(content_type.partition(';')[0].strip())
|
||||
return '.jpg' if ext == '.jpe' or not ext else ext
|
||||
|
||||
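# Normalize frames to roughly 800 px wide: upscale anything narrower than
# 600 px and downscale anything wider than 1000 px; frames already in the
# 600-1000 px range are left untouched.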
def resize_frame(frame):
|
||||
width = frame.shape[1]
|
||||
if width < 600:
|
||||
scalefactor = 800 / width
|
||||
elif width > 1000:
|
||||
scalefactor = 800 / width
|
||||
else:
|
||||
return frame
|
||||
|
||||
new_width = int(scalefactor * width)
|
||||
new_height = int(scalefactor * frame.shape[0])
|
||||
return cv2.resize(frame, (new_width, new_height))
|
||||
|
||||
def save_frame(frame, path, camid):
|
||||
if not os.path.exists(os.path.dirname(path)):
|
||||
os.makedirs(os.path.dirname(path))
|
||||
cv2.imwrite(path, frame)
|
||||
|
||||
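# Refresh snapshot URLs for ipcamlive.com cameras that are due for an update:
# ask the site for each camera's current stream id and host, then store the
# resulting snapshot.jpg address back into the cams table.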
def ipcamlive_update():
|
||||
cursor.execute("""SELECT camid, bloomsky FROM cams WHERE method = 'ipcam' AND
|
||||
active = True
|
||||
AND ((EXTRACT(EPOCH FROM (current_timestamp - lastsuccess))/60) > interval OR lastsuccess IS NULL)
|
||||
""")
|
||||
ipcams = cursor.fetchall()
|
||||
#logger.info(ipcams)
|
||||
base_url = 'https://www.ipcamlive.com/ajax/getcamerastreamstate.php?cameraalias='
|
||||
headers = {
|
||||
'Connection': 'keep-alive',
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36',
|
||||
'Content-Type': 'application/json; charset=UTF-8',
|
||||
'Accept': '*/*',
|
||||
'Origin': 'https://ipcamlive.com',
|
||||
'Sec-Fetch-Site': 'same-origin',
|
||||
'Sec-Fetch-Mode': 'cors',
|
||||
'Sec-Fetch-Dest': 'empty',
|
||||
'X-HTTP-Method-Override': 'POST',
|
||||
'Host': 'g1.ipcamlive.com',
|
||||
'Referer': 'https://ipcamlive.com/',
|
||||
'Accept-Language': 'en-US,en;q=0.9'
|
||||
}
|
||||
|
||||
try:
|
||||
# Create a single session for all requests
|
||||
with requests.Session() as session:
|
||||
session.headers.update(headers) # Apply headers to the session
|
||||
for camid, alias in ipcams:
|
||||
if alias:
|
||||
url = f"{base_url}{alias}&targetdomain=g1.ipcamlive.com"
|
||||
get_host_delay(url) # Assuming this function handles delays appropriately
|
||||
r = session.post(url) # Use session for the POST request
|
||||
if r.status_code == 200:
|
||||
ipcam = r.json()
|
||||
#logger.info(ipcam)
|
||||
ipid = ipcam.get('details').get('streamid')
|
||||
ipurl = ipcam.get('details').get('address')
|
||||
#logger.info(f"{ipid} {ipurl}")
|
||||
snapshot_url = f"{ipurl}streams/{ipid}/snapshot.jpg"
|
||||
if ipid is None:
|
||||
cursor.execute("UPDATE cams SET lastfailed = False WHERE camid = %s",(camid,))
|
||||
continue
|
||||
cursor.execute("UPDATE cams SET url = %s WHERE method = 'ipcam' AND camid = %s",
|
||||
(snapshot_url, camid))
|
||||
logger.info("IPCamlive update completed successfully")
|
||||
except Exception as e:
|
||||
logger.error(f"IPCamlive update failed: {e}")
|
||||
|
||||
def get_camera_handler(method):
|
||||
"""Return appropriate handler based on camera method"""
|
||||
|
||||
return {
|
||||
#'rtsp': handle_rtsp_ffmpeg,
|
||||
'hls': handle_hls,
|
||||
'rtsp': handle_hls,
|
||||
'verkada': handle_verkada,
|
||||
'verkadalow': handle_verkadalow,
|
||||
'castr': handle_castr,
|
||||
'normal': handle_normal,
|
||||
'normalcache': handle_normal_cache,
|
||||
'youtube': handle_youtube,
|
||||
'normalproxy': handle_normal_proxy,
|
||||
'ipcam': handle_normal,
|
||||
'bloomsky': handle_bloomsky,
|
||||
'ivideon': handle_ivideon,
|
||||
'wetmet': handle_wetmet,
|
||||
'rtspme': handle_rtspme,
|
||||
'rail': handle_rail
|
||||
}.get(method, handle_normal)
|
||||
|
||||
def handle_normal(entry):
|
||||
|
||||
# Unpack before the try block so the except handler can reference camid
camid, url, *_ = entry
try:
|
||||
get_host_delay(url)
|
||||
r = requests.get(url, timeout=3, headers=headers, verify=False)
|
||||
if r.status_code == 200:
|
||||
process_image_response(r.content, r.headers, camid, url)
|
||||
logger.debug(f"Successfully downloaded normal image for camid {camid}")
|
||||
except Exception as e:
|
||||
logger.error(f"Normal download failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_normal_cache(entry):
|
||||
camid, url, *rest = entry
|
||||
modified_entry = (camid, f"{url}?{random.randint(10, 99999)}", *rest)
|
||||
handle_normal(modified_entry)
|
||||
|
||||
|
||||
|
||||
def handle_ohgo(entry):
|
||||
camid, url, *_ = entry
|
||||
try:
|
||||
get_host_delay(url)
|
||||
r = requests.get(url, timeout=3, headers=headers, verify=False, proxies=proxies)
|
||||
if r.status_code == 200:
|
||||
process_image_response(r.content, r.headers, camid, url)
|
||||
logger.debug(f"Successfully downloaded proxy image for camid {camid}")
|
||||
except Exception as e:
|
||||
logger.error(f"Normal proxy download failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
|
||||
def handle_normal_proxy(entry):
|
||||
camid, url, *_ = entry
|
||||
try:
|
||||
get_host_delay(url)
|
||||
r = requests.get(url, timeout=3, headers=headers, verify=False, proxies=proxies)
|
||||
if r.status_code == 200:
|
||||
process_image_response(r.content, r.headers, camid, url)
|
||||
logger.debug(f"Successfully downloaded proxy image for camid {camid}")
|
||||
except Exception as e:
|
||||
logger.error(f"Normal proxy download failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_rtsp_ffmpeg(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
|
||||
args = (
|
||||
ffmpeg
|
||||
.input(url, rtsp_transport='tcp', stimeout='5000000')
|
||||
.filter('scale', 800, -1)
|
||||
.output('pipe:', vframes=1, format='image2', vcodec='mjpeg')
|
||||
.compile()
|
||||
)
|
||||
args = [args[0]] + ['-nostats', '-loglevel', 'error'] + args[1:]
|
||||
|
||||
result = subprocess.run(args, capture_output=True, timeout=15, check=True)
|
||||
frame = result.stdout
|
||||
|
||||
if frame:
|
||||
md5.update(frame)
|
||||
nparr = np.frombuffer(frame, np.uint8)
|
||||
img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
|
||||
if img is not None:
|
||||
process_frame(img, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed RTSP frame for camid {camid}")
|
||||
except (subprocess.TimeoutExpired, subprocess.CalledProcessError) as e:
|
||||
logger.error(f"RTSP FFmpeg failed for camid {camid} (timeout or process error): {e}")
|
||||
failed_download(camid, str(e))
|
||||
except Exception as e:
|
||||
logger.error(f"RTSP FFmpeg failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_hls(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
|
||||
# Get ffmpeg command arguments, now with a User-Agent header
|
||||
args = (
|
||||
ffmpeg
|
||||
.input(url, user_agent=headers['User-Agent'])
|
||||
.filter('scale', 800, -1)
|
||||
.output('pipe:', vframes=1, format='image2', vcodec='mjpeg')
|
||||
.compile()
|
||||
)
|
||||
# Add arguments to suppress output
|
||||
args = [args[0]] + ['-nostats', '-loglevel', 'error'] + args[1:]
|
||||
|
||||
# Execute using subprocess to allow for a timeout
|
||||
result = subprocess.run(args, capture_output=True, timeout=15, check=True)
|
||||
frame = result.stdout
|
||||
|
||||
if frame:
|
||||
md5.update(frame)
|
||||
nparr = np.frombuffer(frame, np.uint8)
|
||||
img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
|
||||
if img is not None:
|
||||
process_frame(img, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed HLS frame for camid {camid}")
|
||||
except (subprocess.TimeoutExpired, subprocess.CalledProcessError) as e:
|
||||
#logger.error(f"HLS FFmpeg failed for camid {camid} (timeout or process error): {e} {url}")
|
||||
failed_download(camid, str(e))
|
||||
except Exception as e:
|
||||
logger.error(f"HLS FFmpeg failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_verkada(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
wvheaders = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:105.0) Gecko/20100101 Firefox/105.0",
|
||||
"Connection": "keep-alive",
|
||||
"referrer": "https://command.verkada.com/embed.html",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
|
||||
"Accept-Language": "en-US,en;q=0.5",
|
||||
"cookie": "intercom-id-q5re5q6g=183b11d6-5bfb-4e20-8a91-e5733758fbfd; intercom-session-q5re5q6g=; auth='(null)'; token="
|
||||
}
|
||||
r = requests.get(url, headers=wvheaders)
|
||||
p = re.compile(r'https?://[^\s"<>]+high_res[^\s"<>]*')
|
||||
match = p.search(r.url)
|
||||
if match:
|
||||
|
||||
urlinfo = unquote(match.group(0))
|
||||
fragment = unquote(urlinfo.split('#')[1])
|
||||
data = json.loads(fragment)
|
||||
m3u8_url = data.get('urlHD', data.get('urlSD'))
|
||||
get_host_delay(m3u8_url)
|
||||
cap = cv2.VideoCapture(m3u8_url)
|
||||
if cap.isOpened():
|
||||
_, frame = cap.read()
|
||||
cap.release()
|
||||
if frame is not None:
|
||||
md5.update(frame.tobytes())
|
||||
frame = resize_frame(frame)
|
||||
process_frame(frame, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed Verkada frame for camid {camid}")
|
||||
else:
|
||||
failed_download(camid, "No stream URL found")
|
||||
except Exception as e:
|
||||
logger.error(f"Verkada failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
|
||||
def handle_verkadalow(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
wvheaders = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:105.0) Gecko/20100101 Firefox/105.0",
|
||||
"Connection": "keep-alive",
|
||||
"referrer": "https://command.verkada.com/embed.html",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
|
||||
"Accept-Language": "en-US,en;q=0.5",
|
||||
"cookie": "intercom-id-q5re5q6g=183b11d6-5bfb-4e20-8a91-e5733758fbfd; intercom-session-q5re5q6g=; auth='(null)'; token="
|
||||
}
|
||||
r = requests.get(url, headers=wvheaders)
|
||||
p = re.compile(r'https?://[^\s"<>]+high_res[^\s"<>]*')
|
||||
match = p.search(r.url)
|
||||
if match:
|
||||
|
||||
urlinfo = unquote(match.group(0))
|
||||
fragment = unquote(urlinfo.split('#')[1])
|
||||
data = json.loads(fragment)
|
||||
m3u8_url = data.get('urlSD', data.get('urlHD'))
|
||||
get_host_delay(m3u8_url)
|
||||
cap = cv2.VideoCapture(m3u8_url)
|
||||
if cap.isOpened():
|
||||
_, frame = cap.read()
|
||||
cap.release()
|
||||
if frame is not None:
|
||||
md5.update(frame.tobytes())
|
||||
frame = resize_frame(frame)
|
||||
process_frame(frame, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed Verkada frame for camid {camid}")
|
||||
else:
|
||||
failed_download(camid, "No stream URL found")
|
||||
except Exception as e:
|
||||
logger.error(f"Verkada failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_castr(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
referrer = f'https://player.castr.io/{url.rsplit("/", 1)[-1]}'
|
||||
|
||||
args = (
|
||||
ffmpeg
|
||||
.input(url, headers=f'Referer: {referrer}\r\n')
|
||||
.filter('scale', 800, -1)
|
||||
.output('pipe:', vframes=1, format='image2', vcodec='mjpeg')
|
||||
.compile()
|
||||
)
|
||||
args = [args[0]] + ['-nostats', '-loglevel', 'error'] + args[1:]
|
||||
|
||||
result = subprocess.run(args, capture_output=True, timeout=15, check=True)
|
||||
frame = result.stdout
|
||||
|
||||
if frame:
|
||||
md5.update(frame)
|
||||
process_frame_bytes(frame, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed Castr frame for camid {camid}")
|
||||
except (subprocess.TimeoutExpired, subprocess.CalledProcessError) as e:
|
||||
logger.error(f"Castr failed for camid {camid} (timeout or process error): {e}")
|
||||
failed_download(camid, str(e))
|
||||
except Exception as e:
|
||||
logger.error(f"Castr failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_youtube(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
info = ydl.extract_info(url, download=False)
|
||||
stream_url = info['url']
|
||||
|
||||
get_host_delay(stream_url)
|
||||
cap = cv2.VideoCapture(stream_url)
|
||||
if cap.isOpened():
|
||||
_, frame = cap.read()
|
||||
cap.release()
|
||||
if frame is not None:
|
||||
md5.update(frame.tobytes())
|
||||
frame = resize_frame(frame)
|
||||
process_frame(frame, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed YouTube frame for camid {camid}")
|
||||
except Exception as e:
|
||||
logger.error(f"YouTube failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_bloomsky(entry):
|
||||
camid, url, *_ = entry
|
||||
if check_if_daytime():
|
||||
handle_normal(entry)
|
||||
|
||||
|
||||
def handle_ivideon(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
referrer = f'https://open.ivideon.io/{url.rsplit("/", 1)[-1]}'
|
||||
|
||||
args = (
|
||||
ffmpeg
|
||||
.input(url, headers=f'Referer: {referrer}\r\n')
|
||||
.filter('scale', 800, -1)
|
||||
.output('pipe:', vframes=1, format='image2', vcodec='mjpeg')
|
||||
.compile()
|
||||
)
|
||||
args = [args[0]] + ['-nostats', '-loglevel', 'error'] + args[1:]
|
||||
|
||||
result = subprocess.run(args, capture_output=True, timeout=15, check=True)
|
||||
frame = result.stdout
|
||||
|
||||
if frame:
|
||||
md5.update(frame)
|
||||
process_frame_bytes(frame, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed Ivideon frame for camid {camid}")
|
||||
except (subprocess.TimeoutExpired, subprocess.CalledProcessError) as e:
|
||||
logger.error(f"Ivideon failed for camid {camid} (timeout or process error): {e}")
|
||||
failed_download(camid, str(e))
|
||||
except Exception as e:
|
||||
logger.error(f"Ivideon failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
|
||||
def handle_wetmet(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
referrer = 'https://api.wetmet.net/'
|
||||
|
||||
args = (
|
||||
ffmpeg
|
||||
.input(url, headers=f'Referer: {referrer}\r\n')
|
||||
.filter('scale', 800, -1)
|
||||
.output('pipe:', vframes=1, format='image2', vcodec='mjpeg')
|
||||
.compile()
|
||||
)
|
||||
args = [args[0]] + ['-nostats', '-loglevel', 'error'] + args[1:]
|
||||
|
||||
result = subprocess.run(args, capture_output=True, timeout=15, check=True)
|
||||
frame = result.stdout
|
||||
|
||||
if frame:
|
||||
md5.update(frame)
|
||||
process_frame_bytes(frame, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed Wetmet frame for camid {camid}")
|
||||
except (subprocess.TimeoutExpired, subprocess.CalledProcessError) as e:
|
||||
logger.error(f"Wetmet failed for camid {camid} (timeout or process error): {e}")
|
||||
failed_download(camid, str(e))
|
||||
except Exception as e:
|
||||
logger.error(f"Wetmet failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_rtspme(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
service = Service(executable_path=r'/usr/bin/chromedriver')
|
||||
options = Options()
|
||||
options.add_argument("--headless")
|
||||
driver = webdriver.Chrome(service=service, options=options)
|
||||
driver.get(url)
|
||||
hlsurl = driver.execute_script('return ur_t')
|
||||
driver.close()
|
||||
|
||||
get_host_delay(hlsurl)
|
||||
cap = cv2.VideoCapture(hlsurl)
|
||||
if cap.isOpened():
|
||||
_, frame = cap.read()
|
||||
cap.release()
|
||||
if frame is not None:
|
||||
md5.update(frame.tobytes())
|
||||
frame = resize_frame(frame)
|
||||
process_frame(frame, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed RTSPME frame for camid {camid}")
|
||||
except Exception as e:
|
||||
logger.error(f"RTSPME failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def handle_rail(entry):
|
||||
camid, url, *_ = entry
|
||||
md5 = hashlib.md5()
|
||||
try:
|
||||
get_host_delay(url)
|
||||
r = requests.get(url, proxies=proxies)
|
||||
p = re.search(r"\"streaming\": \"(.*)\",", r.text)
|
||||
stream_url = p.group(1)
|
||||
|
||||
get_host_delay(stream_url)
|
||||
cap = cv2.VideoCapture(stream_url)
|
||||
if cap.isOpened():
|
||||
_, frame = cap.read()
|
||||
cap.release()
|
||||
if frame is not None:
|
||||
md5.update(frame.tobytes())
|
||||
frame = resize_frame(frame)
|
||||
process_frame(frame, md5.hexdigest(), camid)
|
||||
logger.debug(f"Successfully processed Rail frame for camid {camid}")
|
||||
except Exception as e:
|
||||
logger.error(f"Rail failed for camid {camid}: {e}")
|
||||
failed_download(camid, str(e))
|
||||
|
||||
def process_image_response(content, headers, camid, url):
|
||||
md5 = hashlib.md5(content)
|
||||
nparr = np.frombuffer(content, np.uint8)
|
||||
frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
|
||||
if frame is not None:
|
||||
frame = resize_frame(frame)
|
||||
ext = guess_ext(headers.get('content-type', ''))
|
||||
process_frame(frame, md5.hexdigest(), camid, ext)
|
||||
|
||||
def process_frame(frame, hash_value, camid, ext='.jpg'):
|
||||
path = f"{CAM_DIR}{camid}/{hash_value}{ext}"
|
||||
relpath = f"{camid}/{hash_value}{ext}"
|
||||
save_frame(frame, path, camid)
|
||||
camhandler(path, camid, relpath)
|
||||
|
||||
def process_frame_bytes(frame_bytes, hash_value, camid, ext='.jpg'):
|
||||
path = f"{CAM_DIR}{camid}/{hash_value}{ext}"
|
||||
relpath = f"{camid}/{hash_value}{ext}"
|
||||
if not os.path.exists(os.path.dirname(path)):
|
||||
os.makedirs(os.path.dirname(path))
|
||||
with open(path, 'wb') as f:
|
||||
f.write(frame_bytes)
|
||||
camhandler(path, camid, relpath)
|
||||
|
||||
def camhandler(path, camid, relpath):
|
||||
if not os.path.exists(path):
|
||||
return
|
||||
|
||||
fsize = os.path.getsize(path)
|
||||
if fsize < 100:
|
||||
os.remove(path)
|
||||
failed_download(camid, "File size too small")
|
||||
return
|
||||
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
"INSERT INTO camdb (camid, filepath) VALUES (%s, %s) ON CONFLICT ON CONSTRAINT filepath DO NOTHING",
|
||||
(camid, relpath)
|
||||
)
|
||||
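# Filenames include the image hash, so a matching relpath means the camera
# returned the same frame as last time; count that as a stale/failed attempt.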
cur.execute(
|
||||
"UPDATE cams SET lasttry = current_timestamp, totalfails = totalfails + 1, lastfailed = True "
|
||||
"WHERE camid = %s AND lastimage = %s",
|
||||
(camid, relpath)
|
||||
)
|
||||
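# A new hash means a fresh frame: record the success, reset the failure
# counters, and remember this image as the latest one.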
cur.execute(
|
||||
"UPDATE cams SET lasttry = current_timestamp, lastsuccess = current_timestamp, totalfails = 0, "
|
||||
"lastfailed = False, lastimage = %s WHERE camid = %s AND (lastimage <> %s OR lastimage IS NULL)",
|
||||
(relpath, camid, relpath)
|
||||
)
|
||||
if not hasattr(commit_counter, 'count'):
|
||||
commit_counter.count = 0
|
||||
commit_counter.count += 1
|
||||
if commit_counter.count >= COMMIT_INTERVAL:
|
||||
conn.commit()
|
||||
commit_counter.count = 0
|
||||
logger.debug(f"Committed database changes after processing {COMMIT_INTERVAL} cameras")
|
||||
make_thumbnail(path, camid)
|
||||
|
||||
def make_thumbnail(path, camid):
|
||||
try:
|
||||
im = Image.open(path)
|
||||
im.thumbnail((320, 320), Image.Resampling.LANCZOS)
|
||||
im.save(f"{CAM_DIR}{camid}/latest.jpg", "JPEG")
|
||||
except Exception as e:
|
||||
logger.error(f"Thumbnail generation failed for camid {camid}: {e}")
|
||||
|
||||
def failed_download(camid, error_msg=None):
|
||||
with conn.cursor() as cur:
|
||||
if error_msg:
|
||||
# Truncate error message if it's too long for the column
|
||||
if len(error_msg) > 255:
|
||||
error_msg = error_msg[:252] + "..."
|
||||
cur.execute(
|
||||
"UPDATE cams SET lastfailed = True, totalfails = totalfails + 1, lasttry = current_timestamp, "
|
||||
"errorcode = %s WHERE camid = %s",
|
||||
(error_msg, camid)
|
||||
)
|
||||
else:
|
||||
cur.execute(
|
||||
"UPDATE cams SET lastfailed = True, totalfails = totalfails + 1, lasttry = current_timestamp "
|
||||
"WHERE camid = %s",
|
||||
(camid,)
|
||||
)
|
||||
conn.commit()
|
||||
logger.debug(f"Marked download as failed and committed for camid {camid}")
|
||||
|
||||
def clean_up(entry):
|
||||
camid, keephours, lastsuccess, active = entry
|
||||
keephours = 7*24 if not keephours else keephours
|
||||
|
||||
if not active:
|
||||
shutil.rmtree(f"{CAM_DIR}{camid}", ignore_errors=True)
|
||||
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
"SELECT filepath, dateutc FROM camdb WHERE camid = %s AND "
|
||||
"(EXTRACT(EPOCH FROM (current_timestamp - dateutc))/3600) > %s",
|
||||
(camid, keephours)
|
||||
)
|
||||
camfiles = cur.fetchall()
|
||||
|
||||
for filepath, _ in camfiles:
|
||||
try:
|
||||
os.remove(f"{CAM_DIR}{filepath}")
|
||||
except:
|
||||
pass
|
||||
cur.execute("DELETE FROM camdb WHERE filepath = %s", (filepath,))
|
||||
conn.commit()
|
||||
logger.debug(f"Cleanup completed for camid {camid}")
|
||||
|
||||
def fetch_cameras():
|
||||
cursor.execute("""
|
||||
SELECT camid, url, method, interval, keephours, lastsuccess
|
||||
FROM cams
|
||||
WHERE ((totalfails < 5 OR totalfails IS NULL) AND active = True)
|
||||
AND ((EXTRACT(EPOCH FROM (current_timestamp - lastsuccess))/60) > interval OR lastsuccess IS NULL)
|
||||
AND (lastfailed IS NULL OR lastfailed = false OR (lastfailed = True AND (EXTRACT(EPOCH FROM (current_timestamp - lasttry))/60) > interval/4))
|
||||
""")
|
||||
|
||||
return cursor.fetchall()
|
||||
|
||||
def fetch_failed_cameras():
|
||||
cursor.execute("""
|
||||
SELECT camid, url, method, interval, keephours, lasttry
|
||||
FROM cams
|
||||
WHERE (totalfails >= 5 AND active <> false AND
|
||||
((EXTRACT(EPOCH FROM (current_timestamp - lasttry))/60) > interval))
|
||||
""")
|
||||
return cursor.fetchall()
|
||||
|
||||
def fetch_cleanup_cameras():
|
||||
cursor.execute("SELECT camid, keephours, lastsuccess, active FROM cams")
|
||||
return cursor.fetchall()
|
||||
|
||||
def main():
|
||||
logger.info("Starting camera processing")
|
||||
|
||||
ipcamlive_update()
|
||||
conn.commit()
|
||||
allcams = fetch_cameras()
|
||||
logger.info(f"Processing {len(allcams)} active cameras")
|
||||
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
|
||||
futures = [executor.submit(get_camera_handler(cam[2]), cam) for cam in allcams]
|
||||
for future in futures:
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error(f"Task execution failed: {e}")
|
||||
conn.commit()
|
||||
logger.debug("Final commit for active cameras")
|
||||
|
||||
failedcams = fetch_failed_cameras()
|
||||
logger.info(f"Processing {len(failedcams)} failed cameras")
|
||||
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
|
||||
futures = [executor.submit(get_camera_handler(cam[2]), cam) for cam in failedcams]
|
||||
for future in futures:
|
||||
try:
|
||||
#logger.info(future)
|
||||
future.result()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed camera task execution failed: {e}")
|
||||
conn.commit()
|
||||
logger.debug("Final commit for failed cameras")
|
||||
|
||||
cleancams = fetch_cleanup_cameras()
|
||||
logger.info(f"Performing cleanup on {len(cleancams)} cameras")
|
||||
for cam in cleancams:
|
||||
clean_up(cam)
|
||||
|
||||
logger.info("Camera processing completed")
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except Exception as e:
|
||||
logger.critical(f"Main execution failed: {e}")
|
||||
conn.rollback()
|
||||
finally:
|
||||
cursor.close()
|
||||
conn.close()
|
||||
102
camapi.php
Normal file
102
camapi.php
Normal file
@@ -0,0 +1,102 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
|
||||
//select camid,url,description from cams where cwa = 'RLX' and method = 'rtsp' and active = true AND (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and lat > %s and lat < %s and lon > %s and lon < %s order by lastsuccess desc limit 25
|
||||
|
||||
|
||||
//no GETs, current point outage info
|
||||
//error_reporting(-1); // reports all errors
|
||||
//ini_set("display_errors", "1"); // shows all errors
|
||||
//ini_set("log_errors", 1);
|
||||
//ini_set("error_log", "php-error.log");
|
||||
|
||||
|
||||
//county/state max
|
||||
|
||||
if (isset($_GET['cams'])) {
|
||||
if($_GET['lat1']) {
|
||||
$lat1 = pg_escape_string($_GET['lat1']);
|
||||
if($_GET['lon1']) {
|
||||
$lon1 = pg_escape_string($_GET['lon1']);
|
||||
if($_GET['lat2']) {
|
||||
$lat2 = pg_escape_string($_GET['lat2']);
|
||||
if($_GET['lon2']) {
|
||||
$lon2 = pg_escape_string($_GET['lon2']);
|
||||
if($_GET['elevbottom']) {
|
||||
$elevbottom = pg_escape_string($_GET['elevbottom']);
|
||||
if($_GET['elevtop']) {
|
||||
$elevtop = pg_escape_string($_GET['elevtop']);
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
"select camid,url,description from cams where method = 'rtsp' and active = true and cwa = 'RLX' and elevation > $5 and elevation < $6 and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and lat < $1 and lat > $2 and lon < $3 and lon > $4 order by elevation desc",
|
||||
array($lat1,$lat2,$lon1,$lon2,$elevbottom,$elevtop)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
|
||||
|
||||
//$result = pg_query_params($dbconn,
|
||||
//select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
|
||||
//"select camid,url,description from cams where method = 'rtsp' and active = true $1 '162.210.14.137' and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and lat < $2 and lat > $3 and lon < $4 and lon > $5",
|
||||
//array($res,$lat1,$lat2,$lon1,$lon2)) or die('Query failed: ' . pg_last_error());
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
|
||||
}}}}}}}
|
||||
|
||||
if (isset($_GET['camstatic'])) {
|
||||
|
||||
if($_GET['lat1']) {
|
||||
$lat1 = pg_escape_string($_GET['lat1']);
|
||||
if($_GET['lon1']) {
|
||||
$lon1 = pg_escape_string($_GET['lon1']);
|
||||
if($_GET['radius']) {
|
||||
$radius = pg_escape_string($_GET['radius']);
|
||||
$rad = $radius / 70;
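// Rough unit conversion (assuming the radius parameter is given in miles): one degree of
// latitude spans roughly 69 miles, so dividing by 70 approximates degrees for the
// st_dwithin() test against the lat/lon (SRID 4326) geometry below.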
|
||||
|
||||
$lat1 = floatval($lat1);
|
||||
$lon1 = floatval($lon1);
|
||||
$radius = floatval($rad);
|
||||
$query = "select * from cams where method = 'rtsp' and active = true and cwa = 'RLX' and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and st_dwithin(geom, ST_SetSRID(ST_Point(" . strval($lon1) . ", " . strval($lat1) . "), 4326)," . strval($radius) . ") order by elevation desc";
|
||||
|
||||
|
||||
|
||||
$result = pg_query($dbconn,$query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
|
||||
|
||||
|
||||
}}}}
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['camdb'])) {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT COUNT(*) FROM camdb") or die('Query failed: ' . pg_last_error());
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
?>
|
||||
43
camcircle.php
Normal file
43
camcircle.php
Normal file
@@ -0,0 +1,43 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
// Performing SQL query
|
||||
//$query = "SELECT distinct on (camid) camid, filepath FROM camdb order by camid,dateutc desc";
|
||||
//$query = "SELECT * FROM cams WHERE active <> false AND lastsuccess IS NOT NULL AND (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) order by elevation desc";
|
||||
//$query = "SELECT cwa,lat,lon,lastimage,county,elevation,camid,state,description FROM cams WHERE active <> false AND lastsuccess IS NOT NULL AND (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) order by elevation desc";
|
||||
//$query = "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(WITH subquery_points AS (SELECT geom AS point_geometry FROM cams WHERE active = true and totalfails < 50) SELECT ST_Buffer(point_geometry::geography, 8000) AS circle_geometry FROM subquery_points)))";
|
||||
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true"
|
||||
$query = "WITH subquery_points AS (
|
||||
SELECT geom AS point_geometry
|
||||
FROM cams
|
||||
WHERE active = true and lastsuccess IS NOT NULL AND (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < 60
|
||||
)
|
||||
SELECT jsonb_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', jsonb_agg(jsonb_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(ST_Buffer(point_geometry::geography, 8000))::jsonb
|
||||
))
|
||||
) AS feature_collection
|
||||
FROM subquery_points";
|
||||
|
||||
|
||||
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in HTML
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
|
||||
}
|
||||
print_r($array[0]['feature_collection']);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
56
camlist.php
Normal file
56
camlist.php
Normal file
@@ -0,0 +1,56 @@
|
||||
<?php
|
||||
header('Content-Type: application/json; charset=utf-8');
|
||||
|
||||
// Initialize response array
|
||||
$response = [];
|
||||
|
||||
// Database connection
|
||||
try {
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws");
|
||||
if (!$dbconn) {
|
||||
throw new Exception('Could not connect to database');
|
||||
}
|
||||
|
||||
// Performing SQL query
|
||||
$query = "SELECT url, lat, lon, elevation, county, state, active, aspect, bloomsky, source, method FROM cams where active = true";
|
||||
$result = pg_query($dbconn, $query);
|
||||
|
||||
if (!$result) {
|
||||
throw new Exception('Query failed: ' . pg_last_error());
|
||||
}
|
||||
|
||||
// Fetch results
|
||||
$data = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
// Ensure numeric values are properly typed
|
||||
$line['lat'] = floatval($line['lat']);
|
||||
$line['lon'] = floatval($line['lon']);
|
||||
$line['elevation'] = floatval($line['elevation']);
|
||||
$line['active'] = ($line['active'] === 't'); // Convert PostgreSQL boolean
|
||||
|
||||
$data[] = $line;
|
||||
}
|
||||
|
||||
$response = [
|
||||
'status' => 'success',
|
||||
'data' => $data,
|
||||
'count' => count($data)
|
||||
];
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Close connection
|
||||
pg_close($dbconn);
|
||||
|
||||
} catch (Exception $e) {
|
||||
http_response_code(500);
|
||||
$response = [
|
||||
'status' => 'error',
|
||||
'message' => $e->getMessage()
|
||||
];
|
||||
}
|
||||
|
||||
// Output JSON
|
||||
echo json_encode($response, JSON_PRETTY_PRINT | JSON_NUMERIC_CHECK);
|
||||
?>
|
||||
384
camobs.html
Normal file
384
camobs.html
Normal file
@@ -0,0 +1,384 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX Weather Camera List</title>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<meta content="text/html;charset=utf-8" http-equiv="Content-Type">
|
||||
<meta content="utf-8" http-equiv="encoding">
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div id ="container" class="mosaic">
|
||||
</div>
|
||||
|
||||
<style type="text/css">
|
||||
body {
|
||||
margin-left: 0;
|
||||
margin-right:0;
|
||||
|
||||
|
||||
}
|
||||
|
||||
.mosaic {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fill, minmax(50%, 100%));
|
||||
grid-gap: 0px;
|
||||
}
|
||||
|
||||
.image {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.image img {
|
||||
display: block;
|
||||
width: 100%;
|
||||
height: auto;
|
||||
padding: 0px 0px;
|
||||
}
|
||||
|
||||
.label {
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
left: 0px;
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
color: #fff;
|
||||
padding: 0px 0px;
|
||||
font-size: 25px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
</style>
|
||||
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
function sleep(ms = 0) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
var querytype = getUrlVars()["type"];
|
||||
var lat1 = getUrlVars()["lat1"];
|
||||
var lon1 = getUrlVars()["lon1"];
|
||||
var lat2 = getUrlVars()["lat2"];
|
||||
var lon2 = getUrlVars()["lon2"];
|
||||
var radius = getUrlVars()["radius"];
|
||||
var elevbottom = getUrlVars()["elevbottom"];
|
||||
var elevtop = getUrlVars()["elevtop"];
|
||||
|
||||
if (querytype == "bbox") {
|
||||
var camurl = "camobs.php?camstatic=bbox&lat1=" + lat1 + "&lon1=" + lon1 + "&lat2=" + lat2 + "&lon2=" + lon2 + "&elevbottom=" + elevbottom + "&elevtop=" + elevtop;
|
||||
}
|
||||
|
||||
if (querytype == "radius") {
|
||||
var camurl = "camobs.php?camstatic=radius&lat1=" + lat1 + "&lon1=" + lon1 + "&radius=" + radius;
|
||||
}
|
||||
|
||||
console.log(camurl);
|
||||
|
||||
|
||||
function getUrlVars() {
|
||||
var vars = {};
|
||||
var parts = window.location.href.replace(/[?&]+([^=&]+)=([^&]*)/gi, function(m,key,value) {
|
||||
vars[key] = value;
|
||||
});
|
||||
return vars;
|
||||
}
|
||||
|
||||
|
||||
camimages = 20;
|
||||
function setcams() {
|
||||
camimages = document.getElementById("numberofimages").value;
|
||||
refreshcams();
|
||||
}
|
||||
|
||||
var showcams = [];
|
||||
|
||||
|
||||
function filter() {
|
||||
var showcams = [];
|
||||
var x = document.getElementById("filters");
|
||||
options = x.getElementsByTagName('option');
|
||||
var y = document.getElementById("cwa").checked;
|
||||
values = [];
|
||||
for (var i=options.length; i--;) {
|
||||
if (options[i].selected) values.push(options[i].value)
|
||||
}
|
||||
|
||||
|
||||
for(var k in allcams) {
|
||||
for(var l in values) {
|
||||
|
||||
|
||||
if (allcams[k].state == values[l]) {
|
||||
if (!y) {
|
||||
showcams.push(allcams[k].camid);
|
||||
}
|
||||
if (y && 'RLX' == allcams[k].cwa) {
|
||||
showcams.push(allcams[k].camid);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
document.getElementById(allcams[k].camid).style.display = 'none';
|
||||
for(var o in showcams) {
|
||||
document.getElementById(showcams[o]).style.display = '';
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
function sortcams() {
|
||||
var sortcams = allcams;
|
||||
var x = document.getElementById("sort");
|
||||
options = x.getElementsByTagName('option');
|
||||
values = [];
|
||||
for (var i=options.length; i--;) {
|
||||
|
||||
if (options[i].selected) {
|
||||
switch(options[i].value) {
|
||||
case "elevd":
|
||||
sortcams.sort(function(a,b) {
|
||||
return b.elevation-a.elevation
|
||||
|
||||
|
||||
|
||||
})
|
||||
|
||||
|
||||
break;
|
||||
case "eleva":
|
||||
sortcams.sort(function(a,b) {
|
||||
return a.elevation-b.elevation
|
||||
|
||||
|
||||
|
||||
})
|
||||
|
||||
|
||||
|
||||
// code block
|
||||
break;
|
||||
case "westeast":
|
||||
sortcams.sort(function(a,b) {
|
||||
if (a.lon > b.lon) {
|
||||
return -1;
|
||||
}
|
||||
if (b.lon > a.lon) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
|
||||
|
||||
})
|
||||
break;
|
||||
case "northsouth":
|
||||
sortcams.sort(function(a,b) {
|
||||
if (a.lat> b.lat) {
|
||||
return -1;
|
||||
}
|
||||
if (b.lat > a.lat) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
|
||||
|
||||
})
|
||||
break;
|
||||
case "countyza":
|
||||
sortcams.sort(function(a,b) {
|
||||
if (a.county > b.county) {
|
||||
return -1;
|
||||
}
|
||||
if (b.county > a.county) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
|
||||
|
||||
})
|
||||
break;
|
||||
case "countyaz":
|
||||
sortcams.sort(function(a,b) {
|
||||
if (a.county > b.county) {
|
||||
return 1;
|
||||
}
|
||||
if (b.county > a.county) {
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
|
||||
|
||||
})
|
||||
break;
|
||||
default:
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
for (var ii in sortcams) {
|
||||
console.log(sortcams[ii].camid);
|
||||
//var xx = document.getElementsByTagName("Figure");
|
||||
var x = document.getElementById(sortcams[ii].camid);
|
||||
x.style.order=ii
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
var allcams = [];
|
||||
|
||||
|
||||
|
||||
|
||||
$.getJSON(camurl, function(data){
|
||||
console.log(data);
|
||||
allcams = data;
|
||||
for(var i in data){
|
||||
var div = document.createElement('div');
|
||||
var a = document.createElement('a');
|
||||
div.className = "image";
|
||||
div.id = data[i].camid;
|
||||
var img = document.createElement('img');
|
||||
img.src = 'camdata/' + data[i].lastimage;
|
||||
a.href = 'one.php?camid=' + data[i].camid + '&camimages=' + camimages;
|
||||
a.appendChild(img);
|
||||
div.appendChild(a);
|
||||
//div.appendChild(img);
|
||||
var span = document.createElement('span');
|
||||
span.className = 'label';
|
||||
span.innerHTML = data[i].description + ' / ' + data[i].county + ' ' + data[i].state + ' / ' + data[i].elevation + ' ft';
|
||||
div.appendChild(span);
|
||||
document.getElementById('container').appendChild(div);
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
|
||||
function refreshcams() {
|
||||
|
||||
|
||||
$.getJSON(camurl, function(data){
|
||||
console.log(data);
|
||||
for(var i in data){
|
||||
|
||||
var updatefigure = document.getElementById(data[i].camid);
|
||||
if(updatefigure.hasChildNodes()) {
|
||||
var children = updatefigure.childNodes;
|
||||
for (var m = 0; m < children.length; m++) {
|
||||
children[m].href = 'one.php?camid=' + data[i].camid + '&camimages=' + camimages;
|
||||
if (children[m].nodeName == 'A') {
|
||||
children[m].firstElementChild.src = 'camdata/' + data[i].lastimage;
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
const main = $('html');
|
||||
|
||||
async function scrollTopQuick() {
|
||||
$(window).scrollTop(0);
|
||||
scrollDownVH();
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
function scrollBottom() {
|
||||
console.log('scrolling to bottom')
|
||||
main.animate({scrollTop: document.body.offsetHeight},25000,"linear",scrollTopQuick).delay(2000)
|
||||
}
|
||||
|
||||
// this kicks off the auto-scroll loop
// $(document).ready runs scrollDownVH only once to keep things cheap;
// scrollDownVH scrolls down one viewport at a time, and once the bottom is reached
// scrollTopQuick jumps back to the top and starts scrollDownVH again
|
||||
|
||||
|
||||
async function scrollDownVH() {
|
||||
await sleep(3000);
|
||||
vh = $(window).height()
|
||||
bodyh = document.body.offsetHeight
|
||||
console.log(bodyh, window.innerHeight + window.scrollY + vh,vh);
|
||||
currentx = main.scrollTop()
|
||||
if (bodyh > (window.innerHeight + window.scrollY + vh )) {
|
||||
main.animate({scrollTop: currentx + vh},8000,"linear",scrollDownVH)
|
||||
console.log('scrolling down');
|
||||
} else {
|
||||
main.animate({scrollTop: currentx + vh},8000,"linear",scrollTopQuick)
|
||||
console.log('scrolling down then up');
|
||||
}
|
||||
}
$(document).ready(scrollDownVH);
window.setInterval(function(){
|
||||
refreshcams()
|
||||
}, 30000);
function googleMap(lat, lon){
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + lat + "+" + lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
88
camobs.php
Normal file
88
camobs.php
Normal file
@@ -0,0 +1,88 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
|
||||
//select camid,url,description from cams where cwa = 'RLX' and method = 'rtsp' and active = true AND (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and lat > %s and lat < %s and lon > %s and lon < %s order by lastsuccess desc limit 25
|
||||
|
||||
|
||||
//no GETs, current point outage info
|
||||
|
||||
//error_reporting(-1); // reports all errors
|
||||
//ini_set("display_errors", "1"); // shows all errors
|
||||
//ini_set("log_errors", 1);
|
||||
//ini_set("error_log", "php-error.log");
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if(isset($_GET['camstatic'])) {
|
||||
if ($_GET['camstatic'] == 'radius') {
|
||||
|
||||
|
||||
if($_GET['lat1']) {
|
||||
$lat1 = pg_escape_string($_GET['lat1']);
|
||||
if($_GET['lon1']) {
|
||||
$lon1 = pg_escape_string($_GET['lon1']);
|
||||
if($_GET['radius']) {
|
||||
$radius = pg_escape_string($_GET['radius']);
|
||||
$rad = $radius / 70;
|
||||
|
||||
$lat1 = floatval($lat1);
|
||||
$lon1 = floatval($lon1);
|
||||
$radius = floatval($rad);
|
||||
$query = "select * from cams where active = true and cwa = 'RLX' and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and st_dwithin(geom, ST_SetSRID(ST_Point(" . strval($lon1) . ", " . strval($lat1) . "), 4326)," . strval($radius) . ") order by elevation desc";
|
||||
$result = pg_query($dbconn,$query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
}}}}}
|
||||
|
||||
if(isset($_GET['camstatic'])) {
|
||||
if ($_GET['camstatic'] == 'bbox') {
|
||||
if($_GET['lat1']) {
|
||||
$lat1 = pg_escape_string($_GET['lat1']);
|
||||
if($_GET['lon1']) {
|
||||
$lon1 = pg_escape_string($_GET['lon1']);
|
||||
if($_GET['lat2']) {
|
||||
$lat2 = pg_escape_string($_GET['lat2']);
|
||||
if($_GET['lon2']) {
|
||||
$lon2 = pg_escape_string($_GET['lon2']);
|
||||
if($_GET['elevbottom']) {
|
||||
$elevbottom = pg_escape_string($_GET['elevbottom']);
|
||||
if($_GET['elevtop']) {
|
||||
$elevtop = pg_escape_string($_GET['elevtop']);
|
||||
|
||||
//$lat1 = floatval($lat1);
|
||||
//$lon1 = floatval($lon1);
|
||||
//$lat2 = floatval($lat2);
|
||||
//$lon2 = floatval($lon2);
|
||||
//$elevbottom = floatval($elevbottom);
|
||||
//$elevtop = floatval($elevtop);
|
||||
//echo($lat1,$lat2,$lon1,$lon2,$elevbottom,$elevtop);
|
||||
$result = pg_query_params($dbconn,
|
||||
"select * from cams where active = true and cwa = 'RLX' and elevation > $5 and elevation < $6 and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and lat < $1 and lat > $2 and lon < $3 and lon > $4 order by elevation desc",
|
||||
array($lat1,$lat2,$lon1,$lon2,$elevbottom,$elevtop)) or die('Query failed: ' . pg_last_error());
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
}}}}}}}}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
?>
|
||||
585
cams.html
Normal file
585
cams.html
Normal file
@@ -0,0 +1,585 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>RLX Weather Camera List</title>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<style>
|
||||
body {
|
||||
font-family: Arial, sans-serif;
|
||||
margin: 0;
|
||||
background-color: #f0f0f0;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
#menu {
|
||||
background-color: #003366;
|
||||
color: white;
|
||||
width: 180px;
|
||||
height: 100%;
|
||||
position: fixed;
|
||||
padding: 5px;
|
||||
box-shadow: 2px 0 5px rgba(0,0,0,0.3);
|
||||
transition: width 0.3s;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
#menu.collapsed {
|
||||
width: 40px;
|
||||
}
|
||||
|
||||
#menu.collapsed .menu-content {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#toggle-menu {
|
||||
cursor: pointer;
|
||||
padding: 8px;
|
||||
margin-bottom: 10px;
|
||||
text-align: center;
|
||||
background-color: #002244;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
#menu a {
|
||||
color: white;
|
||||
text-decoration: none;
|
||||
display: block;
|
||||
margin: 8px 0;
|
||||
padding: 8px;
|
||||
transition: background 0.3s;
|
||||
}
|
||||
|
||||
#menu a:hover {
|
||||
background-color: #005599;
|
||||
}
|
||||
|
||||
#container {
|
||||
margin-left: 180px; /* Adds space for the menu */
|
||||
padding: 12px;
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(275px, 1fr));
|
||||
gap: 5px;
|
||||
transition: margin-left 0.3s;
|
||||
}
|
||||
|
||||
#container.expanded {
|
||||
margin-left: 180px;
|
||||
}
|
||||
|
||||
#container.collapsed {
|
||||
margin-left: 40px;
|
||||
}
|
||||
|
||||
.image {
|
||||
position: relative;
|
||||
background-color: #fff;
|
||||
border-radius: 5px;
|
||||
overflow: hidden;
|
||||
box-shadow: 0 2px 5px rgba(0,0,0,0.2);
|
||||
transition: transform 0.3s;
|
||||
}
|
||||
|
||||
.image:hover {
|
||||
transform: scale(1.04);
|
||||
z-index:9999;
|
||||
}
|
||||
|
||||
.image img {
|
||||
width: 100%;
|
||||
height: auto;
|
||||
display: block;
|
||||
}
|
||||
|
||||
.label {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
background-color: rgba(0, 0, 0, 0.7);
|
||||
color: #fff;
|
||||
padding: 5px 10px;
|
||||
font-size: 12px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
select, input[type=number] {
|
||||
width: calc(100% - 20px);
|
||||
padding: 8px 10px;
|
||||
margin: 10px 0;
|
||||
border: 1px solid #ccc;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
label {
|
||||
margin: 10px 0 5px;
|
||||
display: block;
|
||||
}
|
||||
|
||||
input[type="checkbox"] {
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="menu">
|
||||
<div id="toggle-menu" onclick="toggleMenu()">☰</div>
|
||||
<div class="menu-content">
|
||||
<h2>Menu</h2>
|
||||
<a href="map.html">Map</a>
|
||||
<a href="db.html">Wunderground obs</a>
|
||||
<a href="5min.html">5m ASOS obs</a>
|
||||
<a href="outage.html">Power Outages</a>
|
||||
<a href="https://docs.google.com/forms/d/1-2rTBkNyyBVe08G1vN1hcSOEOvvLUcS1Vs2SmmaudlU" target="_blank">Questions / Comments</a>
|
||||
|
||||
<label for="filters">Filters:</label>
|
||||
<label for="states">States:</label>
|
||||
<select id="filters" name="filters" onchange="filter()" multiple>
|
||||
<option selected="selected" value="WV">WV</option>
|
||||
<option selected="selected" value="KY">KY</option>
|
||||
<option selected="selected" value="VA">VA</option>
|
||||
<option selected="selected" value="OH">OH</option>
|
||||
</select>
|
||||
|
||||
<label for="cwa">
|
||||
<input type="checkbox" id="cwa" name="cwa" value="RLX" checked onchange="filter()"> RLX only
|
||||
</label>
|
||||
|
||||
<label>Misc Filters:</label>
|
||||
<label for="hydro">
|
||||
<input type="checkbox" id="hydro" name="hydro" value="true" onchange="filter()"> Hydro
|
||||
</label>
|
||||
<label for="airport">
|
||||
<input type="checkbox" id="airport" name="airport" value="true" onchange="filter()"> Airports
|
||||
</label>
|
||||
|
||||
<label for="sort">Sorting:</label>
|
||||
<select id="sort" name="sort" onchange="sortcams()">
|
||||
<option selected="selected" value="elevd">Elev Descending</option>
|
||||
<option value="eleva">Elev Ascending</option>
|
||||
<option value="countyaz">County A-Z</option>
|
||||
<option value="countyza">County Z-A</option>
|
||||
<option value="westeast">West to East</option>
|
||||
<option value="northsouth">North to South</option>
|
||||
</select>
|
||||
|
||||
<label for="numberofimages">Frames for Loops</label>
|
||||
<input type="number" id="numberofimages" name="numberofimages" value="20" onchange="setcams()">
|
||||
|
||||
<p>Archive: 240 hours</p>
|
||||
<p id="camcount"></p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="container">
|
||||
<!-- Dynamic content will be populated here -->
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Function to set a cookie
|
||||
function setCookie(name, value, days) {
|
||||
var expires = "";
|
||||
if (days) {
|
||||
var date = new Date();
|
||||
date.setTime(date.getTime() + (days * 24 * 60 * 60 * 1000));
|
||||
expires = "; expires=" + date.toUTCString();
|
||||
}
|
||||
document.cookie = name + "=" + (value || "") + expires + "; path=/";
|
||||
}
|
||||
|
||||
// Function to get a cookie
|
||||
function getCookie(name) {
|
||||
var nameEQ = name + "=";
|
||||
var ca = document.cookie.split(';');
|
||||
for(var i=0;i < ca.length;i++) {
|
||||
var c = ca[i];
|
||||
while (c.charAt(0)==' ') c = c.substring(1,c.length);
|
||||
if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length,c.length);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Toggle menu function
|
||||
function toggleMenu() {
|
||||
var menu = document.getElementById('menu');
|
||||
var container = document.getElementById('container');
|
||||
if (menu.classList.contains('collapsed')) {
|
||||
menu.classList.remove('collapsed');
|
||||
container.classList.remove('collapsed');
|
||||
container.classList.add('expanded');
|
||||
setCookie('menuCollapsed', 'false', 365);
|
||||
} else {
|
||||
menu.classList.add('collapsed');
|
||||
container.classList.remove('expanded');
|
||||
container.classList.add('collapsed');
|
||||
setCookie('menuCollapsed', 'true', 365);
|
||||
}
|
||||
}
|
||||
|
||||
// Load settings from cookies
|
||||
function loadSettings() {
|
||||
// Load menu state
|
||||
var menuCollapsed = getCookie('menuCollapsed');
|
||||
if (menuCollapsed === 'true') {
|
||||
document.getElementById('menu').classList.add('collapsed');
|
||||
document.getElementById('container').classList.add('collapsed');
|
||||
document.getElementById('container').classList.remove('expanded');
|
||||
} else {
|
||||
document.getElementById('menu').classList.remove('collapsed');
|
||||
document.getElementById('container').classList.add('expanded');
|
||||
document.getElementById('container').classList.remove('collapsed');
|
||||
}
|
||||
|
||||
// Load number of images
|
||||
var savedImages = getCookie('numberofimages');
|
||||
if (savedImages) {
|
||||
document.getElementById('numberofimages').value = savedImages;
|
||||
camimages = savedImages;
|
||||
} else {
|
||||
camimages = 20;
|
||||
}
|
||||
|
||||
// Load states filter
|
||||
var savedStates = getCookie('statesFilter');
|
||||
if (savedStates) {
|
||||
var statesArray = JSON.parse(savedStates);
|
||||
var statesSelect = document.getElementById('filters');
|
||||
for (var i = 0; i < statesSelect.options.length; i++) {
|
||||
statesSelect.options[i].selected = statesArray.includes(statesSelect.options[i].value);
|
||||
}
|
||||
}
|
||||
|
||||
// Load RLX checkbox
|
||||
var savedRLX = getCookie('rLXOnly');
|
||||
if (savedRLX !== null) {
|
||||
document.getElementById('cwa').checked = (savedRLX === 'true');
|
||||
}
|
||||
|
||||
// Load hydro checkbox
|
||||
var savedHydro = getCookie('hydroFilter');
|
||||
if (savedHydro !== null) {
|
||||
document.getElementById('hydro').checked = (savedHydro === 'true');
|
||||
}
|
||||
|
||||
// Load airport checkbox
|
||||
var savedAirport = getCookie('airportFilter');
|
||||
if (savedAirport !== null) {
|
||||
document.getElementById('airport').checked = (savedAirport === 'true');
|
||||
}
|
||||
|
||||
// Load sort option
|
||||
var savedSort = getCookie('sortOption');
|
||||
if (savedSort) {
|
||||
document.getElementById('sort').value = savedSort;
|
||||
}
|
||||
}
|
||||
|
||||
// Save settings to cookies
|
||||
function saveSettings() {
|
||||
// Save number of images
|
||||
setCookie('numberofimages', document.getElementById('numberofimages').value, 365);
|
||||
|
||||
// Save states filter
|
||||
var statesSelect = document.getElementById('filters');
|
||||
var selectedStates = [];
|
||||
for (var i = 0; i < statesSelect.options.length; i++) {
|
||||
if (statesSelect.options[i].selected) {
|
||||
selectedStates.push(statesSelect.options[i].value);
|
||||
}
|
||||
}
|
||||
setCookie('statesFilter', JSON.stringify(selectedStates), 365);
|
||||
|
||||
// Save RLX checkbox
|
||||
setCookie('rLXOnly', document.getElementById('cwa').checked, 365);
|
||||
|
||||
// Save hydro checkbox
|
||||
setCookie('hydroFilter', document.getElementById('hydro').checked, 365);
|
||||
|
||||
// Save airport checkbox
|
||||
setCookie('airportFilter', document.getElementById('airport').checked, 365);
|
||||
|
||||
// Save sort option
|
||||
setCookie('sortOption', document.getElementById('sort').value, 365);
|
||||
}
|
||||
|
||||
camimages = 20;
|
||||
function setcams() {
|
||||
camimages = document.getElementById("numberofimages").value;
|
||||
saveSettings(); // Save when changed
|
||||
refreshcams();
|
||||
}
|
||||
|
||||
var showcams = [];
|
||||
|
||||
function filter() {
|
||||
saveSettings(); // Save when filter changes
|
||||
showcams = [];
|
||||
var x = document.getElementById("filters");
|
||||
options = x.getElementsByTagName('option');
|
||||
var y = document.getElementById("cwa").checked;
|
||||
var hydroFilter = document.getElementById("hydro").checked;
|
||||
var airportFilter = document.getElementById("airport").checked;
|
||||
values = [];
|
||||
for (var i=options.length; i--;) {
|
||||
if (options[i].selected) values.push(options[i].value)
|
||||
}
|
||||
|
||||
console.log('Hydro filter checked:', hydroFilter); // Debug
|
||||
console.log('Airport filter checked:', airportFilter); // Debug
|
||||
for(var k in allcams) {
|
||||
// Check state filter
|
||||
var stateMatch = false;
|
||||
for(var l in values) {
|
||||
if (allcams[k].state == values[l]) {
|
||||
stateMatch = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Check RLX filter
|
||||
var rlxMatch = true;
|
||||
if (y) {
|
||||
rlxMatch = ('RLX' == allcams[k].cwa);
|
||||
}
|
||||
|
||||
// Check hydro filter
|
||||
var hydroMatch = true;
|
||||
if (hydroFilter) {
|
||||
// Only show if hydro is true when filter is selected
|
||||
console.log('Camera:', allcams[k].camid, 'hydro value:', allcams[k].hydro); // Debug
|
||||
// Handle cases where hydro might be undefined or not present
|
||||
// If hydro field doesn't exist or is not true, don't show
|
||||
hydroMatch = (allcams[k].hasOwnProperty('hydro') && allcams[k].hydro === true);
|
||||
} else {
|
||||
// When hydro filter is not selected, show all (both true and false)
|
||||
hydroMatch = true;
|
||||
}
|
||||
|
||||
// Check airport filter
|
||||
var airportMatch = true;
|
||||
if (airportFilter) {
|
||||
// Only show if airport is true when filter is selected
|
||||
console.log('Camera:', allcams[k].camid, 'airport value:', allcams[k].airport); // Debug
|
||||
// Handle cases where airport might be undefined or not present
|
||||
// If airport field doesn't exist or is not true, don't show
|
||||
airportMatch = (allcams[k].hasOwnProperty('airport') && allcams[k].airport === true);
|
||||
} else {
|
||||
// When airport filter is not selected, show all (both true and false)
|
||||
airportMatch = true;
|
||||
}
|
||||
|
||||
if (stateMatch && rlxMatch && hydroMatch && airportMatch) {
|
||||
showcams.push(allcams[k].camid);
|
||||
}
|
||||
|
||||
// Hide all first
|
||||
document.getElementById(allcams[k].camid).style.display = 'none';
|
||||
}
|
||||
|
||||
// Show matching cams
|
||||
for(var o in showcams) {
|
||||
document.getElementById(showcams[o]).style.display = '';
|
||||
}
|
||||
console.log('Number of cams to show:', showcams.length); // Debug
|
||||
|
||||
// Show a message if no cameras match
|
||||
if (showcams.length === 0) {
|
||||
console.log('No cameras match the current filters');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
function sortcams() {
|
||||
saveSettings(); // Save when sort changes
|
||||
var sortcams = allcams;
|
||||
var x = document.getElementById("sort");
|
||||
options = x.getElementsByTagName('option');
|
||||
values = [];
|
||||
for (var i=options.length; i--;) {
|
||||
|
||||
if (options[i].selected) {
|
||||
switch(options[i].value) {
|
||||
case "elevd":
|
||||
sortcams.sort(function(a,b) {
|
||||
return b.elevation-a.elevation
|
||||
|
||||
|
||||
|
||||
})
|
||||
|
||||
|
||||
break;
|
||||
case "eleva":
|
||||
sortcams.sort(function(a,b) {
|
||||
return a.elevation-b.elevation
|
||||
|
||||
|
||||
|
||||
})
|
||||
|
||||
|
||||
|
||||
// code block
|
||||
break;
|
||||
case "westeast":
|
||||
sortcams.sort(function(a,b) {
|
||||
if (a.lon > b.lon) {
|
||||
return -1;
|
||||
}
|
||||
if (b.lon > a.lon) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
|
||||
|
||||
})
|
||||
break;
|
||||
case "northsouth":
|
||||
sortcams.sort(function(a,b) {
|
||||
if (a.lat> b.lat) {
|
||||
return -1;
|
||||
}
|
||||
if (b.lat > a.lat) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
|
||||
|
||||
})
|
||||
break;
|
||||
case "countyza":
|
||||
sortcams.sort(function(a,b) {
|
||||
if (a.county > b.county) {
|
||||
return -1;
|
||||
}
|
||||
if (b.county > a.county) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
|
||||
|
||||
})
|
||||
break;
|
||||
case "countyaz":
|
||||
sortcams.sort(function(a,b) {
|
||||
if (a.county > b.county) {
|
||||
return 1;
|
||||
}
|
||||
if (b.county > a.county) {
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
|
||||
|
||||
|
||||
})
|
||||
break;
|
||||
default:
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
for (var ii in sortcams) {
|
||||
//console.log(sortcams[ii].camid);
|
||||
//var xx = document.getElementsByTagName("Figure");
|
||||
var x = document.getElementById(sortcams[ii].camid);
|
||||
x.style.order=ii
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
// Load settings when the page loads
|
||||
window.onload = function() {
|
||||
loadSettings();
|
||||
$.getJSON('cam.php', function(data) {
|
||||
var wild = Math.random();
|
||||
allcams = data;
|
||||
console.log('Camera data:', data); // Debug: log camera data
|
||||
|
||||
// Check if hydro field exists in any camera
|
||||
var hasHydroField = false;
|
||||
for (var i in data) {
|
||||
if (data[i].hasOwnProperty('hydro')) {
|
||||
hasHydroField = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
console.log('Does camera data have hydro field?', hasHydroField);
|
||||
|
||||
for (var i in data) {
|
||||
var div = document.createElement('div');
|
||||
var a = document.createElement('a');
|
||||
div.className = "image";
|
||||
div.id = data[i].camid;
|
||||
var img = document.createElement('img');
|
||||
img.src = 'camdata/' + data[i].camid + "/latest.jpg"; // cache-buster "?" + wild left disabled
|
||||
a.href = 'one.php?camid=' + data[i].camid + '&camimages=' + camimages;
|
||||
a.target = "_blank";
|
||||
a.appendChild(img);
|
||||
div.appendChild(a);
|
||||
var span = document.createElement('span');
|
||||
span.className = 'label';
|
||||
span.innerHTML = data[i].description + ' / ' + data[i].county + ' ' + data[i].state + ' / ' + data[i].elevation + ' ft';
|
||||
div.appendChild(span);
|
||||
document.getElementById('container').appendChild(div);
|
||||
}
|
||||
filter();
|
||||
sortcams(); // Apply saved sort
|
||||
$.getJSON('camapi.php?camdb', function(data){
|
||||
document.getElementById("camcount").textContent = "Total Images: " + data[0].count;
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
function refreshcams() {
|
||||
|
||||
|
||||
$.getJSON('cam.php', function(data){
|
||||
var wild = Math.random();
|
||||
//console.log(data);
|
||||
for(var i in data){
|
||||
|
||||
var updatefigure = document.getElementById(data[i].camid);
|
||||
if (updatefigure != null) {
|
||||
if(updatefigure.hasChildNodes()) {
|
||||
var children = updatefigure.childNodes;
|
||||
for (var m = 0; m < children.length; m++) {
|
||||
children[m].href = 'one.php?camid=' + data[i].camid + '&camimages=' + camimages;
|
||||
if (children[m].nodeName == 'A') {
|
||||
children[m].firstElementChild.src = 'camdata/' + data[i].camid + "/latest.jpg"; // cache-buster "?" + wild left disabled
|
||||
//children[m].firstElementChild.src = 'camdata/' + data[i].lastimage;
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}}
|
||||
});
|
||||
$.getJSON('camapi.php?camdb', function(data){
|
||||
document.getElementById("camcount").textContent = "Total Images: " + data[0].count;
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
window.setInterval(function() {
|
||||
refreshcams();
|
||||
}, 30000);
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
228
circlemap.html
Normal file
228
circlemap.html
Normal file
@@ -0,0 +1,228 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX Camera Coverage</title>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<script src="https://code.jquery.com/ui/1.13.1/jquery-ui.js" integrity="sha256-6XMVI0zB8cRzfZjqKcD01PBsAy3FlDASrlC8SxCpInY=" crossorigin="anonymous"></script>
|
||||
<link rel="stylesheet" href="https://code.jquery.com/ui/1.13.1/themes/smoothness/jquery-ui.css">
|
||||
<link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
|
||||
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.7.1/dist/leaflet.css">
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/chroma-js/2.4.2/chroma.min.js" integrity="sha512-zInFF17qBFVvvvFpIfeBzo7Tj7+rQxLeTJDmbxjBz5/zIr89YVbTNelNhdTT+/DCrxoVzBeUPVFJsczKbB7sew==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
|
||||
<script src="https://unpkg.com/leaflet@1.7.1/dist/leaflet.js" integrity="sha512-XQoYMqMTK8LvdxXYG3nZ448hOEQiglfqkJs1NOQV44cWnUrBc8PkAOcXy20w0vlaXaVUearIOBhiXZ5V3ynxwA==" crossorigin=""></script>
|
||||
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="box">
|
||||
|
||||
<div id="map">
|
||||
</div>
|
||||
</div>
|
||||
<div id = "slider-2"></div>
|
||||
<div class="box2">
|
||||
<div id="changelog">
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<style type="text/css">
|
||||
/* #mapid { height: 800px; } */
|
||||
body {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html, body {
|
||||
height: 100%;
|
||||
width: 100%
|
||||
|
||||
}
|
||||
#mapid {
|
||||
height: 100%;
|
||||
background: none !important;
|
||||
|
||||
}
|
||||
#bottombar {
|
||||
/* height: 98%; */
|
||||
}
|
||||
|
||||
.box{
|
||||
position: absolute;
|
||||
top: 225px;
|
||||
z-index: 9999;
|
||||
text-align: center;
|
||||
width: 250px;
|
||||
left: 10%;
|
||||
margin-left: -75px; /* half of the width */
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
.box2{
|
||||
position: absolute;
|
||||
top: 500px;
|
||||
z-index: 9999;
|
||||
text-align: center;
|
||||
width: 250px;
|
||||
left: 10%;
|
||||
margin-left: -75px; /* half of the width */
|
||||
}
|
||||
|
||||
|
||||
|
||||
input[type=number] {
|
||||
|
||||
|
||||
width: 50px;
|
||||
}
|
||||
|
||||
input[type=text] {
|
||||
|
||||
|
||||
width: 150px;
|
||||
}
|
||||
.legend {
|
||||
line-height: 18px;
|
||||
color: #555;
|
||||
}
|
||||
.legend i {
|
||||
width: 15px;
|
||||
height: 15px;
|
||||
float: left;
|
||||
margin-right: 8px;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.info {
|
||||
padding: 6px 8px;
|
||||
font: 14px/16px Arial, Helvetica, sans-serif;
|
||||
background: white;
|
||||
background: rgba(255,255,255,0.8);
|
||||
box-shadow: 0 0 15px rgba(0,0,0,0.2);
|
||||
border-radius: 5px;
|
||||
}
|
||||
.info h4 {
|
||||
margin: 0 0 5px;
|
||||
color: #777;
|
||||
}
|
||||
|
||||
#slider-2 {
|
||||
position: absolute;
|
||||
left: 20px;
|
||||
top: 200px;
|
||||
width:300px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
</style>
|
||||
|
||||
|
||||
<div id="mapid">
|
||||
</div>
|
||||
<script>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
var mymap = L.map('mapid', {zoomDelta: 0.25, zoomSnap: 0}).setView([38.508, -81.652480], 8.0);
|
||||
|
||||
|
||||
|
||||
var Esri_WorldStreetMap = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri'
|
||||
});
|
||||
|
||||
var Esri_WorldImagery = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
|
||||
});
|
||||
|
||||
var Esri_WorldTopoMap = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri — Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community'
|
||||
});
|
||||
var USGS_USImageryTopo = L.tileLayer('https://basemap.nationalmap.gov/arcgis/rest/services/USGSImageryTopo/MapServer/tile/{z}/{y}/{x}', {
|
||||
maxZoom: 20,
|
||||
attribution: 'Tiles courtesy of the <a href="https://usgs.gov/">U.S. Geological Survey</a>'
|
||||
});
|
||||
|
||||
var TopPlusOpen_Grey = L.tileLayer('http://sgx.geodatenzentrum.de/wmts_topplus_open/tile/1.0.0/web_grau/default/WEBMERCATOR/{z}/{y}/{x}.png', {
|
||||
maxZoom: 18,
|
||||
attribution: 'Map data: © <a href="http://www.govdata.de/dl-de/by-2-0">dl-de/by-2-0</a>'
|
||||
});
|
||||
|
||||
|
||||
var baselayers = {
|
||||
"Esri Street Map": Esri_WorldStreetMap,
|
||||
"Esri Satellite": Esri_WorldImagery,
|
||||
"Esri Topo": Esri_WorldTopoMap,
|
||||
"Dense": TopPlusOpen_Grey,
|
||||
"USGS Sat/Topo": USGS_USImageryTopo
|
||||
}
|
||||
L.control.layers(baselayers,null,{collapsed: false}).addTo(mymap);
|
||||
|
||||
Esri_WorldStreetMap.addTo(mymap);
|
||||
|
||||
|
||||
|
||||
var geoJSONcounties = L.geoJSON(false, {
|
||||
style: function (feature) {
|
||||
return {
|
||||
weight: 3,
|
||||
opacity: 1,
|
||||
color: 'black',
|
||||
fillColor: 'navy',
|
||||
fillOpacity: 0
|
||||
};
|
||||
},
|
||||
}).addTo(mymap);
|
||||
|
||||
|
||||
$.getJSON('counties.json', function(data) {
|
||||
var geojsonFeature = data;
|
||||
geoJSONcounties.addData(geojsonFeature);
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
var geoJSONcircle = L.geoJSON(false, {
|
||||
style: function (feature) {
|
||||
return {
|
||||
weight: 3,
|
||||
opacity: 1,
|
||||
color: 'black',
|
||||
fillColor: 'navy',
|
||||
fillOpacity: .5
|
||||
};
|
||||
},
|
||||
}).addTo(mymap);
|
||||
|
||||
|
||||
$.getJSON('camcircle.php', function(data) {
|
||||
var geojsonFeature = data;
|
||||
geoJSONcircle.addData(geojsonFeature);
|
||||
|
||||
|
||||
|
||||
});
</script>
|
||||
</body>
|
||||
</html>
|
||||
224
coco.py
Normal file
224
coco.py
Normal file
@@ -0,0 +1,224 @@
|
||||
|
||||
|
||||
import datetime
|
||||
import requests
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
||||
import csv
|
||||
from urllib.parse import urljoin
|
||||
from urllib.parse import urlencode
|
||||
import json
|
||||
import pytz
|
||||
|
||||
from requests.auth import HTTPBasicAuth
|
||||
S = requests.Session()
|
||||
S.verify = False
|
||||
eastern = pytz.timezone('US/Eastern')
|
||||
|
||||
username = "rlx.ops@noaa.gov"
|
||||
password = "rlx25303!"
|
||||
today = datetime.date.today()
|
||||
currentday = datetime.datetime.now()
|
||||
|
||||
|
||||
y = today.year
|
||||
m = today.month
|
||||
d = today.day
|
||||
|
||||
currenttime = currentday.replace(hour=12,minute=0,second=0,microsecond=0)
|
||||
entrytime = currentday.replace(hour=12,minute=30,second=0,microsecond=0)
|
||||
eastern_time = currenttime.astimezone(eastern)
|
||||
|
||||
obtime = eastern_time.strftime("%Y-%m-%dT%H:%M:%S")
|
||||
eastern_time = entrytime.astimezone(eastern)
|
||||
entrytime = eastern_time.strftime("%Y-%m-%dT%H:%M:%S")
|
||||
|
||||
|
||||
|
||||
|
||||
def getob(y,m,d):
|
||||
url = "https://mesonet.agron.iastate.edu/request/coop/obs-dl.php?network=WV_COOP&station%5B%5D=RLXW2&year1={year}&month1={month}&day1={day}&year2={year}&month2={month}&day2={day}&what=download&delim=comma".format(year=y,month=m,day=d)
|
||||
print(url)
|
||||
response = S.get(url)
|
||||
csvdecode = response.content.decode('utf-8')
|
||||
csvstuff = csv.reader(csvdecode.splitlines(), delimiter=',')
|
||||
csvlist = list(csvstuff)
|
||||
print(csvlist[1])
|
||||
print(csvlist[1][5],csvlist[1][6],csvlist[1][7])
|
||||
return create_dict(csvlist[1][5],csvlist[1][6],csvlist[1][7],csvlist[1][3],csvlist[1][4])
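# Positional mapping assumed by the call above for the IEM COOP CSV row: columns 3 and 4
# are the max/min temperatures and columns 5, 6, 7 are precipitation, snowfall, and snow
# depth; only the first data row (csvlist[1]) is used.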
|
||||
|
||||
def create_dict(precipitation,snowfall,snowdepth,maxt,mint):
|
||||
data = {}
|
||||
print(precipitation)
|
||||
match precipitation:
|
||||
case "0":
|
||||
precipitation = float(precipitation)
|
||||
gaugeCatchIsTrace=False
|
||||
case "T":
|
||||
precipitation = float(0.00)
|
||||
gaugeCatchIsTrace=True
|
||||
case "0.0001":
|
||||
precipitation = float(0.00)
|
||||
gaugeCatchIsTrace=True
|
||||
case "M":
|
||||
precipitation = "NA"
|
||||
gaugeCatchIsTrace=False
|
||||
case "":
|
||||
precipitation = "NA"
|
||||
gaugeCatchIsTrace=False
|
||||
case _:
|
||||
|
||||
if float(precipitation) >= 0:
|
||||
precipitation = float(precipitation)
|
||||
gaugeCatchIsTrace=False
|
||||
else:
|
||||
precipitation = "NA"
|
||||
gaugeCatchIsTrace=False
|
||||
match snowfall:
|
||||
case "M":
|
||||
snowfall = "NA"
|
||||
snowfalltrace = False
|
||||
case "T":
|
||||
snowfall = 0.0
|
||||
snowfalltrace = True
|
||||
case _:
|
||||
snowfall = round(float(snowfall),1)
|
||||
snowfalltrace = False
|
||||
|
||||
match snowdepth:
|
||||
case "M":
|
||||
snowdepth = "NA"
|
||||
snowdepthtrace = False
|
||||
case "T":
|
||||
snowdepth = 0.0
|
||||
snowdepthtrace = True
|
||||
case _:
|
||||
snowdepth = round(float(snowdepth),1)
|
||||
snowdepthtrace = False
|
||||
station = "WV-KN-53"
|
||||
entryDateTime=datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
|
||||
notes="Maximum temperature: " + maxt + "F minimum temperature: " + mint + "F"
|
||||
|
||||
weatherdata = {
|
||||
"id": 0,
|
||||
"stationNumber": station,
|
||||
"stationName": "South Charleston 0.3 SSW",
|
||||
"obsDateTime": obtime,
|
||||
"gaugeCatch": {
|
||||
"sortValue": 0,
|
||||
"precipValue": precipitation,
|
||||
"formatValue": "0.00",
|
||||
"value": precipitation,
|
||||
"isTrace": gaugeCatchIsTrace,
|
||||
"isSnowDepth": False,
|
||||
"units": "US Units"
|
||||
},
|
||||
"snowfallDepth": {
|
||||
"sortValue": None,
|
||||
"precipValue": snowfall,
|
||||
"formatValue": 0.0,
|
||||
"value": snowfall,
|
||||
"isTrace": snowfalltrace,
|
||||
"isSnowDepth": True,
|
||||
"units": "US Units"
|
||||
},
|
||||
"snowfallSwe": {
|
||||
"sortValue": None,
|
||||
"precipValue": 0,
|
||||
"formatValue": "NA",
|
||||
"value": None,
|
||||
"isTrace": False,
|
||||
"isSnowDepth": False,
|
||||
"units": "US Units"
|
||||
},
|
||||
"snowpackDepth": {
|
||||
"sortValue": None,
|
||||
"precipValue": snowdepth,
|
||||
"formatValue": 0.0,
|
||||
"value": snowdepth,
|
||||
"isTrace": snowdepthtrace,
|
||||
"isSnowDepth": True,
|
||||
"units": "US Units"
|
||||
},
|
||||
"snowpackSwe": {
|
||||
"sortValue": None,
|
||||
"precipValue": 0,
|
||||
"formatValue": "NA",
|
||||
"value": None,
|
||||
"isTrace": False,
|
||||
"isSnowDepth": False,
|
||||
"units": "US Units"
|
||||
},
|
||||
"notes": notes,
|
||||
"entryDateTime": entrytime,
|
||||
"dateTimeStamp": "",
|
||||
"units": "US Units"
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return weatherdata
|
||||
|
||||
|
||||
|
||||
def coco(data):
|
||||
# Login URL
|
||||
login_url = "https://mobile.cocorahs.org/login?_data=routes%2F_auth.login"
|
||||
|
||||
# Login Credentials
|
||||
credentials = {
|
||||
"username": "rlx.ops@noaa.gov",
|
||||
"password": "rlx25303!",
|
||||
}
|
||||
|
||||
# Headers for login request (simplified from the JS fetch headers)
|
||||
headers = {
|
||||
"accept": "*/*",
|
||||
"accept-language": "en-US,en;q=0.9",
|
||||
"content-type": "application/x-www-form-urlencoded;charset=UTF-8",
|
||||
"sec-fetch-mode": "cors",
|
||||
"sec-fetch-site": "same-origin"
|
||||
}
|
||||
|
||||
# Prepare the payload for POST request
|
||||
payload = urlencode(credentials)
|
||||
|
||||
# Session for maintaining cookies
|
||||
session = requests.Session()
|
||||
|
||||
# Perform login
|
||||
login_response = session.post(login_url, headers=headers, data=payload)
|
||||
|
||||
if login_response.status_code == 204: # Assuming 204 means success for login
|
||||
url = "https://mobile.cocorahs.org/mydata/dailyprecip/add?_data=routes%2Fmydata.dailyprecip.add"
|
||||
response = session.post(url, headers=headers, json=data)
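# Note: json=data sends a JSON body, but the reused login headers still advertise
# application/x-www-form-urlencoded; if the endpoint checks Content-Type strictly,
# a separate header set with application/json may be needed here.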
|
||||
if response.status_code == 200 or response.status_code == 204:
|
||||
print("Data submitted successfully!")
|
||||
print(response.text)
|
||||
else:
|
||||
print(f"Failed to submit data. Status code: {response.status_code}")
|
||||
print(response.text)
|
||||
|
||||
|
||||
else:
|
||||
print("Login failed.")
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
data = getob(y, m, d) # This should return the expected data format
|
||||
coco(data)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
9
cook.txt
Normal file
9
cook.txt
Normal file
@@ -0,0 +1,9 @@
|
||||
# Netscape HTTP Cookie File
|
||||
# This file is generated by youtube-dl. Do not edit.
|
||||
|
||||
.youtube.com TRUE / FALSE 0 CONSENT YES+cb.20210328-17-p0.en+FX+882
|
||||
.youtube.com TRUE / TRUE 1729387626 GPS 1
|
||||
.youtube.com TRUE / TRUE 1744937826 VISITOR_INFO1_LIVE _wIP9Hd0HAk
|
||||
.youtube.com TRUE / TRUE 1744937826 VISITOR_PRIVACY_METADATA CgJVUxIEGgAgRg%3D%3D
|
||||
.youtube.com TRUE / TRUE 0 YSC SwaLDf_2zXc
|
||||
.youtube.com TRUE / FALSE 0 wide 1
|
||||
1
counties.geojson
Normal file
1
counties.geojson
Normal file
File diff suppressed because one or more lines are too long
51
counties.json
Normal file
51
counties.json
Normal file
@@ -0,0 +1,51 @@
|
||||
{"type":"FeatureCollection", "features": [
|
||||
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.91519928,38.325611115],[-81.900993347,38.395011902],[-81.899299622,38.399112701],[-81.835296631,38.429012299],[-81.823196411,38.430011749],[-81.810997009,38.435913086],[-81.777694702,38.446811676],[-81.723197937,38.468212128],[-81.734199524,38.500411987],[-81.743293762,38.510410309],[-81.74319458,38.517810822],[-81.753395081,38.524410248],[-81.747795105,38.552711487],[-81.748497009,38.567012787],[-81.755493164,38.581611633],[-81.750396729,38.59041214],[-81.738197327,38.599010468],[-81.729797363,38.59671402],[-81.695899963,38.625911713],[-81.747398376,38.683811188],[-81.772598267,38.680812836],[-81.955795288,38.657310486],[-82.064193726,38.608112335],[-82.063697815,38.596111298],[-82.055099487,38.474514008],[-82.05039978,38.404212952],[-82.047294617,38.374511719],[-82.04309845,38.337112427],[-82.048400879,38.292011261],[-82.02809906,38.279613495],[-82.015899658,38.279312134],[-82.016296387,38.2721138],[-82.004196167,38.27161026],[-81.997497559,38.26701355],[-81.990997314,38.275310516],[-81.973594666,38.28301239],[-81.91519928,38.325611115]]]},"properties":null,"id":"Putnam"},
|
||||
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.147598267,37.04101181],[-82.136795044,37.050510406],[-82.138900757,37.064113617],[-82.145294189,37.076610565],[-82.156494141,37.088111877],[-82.161697388,37.100513458],[-82.158195496,37.110610962],[-82.168792725,37.114013672],[-82.174499512,37.12191391],[-82.172897339,37.129112244],[-82.182197571,37.140411377],[-82.189598083,37.145313263],[-82.191894531,37.156013489],[-82.203895569,37.160011292],[-82.235992432,37.211811066],[-82.249893188,37.270713806],[-82.256095886,37.278411865],[-82.265197754,37.279811859],[-82.270195007,37.28881073],[-82.293792725,37.305812836],[-82.300697327,37.304710388],[-82.31489563,37.295413971],[-82.32460022,37.283512115],[-82.34299469,37.280513763],[-82.34299469,37.274112701],[-82.351295471,37.26701355],[-82.377998352,37.259910583],[-82.384895325,37.259613037],[-82.411994934,37.253112793],[-82.431999207,37.246711731],[-82.447998047,37.244010925],[-82.464797974,37.235813141],[-82.477600098,37.234111786],[-82.50819397,37.222312927],[-82.520195007,37.212612152],[-82.529296875,37.213413239],[-82.534095764,37.206611633],[-82.553894043,37.202613831],[-82.546600342,37.129211426],[-82.54019928,37.119911194],[-82.499893188,37.067611694],[-82.493095398,37.057510376],[-82.491699219,37.043712616],[-82.477592468,37.033813477],[-82.476898193,37.02281189],[-82.463294983,37.019813538],[-82.403793335,36.985912323],[-82.379798889,36.979110718],[-82.365493774,36.968410492],[-82.362594604,36.957111359],[-82.347694397,36.958713531],[-82.292694092,36.992210388],[-82.27519989,37.000911713],[-82.261795044,36.989810944],[-82.255897522,36.989711761],[-82.24319458,36.99641037],[-82.199195862,37.028911591],[-82.184494019,37.026912689],[-82.180892944,37.033512115],[-82.1690979,37.040611267],[-82.161193848,37.037513733],[-82.147598267,37.04101181]]]},"properties":null,"id":"Dickenson"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.927200317,37.5119133],[-81.945098877,37.515113831],[-81.946800232,37.531414032],[-81.954299927,37.528110504],[-81.959396362,37.53521347],[-81.968193054,37.537811279],[-82.050598145,37.480712891],[-82.062698364,37.471012115],[-82.144798279,37.413612366],[-82.185096741,37.386413574],[-82.289497375,37.313411713],[-82.300697327,37.304710388],[-82.293792725,37.305812836],[-82.270195007,37.28881073],[-82.265197754,37.279811859],[-82.256095886,37.278411865],[-82.249893188,37.270713806],[-82.235992432,37.211811066],[-82.203895569,37.160011292],[-82.191894531,37.156013489],[-82.189598083,37.145313263],[-82.182197571,37.140411377],[-82.172897339,37.129112244],[-82.174499512,37.12191391],[-82.168792725,37.114013672],[-82.158195496,37.110610962],[-82.161697388,37.100513458],[-82.156494141,37.088111877],[-82.145294189,37.076610565],[-82.138900757,37.064113617],[-82.136795044,37.050510406],[-82.147598267,37.04101181],[-82.137496948,37.043113708],[-82.116592407,37.039512634],[-82.106994629,37.042510986],[-82.106697083,37.04801178],[-82.096794128,37.051311493],[-82.086997986,37.041313171],[-82.076400757,37.038913727],[-82.055999756,37.048213959],[-82.041694641,37.048511505],[-82.030899048,37.065113068],[-82.018096924,37.069313049],[-82.014694214,37.079311371],[-82.02129364,37.085411072],[-82.016593933,37.100914001],[-82.007194519,37.116012573],[-81.98739624,37.123710632],[-81.97769928,37.12071228],[-81.967895508,37.122112274],[-81.964195251,37.127410889],[-81.953697205,37.126213074],[-81.94619751,37.129711151],[-81.9375,37.126811981],[-81.926994324,37.1328125],[-81.914100647,37.131813049],[-81.908798218,37.140213013],[-81.901298523,37.142913818],[-81.897796631,37.151412964],[-81.890197754,37.149410248],[-81.877098083,37.15221405],[-81.867698669,37.157611847],[-81.871192932,37.162910461],[-81.85369873,37.174713135],[-81.844398499,37.173610687],[-81.831100464,37.180412292],[-81.821395874,37.179111481],[-81.811897278,37.185112],[-81.798095703,37.18611145],[-81.784393311,37.19411087],[-81.773895264,37.203613281],[-81.764892578,37.20111084],[-81.757995605,37.206012726],[-81.755493164,37.218513489],[-81.741096497,37.226711273],[-81.740493774,37.237213135],[-81.743995667,37.243312836],[-81.741699219,37.254512787],[-81.746795654,37.263214111],[-81.756500244,37.268211365],[-81.757598877,37.275611867],[-81.776695251,37.275611877],[-81.78969574,37.283912659],[-81.804595947,37.285511017],[-81.82459259,37.279510498],[-81.838699341,37.286113739],[-81.852996826,37.287811279],[-81.854400635,37.30531311],[-81.860694885,37.314113617],[-81.870796204,37.321510315],[-81.878799438,37.331413269],[-81.895896912,37.332111359],[-81.899497986,37.340511322],[-81.917198181,37.349811554],[-81.928100586,37.360412598],[-81.936096191,37.38161087],[-81.929199219,37.393512726],[-81.929000854,37.405612946],[-81.92339325,37.411212921],[-81.938095093,37.421310425],[-81.940093994,37.429210663],[-81.935897827,37.438510895],[-81.945495605,37.440910339],[-81.957893372,37.448612213],[-81.966293335,37.446811676],[-81.975296021,37.456611633],[-81.985298157,37.454410553],[-81.992294312,37.461513519],[-81.995697021,37.477210999],[-81.990699768,37.484313965],[-81.97869873,37.484912872],[-81.964393616,37.4935112],[-81.953399658,37.492012024],[-81.95249939,37.501712799],[-81.943496704,37.507511139],[-81.927200317,37.5119133]]]},"properties":null,"id":"Buchanan"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.082397461,38.945812225],[-80.079696655,38.951412201],[-80.073699951,38.951213837],[-80.063499451,38.960613251],[-80.061599731,38.974811554],[-80.053199768,38.975013733],[-80.051292419,38.993312836],[-80.047698975,38.99641037],[-80.054100037,39.005512238],[-80.044799805,39.01001358],[-80.058097839,39.022911072],[-80.054092407,39.035011292],[-80.067497253,39.037910461],[-80.127799988,39.037811279],[-80.144096375,39.056713104],[-80.151496887,39.071212769],[-80.167800903,39.087612152],[-80.19960022,39.098510742],[-80.209793091,39.09771347],[-80.21799469,39.107913971],[-80.22769928,39.113613129],[-80.240493774,39.112911224],[-80.249298096,39.100811005],[-80.300292969,39.104011536],[-80.305198669,39.061210632],[-80.318695068,39.012710571],[-80.334495544,38.996612549],[-80.32559967,38.974212646],[-80.319892883,38.969512939],[-80.317993164,38.958610535],[-80.328399658,38.954612732],[-80.334999084,38.947013855],[-80.346298218,38.941513062],[-80.339599609,38.93221283],[-80.349197388,38.921012878],[-80.348396301,38.916812897],[-80.3618927,38.905513763],[-80.361099243,38.900310516],[-80.36819458,38.887813568],[-80.380096436,38.887313843],[-80.387496948,38.872211456],[-80.409500122,38.768211365],[-80.392593384,38.727611542],[-80.388793945,38.729812622],[-80.364898682,38.731010437],[-80.353096008,38.729812622],[-80.344795227,38.720813751],[-80.335494995,38.720413208],[-80.330993652,38.701213837],[-80.32169342,38.694713593],[-80.319099426,38.684711456],[-80.312095642,38.684013367],[-80.288795471,38.690612793],[-80.280395508,38.694911957],[-80.236198425,38.743812561],[-80.126098633,38.745811462],[-80.103797913,38.821910858],[-80.098594666,38.841812134],[-80.109100342,38.897212982],[-80.101493835,38.907913208],[-80.093399048,38.910312653],[-80.088096619,38.922210693],[-80.096694946,38.928012848],[-80.087593079,38.935012817],[-80.09009552,38.939712524],[-80.082397461,38.945812225]]]},"properties":null,"id":"Upshur"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.456794739,37.987510681],[-81.462493896,37.98991394],[-81.481697083,37.986812592],[-81.486793518,37.982112885],[-81.506393433,37.987911224],[-81.513397217,37.984413147],[-81.526992798,37.971313477],[-81.539299011,37.970710754],[-81.545097351,37.961013794],[-81.555793762,37.953811646],[-81.55569458,37.946212769],[-81.571296692,37.927810669],[-81.5625,37.918613434],[-81.563995361,37.903110504],[-81.554397583,37.893512726],[-81.561897278,37.888111115],[-81.558494568,37.877311707],[-81.564300537,37.873813629],[-81.561195374,37.84331131],[-81.552497864,37.82541275],[-81.541793823,37.827911377],[-81.532699585,37.825912476],[-81.517700195,37.817813873],[-81.520095825,37.79731369],[-81.51449585,37.791213989],[-81.492897034,37.790813446],[-81.487594604,37.783210754],[-81.475700378,37.781513214],[-81.4713974,37.785110474],[-81.458496094,37.784313202],[-81.444595337,37.761211395],[-81.436599731,37.752910614],[-81.428398132,37.750110626],[-81.396697998,37.706012726],[-81.314498901,37.587810516],[-81.310592651,37.576713562],[-81.316299438,37.562110901],[-81.312294006,37.553512573],[-81.303497314,37.546310425],[-81.285499573,37.540912628],[-81.262496948,37.527610779],[-81.254096985,37.532310486],[-81.245994568,37.530212402],[-81.238296509,37.523212433],[-81.22479248,37.516010284],[-81.222595215,37.510414124],[-81.214393616,37.50851059],[-81.207298279,37.524711609],[-81.210395813,37.530014038],[-81.199897766,37.539913177],[-81.2029953,37.550010681],[-81.199195862,37.559810638],[-81.197494507,37.575611115],[-81.184997559,37.581111908],[-81.177497864,37.580112457],[-81.172492981,37.586112976],[-81.144798279,37.595912933],[-81.135398865,37.59771347],[-81.117492676,37.588813782],[-81.108299255,37.593212128],[-81.094093323,37.588611603],[-81.087394714,37.598011017],[-81.07359314,37.602012634],[-81.064598083,37.610813141],[-81.069396973,37.628612518],[-81.068595886,37.640010834],[-81.040092468,37.652812958],[-81.021194458,37.669712067],[-81.007095337,37.674510956],[-81.005699158,37.678310394],[-80.990493774,37.685512543],[-80.98500061,37.693313599],[-80.974197388,37.696113586],[-80.978996277,37.705013275],[-80.975692749,37.714611053],[-80.960296631,37.712013245],[-80.94569397,37.715312958],[-80.929695129,37.709411621],[-80.925193787,37.704311371],[-80.914894104,37.703212738],[-80.905296326,37.69751358],[-80.897598267,37.687911987],[-80.899299622,37.67521286],[-80.879493713,37.682712555],[-80.879997253,37.694713593],[-80.889892578,37.701812744],[-80.892196655,37.719211578],[-80.897796631,37.72851181],[-80.917396545,37.737411499],[-80.92339325,37.742912292],[-80.919494629,37.750713348],[-80.90259552,37.75661087],[-80.893600464,37.771713257],[-80.902297974,37.782310486],[-80.920799255,37.79291153],[-80.924797058,37.807411194],[-80.936393738,37.816413879],[-80.944396973,37.819213867],[-80.964500427,37.831310272],[-80.978096008,37.833110809],[-80.991500854,37.831012726],[-81.028198242,37.829711914],[-81.040100098,37.833713531],[-81.055496216,37.834312439],[-81.058395386,37.842811584],[-81.049095154,37.851211548],[-81.048995972,37.856113434],[-81.073295593,37.853912354],[-81.085693359,37.850212097],[-81.099594116,37.854412079],[-81.097496033,37.860111237],[-81.073799133,37.86051178],[-81.070594788,37.865711212],[-81.073493958,37.875911713],[-81.119697571,37.879211426],[-81.249794006,37.885810852],[-81.295097351,37.889411926],[-81.299995422,37.895713806],[-81.294898987,37.906513214],[-81.297798157,37.910812378],[-81.301094055,37.932411194],[-81.2959976
2,37.938213348],[-81.298095703,37.9453125],[-81.326599121,37.956512451],[-81.329399109,37.96661377],[-81.343093872,37.970314026],[-81.351097107,37.976512909],[-81.362197876,37.971912384],[-81.379295349,37.968811035],[-81.396697998,37.96761322],[-81.413497925,37.973911285],[-81.422698975,37.96761322],[-81.431297302,37.971813202],[-81.456497192,37.978313446],[-81.456794739,37.987510681]]]},"properties":null,"id":"Raleigh"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-79.626594543,38.664413452],[-79.634994507,38.665611267],[-79.626495361,38.677513123],[-79.633895874,38.681812286],[-79.644294739,38.681110382],[-79.658493042,38.689411163],[-79.678092957,38.681911469],[-79.679199219,38.664913177],[-79.697296143,38.666313171],[-79.70199585,38.679611206],[-79.708595276,38.684810638],[-79.71849823,38.685012817],[-79.723594666,38.680213928],[-79.739295959,38.680011749],[-79.735397339,38.712310791],[-79.739097595,38.71761322],[-79.750198364,38.721912384],[-79.754997253,38.733810425],[-79.767196655,38.735111237],[-79.776092529,38.739711761],[-79.802093506,38.709812164],[-79.799194336,38.701713562],[-79.808395386,38.691711426],[-79.808197021,38.682811737],[-79.814193726,38.676013947],[-79.823097229,38.655410767],[-79.833396912,38.646812439],[-79.830200195,38.6369133],[-79.835700989,38.630210876],[-79.846496582,38.629611969],[-79.843093872,38.607013702],[-79.856399536,38.583511353],[-79.855697632,38.576213837],[-79.863296509,38.551113129],[-79.891799927,38.534912109],[-80.028694153,38.459312439],[-80.116195679,38.473812103],[-80.245498657,38.388412476],[-80.330200195,38.335811615],[-80.35219574,38.345314026],[-80.360099792,38.225811005],[-80.363098145,38.114212036],[-80.264694214,38.046611786],[-80.229095459,38.04221344],[-80.217796326,38.042510986],[-80.206497192,38.037212372],[-80.198493958,38.04291153],[-80.188392639,38.039211273],[-80.168800354,38.036312103],[-80.159896851,38.03811264],[-80.106895447,38.055812836],[-79.960197449,38.063911438],[-79.953598022,38.081413269],[-79.934898376,38.096912384],[-79.926399231,38.106712341],[-79.938293457,38.111011505],[-79.943893433,38.133010864],[-79.933998108,38.135311127],[-79.925895691,38.149414062],[-79.919296265,38.154613495],[-79.916999817,38.177112579],[-79.913192749,38.188011169],[-79.897895813,38.193412781],[-79.888298035,38.207111359],[-79.88419342,38.207511902],[-79.849693298,38.234413147],[-79.845100403,38.240810394],[-79.834693909,38.242713928],[-79.831298828,38.249313354],[-79.819694519,38.248313904],[-79.811393738,38.260410309],[-79.801696777,38.261211395],[-79.789497375,38.268413544],[-79.786994934,38.274810791],[-79.79549408,38.290210724],[-79.80279541,38.292510986],[-79.809898376,38.306312561],[-79.79649353,38.323513031],[-79.784599304,38.331512451],[-79.773895264,38.334812164],[-79.76449585,38.356212616],[-79.757598877,38.35761261],[-79.741798401,38.35351181],[-79.726295471,38.362911224],[-79.731399536,38.374313354],[-79.726295471,38.3869133],[-79.706893921,38.415313721],[-79.690093994,38.431110382],[-79.688598633,38.457813263],[-79.699295044,38.474411011],[-79.69379425,38.47971344],[-79.694900513,38.493110657],[-79.683097839,38.502010345],[-79.680297852,38.510112762],[-79.670097351,38.50851059],[-79.662796021,38.514911652],[-79.67199707,38.528614044],[-79.67049408,38.540710449],[-79.662696838,38.570110321],[-79.640296936,38.604110718],[-79.627700806,38.608112335],[-79.619796753,38.622512817],[-79.625694275,38.625110626],[-79.633499146,38.637012482],[-79.618499756,38.658111572],[-79.626594543,38.664413452]]]},"properties":null,"id":"Pocahontas"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.007499695,39.350311279],[-81.015296936,39.410610199],[-81.023094177,39.441810608],[-81.025596619,39.463111877],[-81.031799316,39.468212128],[-81.054100037,39.467712402],[-81.060699463,39.463813782],[-81.07849884,39.467510223],[-81.095695496,39.457611084],[-81.107398987,39.463111877],[-81.121498108,39.457813263],[-81.133399963,39.445713043],[-81.163497925,39.4412117],[-81.185096741,39.431510925],[-81.18989563,39.424510956],[-81.205200195,39.410812378],[-81.211196899,39.402511597],[-81.211296082,39.394210815],[-81.223594666,39.386013031],[-81.241798401,39.390312195],[-81.269897461,39.386112213],[-81.28099823,39.379413605],[-81.296798706,39.374713898],[-81.320098877,39.361011505],[-81.350799561,39.344612122],[-81.370994568,39.341213226],[-81.365592957,39.323810577],[-81.348495483,39.328613281],[-81.341499329,39.31911087],[-81.333999634,39.324012756],[-81.325897217,39.319812775],[-81.328094482,39.311313629],[-81.323593139,39.298912057],[-81.312095642,39.299812317],[-81.282997131,39.296611786],[-81.260993958,39.291713715],[-81.2602005,39.282012939],[-81.24949646,39.277011871],[-81.239593506,39.268310547],[-81.231895447,39.27381134],[-81.231498718,39.289710999],[-81.22480011,39.301311493],[-81.214897156,39.308612823],[-81.190093994,39.306812286],[-81.190093994,39.313613892],[-81.172096252,39.305713654],[-81.165596008,39.312011719],[-81.158599854,39.310211182],[-81.141700745,39.313510895],[-81.12159729,39.314311981],[-81.112495422,39.324012756],[-81.102996826,39.323413849],[-81.08139801,39.330513],[-81.060195923,39.323513031],[-81.043495178,39.330612183],[-81.043495178,39.337810516],[-81.035697937,39.345813751],[-81.024795532,39.346813202],[-81.016098022,39.35131073],[-81.007499695,39.350311279]]]},"properties":null,"id":"Pleasants"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.651794434,38.526313782],[-80.670700073,38.528011322],[-80.69619751,38.536113739],[-80.704795837,38.533412933],[-80.733093262,38.533813477],[-80.742294312,38.52671051],[-80.749595642,38.526012421],[-80.760597229,38.541412354],[-80.775497437,38.54731369],[-80.781593323,38.556812286],[-80.796897888,38.551612854],[-80.880599976,38.507411957],[-80.865196228,38.500610352],[-80.825500488,38.494411469],[-80.817695618,38.478912354],[-80.827400208,38.471813202],[-80.91129303,38.414813995],[-80.999794006,38.372310638],[-81.124794006,38.313713074],[-81.202194214,38.277610779],[-81.229095459,38.263813019],[-81.220397949,38.261611938],[-81.203796387,38.248710632],[-81.193496704,38.250011444],[-81.189498901,38.231113434],[-81.173194885,38.232810974],[-81.153297424,38.22631073],[-81.14339447,38.236110687],[-81.13319397,38.235111237],[-81.116294861,38.220813751],[-81.109695435,38.212612152],[-81.095794678,38.205310822],[-81.084793091,38.204013824],[-81.073394775,38.196113586],[-81.068199158,38.211410522],[-81.051193237,38.213012695],[-81.057594299,38.222812653],[-81.046600342,38.223613739],[-81.029693604,38.238113403],[-81.027694702,38.22971344],[-81.031692505,38.218013763],[-81.021598816,38.215312958],[-81.019493103,38.206413269],[-81.007194519,38.206012726],[-81.011299133,38.215110779],[-81.007995605,38.221012115],[-80.983894348,38.222412109],[-80.969200134,38.215312958],[-80.964698792,38.201213837],[-80.957199097,38.202510834],[-80.944396973,38.193511963],[-80.950294495,38.176612854],[-80.94960022,38.169811249],[-80.937896729,38.153312683],[-80.927497864,38.15071106],[-80.918693542,38.141513824],[-80.908493042,38.142711639],[-80.910293579,38.132911682],[-80.902496338,38.122711182],[-80.885894775,38.135913849],[-80.882598877,38.133010864],[-80.88759613,38.122112274],[-80.876792908,38.110313416],[-80.881599426,38.103111267],[-80.867797852,38.101810455],[-80.754196167,38.086914062],[-80.711997986,38.079113007],[-80.690696716,38.09161377],[-80.572898865,38.171813965],[-80.525596619,38.203411102],[-80.459899902,38.249713898],[-80.43649292,38.267311096],[-80.497093201,38.299713135],[-80.609596252,38.359012604],[-80.61239624,38.366111755],[-80.625099182,38.354812622],[-80.634597778,38.375411987],[-80.661697388,38.412513733],[-80.640792847,38.424510956],[-80.677696228,38.504013062],[-80.651794434,38.526313782]]]},"properties":null,"id":"Nicholas"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.802894592,37.66091156],[-81.815696716,37.655910492],[-81.837997437,37.671913147],[-81.848495483,37.664611816],[-81.855400085,37.667613983],[-81.862693787,37.676513672],[-81.870796204,37.679813385],[-81.874397278,37.670711517],[-81.890098572,37.665912628],[-81.896392822,37.661411285],[-81.905799866,37.66311264],[-81.912094116,37.671611786],[-81.913795471,37.680011749],[-81.927696228,37.68221283],[-81.946495056,37.671611786],[-81.951095581,37.674510956],[-81.966094971,37.665412903],[-81.978393555,37.66791153],[-81.986297607,37.658611298],[-81.987792969,37.650211334],[-81.994697571,37.643013],[-82.002296448,37.655410767],[-82.002098083,37.662612915],[-82.022697449,37.669212341],[-82.023696907,37.681110391],[-82.032096863,37.686710358],[-82.041397095,37.701412201],[-82.05039978,37.700813293],[-82.06439209,37.705310822],[-82.07989502,37.700111389],[-82.085899353,37.707710266],[-82.086097717,37.732013702],[-82.094093323,37.735412598],[-82.091697693,37.742313385],[-82.102798462,37.746112823],[-82.112197876,37.753013611],[-82.110397335,37.766010283],[-82.10369873,37.771011353],[-82.100395203,37.786510468],[-82.105400085,37.800811768],[-82.112800598,37.801712036],[-82.126098633,37.810012817],[-82.132598877,37.810413361],[-82.134094238,37.826412201],[-82.145896912,37.839313507],[-82.133293152,37.846912384],[-82.114494324,37.852413177],[-82.128295898,37.878913879],[-82.11869812,37.882312775],[-82.121192932,37.893211365],[-82.128898621,37.901012421],[-82.143798828,37.900913239],[-82.160293579,37.908611298],[-82.164299011,37.914611816],[-82.163795471,37.923713684],[-82.171295166,37.926811218],[-82.172897339,37.936611176],[-82.185195923,37.942111969],[-82.181495667,37.950511932],[-82.197097778,37.963512421],[-82.190597534,37.973613739],[-82.192993164,37.974212646],[-82.30619812,37.943710327],[-82.403694153,37.862911224],[-82.394393921,37.842811584],[-82.409599304,37.848613739],[-82.415100098,37.856212616],[-82.420097351,37.84671402],[-82.412696838,37.845111847],[-82.40839386,37.836112976],[-82.399597168,37.82951355],[-82.401794434,37.810813904],[-82.389694214,37.816711426],[-82.380096436,37.811611176],[-82.377197266,37.803413391],[-82.339294434,37.784713745],[-82.335998535,37.774711609],[-82.31879425,37.764610291],[-82.321395874,37.751613617],[-82.333999634,37.742012024],[-82.318397522,37.732913971],[-82.316299438,37.722213745],[-82.307197571,37.708312988],[-82.297096252,37.702911377],[-82.302696228,37.69311142],[-82.296798706,37.684810638],[-82.294494629,37.670913696],[-82.287994385,37.668510437],[-82.281295776,37.674812317],[-82.271194458,37.663513184],[-82.254592896,37.656612396],[-82.240196228,37.661712646],[-82.225997925,37.652812958],[-82.224693298,37.646511078],[-82.216796875,37.641010284],[-82.220092773,37.634113312],[-82.215194702,37.626811981],[-82.186500549,37.628311157],[-82.190994263,37.643013],[-82.18359375,37.649112701],[-82.172897339,37.633113861],[-82.181900024,37.625911713],[-82.176597595,37.618312836],[-82.164497375,37.61971283],[-82.168395996,37.609012604],[-82.156799316,37.609012604],[-82.156600952,37.592910767],[-82.149093628,37.591110229],[-82.141395569,37.595111847],[-82.133598328,37.593612671],[-82.127593994,37.586711884],[-82.124595642,37.576412201],[-82.128395081,37.572311401],[-82.143493652,37.5703125],[-82.133796692,37.561710358],[-82.134094238,37.554111481],[-82.125900269,37.551212311],[-82.116600037,37.559513092],[-82.102493286,37.559112549],[-82.100097656,37.553211212],[-82.075500488,37.555912018],[-82.067
893982,37.548610687],[-82.064399719,37.537910461],[-82.042396545,37.534812927],[-82.044692993,37.546413422],[-82.028594971,37.537712097],[-82.020195007,37.540512085],[-82.014900208,37.533912659],[-82.007896423,37.533611298],[-82.000297546,37.542110443],[-81.994499207,37.537712097],[-81.969894409,37.546310425],[-81.968193054,37.537811279],[-81.959396362,37.53521347],[-81.954299927,37.528110504],[-81.946800232,37.531414032],[-81.945098877,37.515113831],[-81.927200317,37.5119133],[-81.922492981,37.512210846],[-81.909698486,37.52671051],[-81.89289856,37.525512695],[-81.883094788,37.538013458],[-81.868995667,37.538410187],[-81.855895996,37.549613953],[-81.85559845,37.560710907],[-81.840499878,37.5662117],[-81.837799072,37.574913025],[-81.828193665,37.587612152],[-81.828094482,37.598011017],[-81.81489563,37.600112915],[-81.80279541,37.61341095],[-81.800697327,37.625312805],[-81.795799255,37.631813049],[-81.802398682,37.644412994],[-81.79989624,37.651611328],[-81.802894592,37.66091156]]]},"properties":null,"id":"Mingo"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.93359375,38.025413513],[-81.943595886,38.021411896],[-81.988899231,38.024311066],[-81.99899292,38.020713806],[-82.013595581,38.0221138],[-82.017700195,38.017513275],[-82.044898987,38.014610291],[-82.124900818,37.993011475],[-82.152496338,37.984313965],[-82.188293457,37.975410461],[-82.190597534,37.973613739],[-82.197097778,37.963512421],[-82.181495667,37.950511932],[-82.185195923,37.942111969],[-82.172897339,37.936611176],[-82.171295166,37.926811218],[-82.163795471,37.923713684],[-82.164299011,37.914611816],[-82.160293579,37.908611298],[-82.143798828,37.900913239],[-82.128898621,37.901012421],[-82.121192932,37.893211365],[-82.11869812,37.882312775],[-82.128295898,37.878913879],[-82.114494324,37.852413177],[-82.133293152,37.846912384],[-82.145896912,37.839313507],[-82.134094238,37.826412201],[-82.132598877,37.810413361],[-82.126098633,37.810012817],[-82.112800598,37.801712036],[-82.105400085,37.800811768],[-82.100395203,37.786510468],[-82.10369873,37.771011353],[-82.110397335,37.766010283],[-82.112197876,37.753013611],[-82.102798462,37.746112823],[-82.091697693,37.742313385],[-82.094093323,37.735412598],[-82.086097717,37.732013702],[-82.085899353,37.707710266],[-82.07989502,37.700111389],[-82.06439209,37.705310822],[-82.05039978,37.700813293],[-82.041397095,37.701412201],[-82.032096863,37.686710358],[-82.023696907,37.681110391],[-82.022697449,37.669212341],[-82.002098083,37.662612915],[-82.002296448,37.655410767],[-81.994697571,37.643013],[-81.987792969,37.650211334],[-81.986297607,37.658611298],[-81.978393555,37.66791153],[-81.966094971,37.665412903],[-81.951095581,37.674510956],[-81.946495056,37.671611786],[-81.927696228,37.68221283],[-81.913795471,37.680011749],[-81.912094116,37.671611786],[-81.905799866,37.66311264],[-81.896392822,37.661411285],[-81.890098572,37.665912628],[-81.874397278,37.670711517],[-81.870796204,37.679813385],[-81.862693787,37.676513672],[-81.855400085,37.667613983],[-81.848495483,37.664611816],[-81.837997437,37.671913147],[-81.815696716,37.655910492],[-81.802894592,37.66091156],[-81.792098999,37.671112061],[-81.786300659,37.67011261],[-81.775497437,37.678211212],[-81.763198853,37.678710937],[-81.754600525,37.685512543],[-81.746994019,37.680011749],[-81.736694336,37.682811737],[-81.73349762,37.691711426],[-81.722297668,37.698413849],[-81.70249939,37.701511383],[-81.763793945,37.751811981],[-81.747299194,37.762413025],[-81.707496643,37.764610291],[-81.685195923,37.77571106],[-81.674293518,37.773612976],[-81.654495239,37.778911591],[-81.638298035,37.77791214],[-81.606994629,37.788711548],[-81.620697021,37.801212311],[-81.631599426,37.803310394],[-81.632598877,37.808013916],[-81.64629364,37.817710876],[-81.644096375,37.823013306],[-81.658294678,37.833812714],[-81.66759491,37.829711914],[-81.678199768,37.830810547],[-81.691497803,37.821910858],[-81.700996399,37.821212769],[-81.707199097,37.813411713],[-81.723098755,37.810611725],[-81.732299805,37.817211151],[-81.730895996,37.823913574],[-81.744995117,37.830112457],[-81.74609375,37.843013763],[-81.76159668,37.849411011],[-81.772193909,37.861412048],[-81.781799316,37.868713379],[-81.759498596,37.885311127],[-81.758293152,37.892711639],[-81.761497498,37.910812378],[-81.77230072,37.913311005],[-81.774200439,37.922412872],[-81.788894653,37.936511993],[-81.799194336,37.935913086],[-81.814193726,37.946712494],[-81.86089325,37.948413849],[-81.901893616,37.961410522],[-81.902694702,37.968212128],[-81.911499023,37.974212646],[-81.923095703,37.976810455],[-81.93959
8083,37.976810455],[-81.95009613,37.971912384],[-81.976997375,37.982212067],[-81.979698181,37.991111755],[-81.960800171,38.007511139],[-81.93359375,38.025413513]]]},"properties":null,"id":"Logan"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.907897949,38.878112793],[-81.927497864,38.892612457],[-81.926696777,38.901313782],[-81.900398254,38.924911499],[-81.900398254,38.937011719],[-81.908996582,38.948711395],[-81.91759491,38.965610504],[-81.932693481,38.986812592],[-81.950996399,38.995910645],[-81.974494934,38.992412567],[-81.9815979,38.995010376],[-81.984893799,39.0078125],[-81.991798401,39.018913269],[-82.003593445,39.028511047],[-82.019798279,39.029911041],[-82.036697388,39.024913788],[-82.051399231,38.99471283],[-82.06879425,38.985012054],[-82.089393616,38.975711823],[-82.097694397,38.960613251],[-82.10899353,38.9453125],[-82.111999512,38.933113098],[-82.130393982,38.907711029],[-82.142799377,38.898212433],[-82.144996643,38.882991791],[-82.139396667,38.863513947],[-82.144996643,38.840511322],[-82.162597656,38.824012756],[-82.176399231,38.818412781],[-82.190292358,38.815513611],[-82.21509552,38.798110962],[-82.221694946,38.7864151],[-82.216995239,38.768894196],[-82.201499939,38.760494232],[-82.195594788,38.752494812],[-82.189796448,38.737392426],[-82.182594299,38.707511902],[-82.190406799,38.686313629],[-82.184997559,38.658691406],[-82.176994324,38.643493652],[-82.172096252,38.619693756],[-82.175796509,38.606712341],[-82.181297302,38.599910736],[-82.19379425,38.593112946],[-82.21849823,38.591812134],[-82.211296082,38.586112976],[-82.055099487,38.474514008],[-82.063697815,38.596111298],[-82.064193726,38.608112335],[-81.955795288,38.657310486],[-81.772598267,38.680812836],[-81.811195374,38.712810516],[-81.8125,38.715511322],[-81.791793823,38.733810425],[-81.903297424,38.864112854],[-81.907897949,38.878112793]]]},"properties":null,"id":"Mason"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.31199646,37.424613953],[-81.324699402,37.429611206],[-81.327796936,37.434711456],[-81.346694946,37.434013367],[-81.358795166,37.440013885],[-81.372192383,37.442913055],[-81.379295349,37.44821167],[-81.385299683,37.460510254],[-81.403594971,37.459312439],[-81.416694641,37.456111908],[-81.426696777,37.460811615],[-81.431495667,37.449611664],[-81.438995361,37.457611084],[-81.449295044,37.461711884],[-81.455497742,37.46931076],[-81.45059967,37.476211548],[-81.465698242,37.484512329],[-81.4815979,37.479511261],[-81.492195129,37.480712891],[-81.496994019,37.47461319],[-81.507095337,37.477012634],[-81.52759552,37.476612091],[-81.535194397,37.485012054],[-81.553894043,37.491611481],[-81.57119751,37.487510681],[-81.583595276,37.487911224],[-81.58769989,37.494411469],[-81.5963974,37.491512299],[-81.603096008,37.495613098],[-81.613098145,37.517311096],[-81.621398926,37.520713806],[-81.635093689,37.510410309],[-81.649093628,37.515613556],[-81.654594421,37.523113251],[-81.67199707,37.51701355],[-81.687194824,37.514511108],[-81.685394287,37.505912781],[-81.715698242,37.497310638],[-81.73639679,37.495811462],[-81.752296448,37.506412506],[-81.763793945,37.502910614],[-81.771598816,37.505210876],[-81.779594421,37.50151062],[-81.791397095,37.503112793],[-81.795593262,37.509113312],[-81.818595886,37.507312775],[-81.828193665,37.513313293],[-81.823799133,37.524311066],[-81.825393677,37.534511566],[-81.834793091,37.537010193],[-81.842193604,37.548511505],[-81.855895996,37.549613953],[-81.868995667,37.538410187],[-81.883094788,37.538013458],[-81.89289856,37.525512695],[-81.909698486,37.52671051],[-81.922492981,37.512210846],[-81.927200317,37.5119133],[-81.943496704,37.507511139],[-81.95249939,37.501712799],[-81.953399658,37.492012024],[-81.964393616,37.4935112],[-81.97869873,37.484912872],[-81.990699768,37.484313965],[-81.995697021,37.477210999],[-81.992294312,37.461513519],[-81.985298157,37.454410553],[-81.975296021,37.456611633],[-81.966293335,37.446811676],[-81.957893372,37.448612213],[-81.945495605,37.440910339],[-81.935897827,37.438510895],[-81.940093994,37.429210663],[-81.938095093,37.421310425],[-81.92339325,37.411212921],[-81.929000854,37.405612946],[-81.929199219,37.393512726],[-81.936096191,37.38161087],[-81.928100586,37.360412598],[-81.917198181,37.349811554],[-81.899497986,37.340511322],[-81.895896912,37.332111359],[-81.878799438,37.331413269],[-81.870796204,37.321510315],[-81.860694885,37.314113617],[-81.854400635,37.30531311],[-81.852996826,37.287811279],[-81.838699341,37.286113739],[-81.82459259,37.279510498],[-81.804595947,37.285511017],[-81.78969574,37.283912659],[-81.776695251,37.275611877],[-81.757598877,37.275611867],[-81.756500244,37.268211365],[-81.746795654,37.263214111],[-81.741699219,37.254512787],[-81.743995667,37.243312836],[-81.740493774,37.237213135],[-81.723999023,37.240512848],[-81.715393066,37.229011536],[-81.704795837,37.223213196],[-81.695198059,37.214012146],[-81.687095642,37.213111877],[-81.678398132,37.202213287],[-81.66179657,37.202213287],[-81.562698364,37.206611633],[-81.553695679,37.208511353],[-81.545700073,37.219211578],[-81.521194458,37.227111816],[-81.506996155,37.234912872],[-81.506095886,37.245311737],[-81.498596191,37.257213593],[-81.49319458,37.251613617],[-81.480194092,37.251712799],[-81.466094971,37.258411407],[-81.459693909,37.26581192],[-81.447692871,37.27161026],[-81.436096191,37.27381134],[-81.424598694,37.27161026],[-81.416595459,37.273612976],[-81.404197693,37.298412323],[-81.397895813,37.
303913116],[-81.398399353,37.310112],[-81.385498047,37.319713593],[-81.375,37.318611145],[-81.367599487,37.327613831],[-81.313194275,37.421112061],[-81.31199646,37.424613953]]]},"properties":null,"id":"McDowell"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.047294617,38.374511719],[-82.065597534,38.376213074],[-82.077796936,38.370010376],[-82.097595215,38.367210388],[-82.10949707,38.359012604],[-82.117095947,38.344913483],[-82.125099182,38.346611023],[-82.139694214,38.341712952],[-82.144798279,38.342910767],[-82.169197083,38.335514069],[-82.177696228,38.339611053],[-82.183494568,38.32951355],[-82.196594238,38.322811127],[-82.197998047,38.311012268],[-82.211997986,38.310710907],[-82.207496643,38.301410675],[-82.210899353,38.290111542],[-82.219497681,38.280410767],[-82.234794617,38.271411896],[-82.242996216,38.270011902],[-82.247695923,38.261211395],[-82.245292664,38.256313324],[-82.25919342,38.24641037],[-82.26499939,38.228912354],[-82.268997192,38.217510223],[-82.266593933,38.211711884],[-82.27759552,38.204711914],[-82.271697998,38.178913116],[-82.272399902,38.169113159],[-82.278495789,38.160713196],[-82.285797119,38.144313812],[-82.257499695,38.132213593],[-82.267700195,38.114711761],[-82.263893127,38.109111786],[-82.241096497,38.097511292],[-82.234992981,38.076812744],[-82.224494934,38.070411682],[-82.205993652,38.065811157],[-82.210693359,38.057312012],[-82.206993103,38.050312042],[-82.192199707,38.04731369],[-82.205497742,38.029411316],[-82.205497742,38.023311615],[-82.216995239,38.020011902],[-82.235595703,38.003112793],[-82.244598389,37.99981308],[-82.274597168,37.97271347],[-82.290496826,37.955612183],[-82.30619812,37.943710327],[-82.192993164,37.974212646],[-82.190597534,37.973613739],[-82.188293457,37.975410461],[-82.152496338,37.984313965],[-82.124900818,37.993011475],[-82.044898987,38.014610291],[-82.017700195,38.017513275],[-82.013595581,38.0221138],[-81.99899292,38.020713806],[-81.988899231,38.024311066],[-81.943595886,38.021411896],[-81.93359375,38.025413513],[-81.928199768,38.02791214],[-81.908798218,38.029212952],[-81.917495728,38.035011292],[-81.932495117,38.034412384],[-81.954200745,38.039310455],[-81.971199036,38.049312592],[-81.971893311,38.065410614],[-81.966499329,38.069511414],[-81.968795776,38.076511383],[-81.963600159,38.089111328],[-81.947593689,38.104110718],[-81.945899963,38.108913422],[-81.953399658,38.118312836],[-81.946792603,38.125411987],[-81.945899963,38.134712219],[-81.933494568,38.139713287],[-81.928695679,38.151412964],[-81.916397095,38.149414062],[-81.903793335,38.141613007],[-81.890098572,38.142311096],[-81.881698608,38.136810303],[-81.879295349,38.149810791],[-81.886398315,38.151012421],[-81.88710022,38.167312622],[-81.87210083,38.164310455],[-81.872894287,38.176013947],[-81.851997375,38.17401123],[-81.850799561,38.18731308],[-81.845298767,38.19821167],[-81.832893372,38.209114075],[-81.834999084,38.222110748],[-81.818695068,38.220012665],[-81.802093506,38.223011017],[-81.810295105,38.228813171],[-81.812698364,38.240211487],[-81.803398132,38.250511169],[-81.810295105,38.257011414],[-81.802398682,38.262111664],[-81.807395935,38.270011902],[-81.808898926,38.285511017],[-81.806594849,38.293510437],[-81.811698914,38.300811768],[-81.812599182,38.310913086],[-81.829498291,38.322013855],[-81.846794128,38.313312531],[-81.885795593,38.314712524],[-81.91519928,38.325611115],[-81.973594666,38.28301239],[-81.990997314,38.275310516],[-81.997497559,38.26701355],[-82.004196167,38.27161026],[-82.016296387,38.2721138],[-82.015899658,38.279312134],[-82.02809906,38.279613495],[-82.048400879,38.292011261],[-82.04309845,38.337112427],[-82.047294617,38.374511719]]]},"properties":null,"id":"Lincoln"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.522193909,38.612312317],[-81.578193665,38.577114105],[-81.632293701,38.554611206],[-81.635696411,38.56231308],[-81.643699646,38.567012787],[-81.695899963,38.625911713],[-81.729797363,38.59671402],[-81.738197327,38.599010468],[-81.750396729,38.59041214],[-81.755493164,38.581611633],[-81.748497009,38.567012787],[-81.747795105,38.552711487],[-81.753395081,38.524410248],[-81.74319458,38.517810822],[-81.743293762,38.510410309],[-81.734199524,38.500411987],[-81.723197937,38.468212128],[-81.777694702,38.446811676],[-81.810997009,38.435913086],[-81.823196411,38.430011749],[-81.835296631,38.429012299],[-81.899299622,38.399112701],[-81.900993347,38.395011902],[-81.91519928,38.325611115],[-81.885795593,38.314712524],[-81.846794128,38.313312531],[-81.829498291,38.322013855],[-81.812599182,38.310913086],[-81.811698914,38.300811768],[-81.806594849,38.293510437],[-81.808898926,38.285511017],[-81.807395935,38.270011902],[-81.802398682,38.262111664],[-81.810295105,38.257011414],[-81.803398132,38.250511169],[-81.812698364,38.240211487],[-81.810295105,38.228813171],[-81.802093506,38.223011017],[-81.818695068,38.220012665],[-81.834999084,38.222110748],[-81.832893372,38.209114075],[-81.826599121,38.202812195],[-81.820495605,38.186511993],[-81.813896179,38.193710327],[-81.811096191,38.207611084],[-81.79309845,38.208011627],[-81.785995483,38.214111328],[-81.788597107,38.222011566],[-81.784095765,38.230312337],[-81.765396118,38.227710724],[-81.756797791,38.220012665],[-81.754798889,38.204914093],[-81.75,38.200611115],[-81.73690033,38.198413849],[-81.726898193,38.204013824],[-81.721496582,38.214710236],[-81.723899841,38.222011566],[-81.708099365,38.223114014],[-81.696495056,38.226913452],[-81.686393738,38.218212128],[-81.668495178,38.220012665],[-81.658599854,38.223312378],[-81.643798828,38.220611572],[-81.64339447,38.210411072],[-81.648597717,38.205211639],[-81.644699097,38.194911957],[-81.648399353,38.185310364],[-81.628997803,38.172611237],[-81.625099182,38.178413391],[-81.608093262,38.174911499],[-81.597000122,38.167011261],[-81.590393066,38.154712677],[-81.565696716,38.137710571],[-81.555496216,38.141113281],[-81.5440979,38.1369133],[-81.539596558,38.130111694],[-81.545799255,38.119510651],[-81.534095764,38.121311188],[-81.516494751,38.111911774],[-81.521697998,38.100914001],[-81.534896851,38.094612122],[-81.539497375,38.079612732],[-81.506195068,38.060813904],[-81.493995667,38.066810608],[-81.479194641,38.060310364],[-81.477096558,38.049812317],[-81.464393616,38.036010742],[-81.460395813,38.02791214],[-81.46689606,38.023410796],[-81.468193054,38.016010284],[-81.460998535,38.000011444],[-81.453994751,37.992511749],[-81.456794739,37.987510681],[-81.456497192,37.978313446],[-81.431297302,37.971813202],[-81.422698975,37.96761322],[-81.413497925,37.973911285],[-81.396697998,37.96761322],[-81.379295349,37.968811035],[-81.373397827,38.001010895],[-81.346595764,38.105712891],[-81.331596375,38.166511536],[-81.326797485,38.174411774],[-81.328994751,38.182312012],[-81.31539917,38.180511475],[-81.244194031,38.251113892],[-81.229095459,38.263813019],[-81.234893799,38.280712128],[-81.254394531,38.375110626],[-81.276298523,38.445011139],[-81.283294678,38.470211029],[-81.281196594,38.473812103],[-81.239700317,38.500110626],[-81.194099426,38.527610779],[-81.220596313,38.53181076],[-81.274299622,38.521812439],[-81.303497314,38.534812927],[-81.348495483,38.541213989],[-81.461196899,38.549510956],[-81.470695496,38.546813965],[-81.507598877,38.582710266],[-81.522
193909,38.612312317]]]},"properties":null,"id":"Kanawha"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.581497192,39.026210785],[-81.671195984,39.063110352],[-81.747299194,39.095413208],[-81.76159668,39.083713531],[-81.779594421,39.078113556],[-81.807296753,39.083911896],[-81.813697815,39.078811646],[-81.812095642,39.061012268],[-81.803894043,39.048511505],[-81.772399902,39.025611877],[-81.765296936,39.017211914],[-81.765098572,39.003112793],[-81.774597168,38.992610931],[-81.776199341,38.979911804],[-81.781692505,38.963611603],[-81.778793335,38.955913544],[-81.756195068,38.93441391],[-81.75919342,38.9269104],[-81.774093628,38.92301178],[-81.793395996,38.930213928],[-81.806900024,38.942611694],[-81.824996948,38.946613312],[-81.838096619,38.937110901],[-81.845298767,38.922813416],[-81.845596313,38.911411285],[-81.84929657,38.901012421],[-81.858894348,38.890213013],[-81.888694763,38.874610901],[-81.907897949,38.878112793],[-81.903297424,38.864112854],[-81.791793823,38.733810425],[-81.8125,38.715511322],[-81.811195374,38.712810516],[-81.772598267,38.680812836],[-81.747398376,38.683811188],[-81.695899963,38.625911713],[-81.643699646,38.567012787],[-81.635696411,38.56231308],[-81.632293701,38.554611206],[-81.578193665,38.577114105],[-81.522193909,38.612312317],[-81.537597656,38.646213531],[-81.546592712,38.672412872],[-81.538795471,38.698112488],[-81.533493042,38.730113983],[-81.528999329,38.750110626],[-81.521697998,38.82321167],[-81.528999329,38.891010284],[-81.526992798,38.901912689],[-81.51159668,38.904613495],[-81.502799988,38.917713165],[-81.512496948,38.919013977],[-81.518493652,38.931613922],[-81.526596069,38.941310883],[-81.538192749,38.945011139],[-81.54989624,38.960113525],[-81.548995972,38.967712402],[-81.530395508,38.977413177],[-81.53729248,38.990913391],[-81.532394409,39.005912781],[-81.581497192,39.026210785]]]},"properties":null,"id":"Jackson"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-79.626594543,38.664413452],[-79.622993469,38.668613434],[-79.596199036,38.66311264],[-79.601097107,38.679412842],[-79.597000122,38.687213898],[-79.588096619,38.690013885],[-79.577400208,38.702213287],[-79.57169342,38.712612152],[-79.560997009,38.717910767],[-79.556999207,38.730613708],[-79.546600342,38.736110687],[-79.542694092,38.746612549],[-79.535392761,38.747112274],[-79.526298523,38.764011383],[-79.51449585,38.780014038],[-79.510093689,38.781414032],[-79.50629425,38.793113708],[-79.507698059,38.800312042],[-79.525192261,38.813411713],[-79.529495239,38.819313049],[-79.523796082,38.829410553],[-79.538894653,38.841312408],[-79.530197144,38.852012634],[-79.515396115,38.864112852],[-79.510192871,38.874912262],[-79.519393921,38.886810303],[-79.511299133,38.894012451],[-79.482696533,38.886711121],[-79.472099304,38.892211914],[-79.463798523,38.892711639],[-79.446098328,38.898910522],[-79.456794739,38.924911499],[-79.453094482,38.930412292],[-79.443199158,38.932411194],[-79.424499512,38.929012299],[-79.415100098,38.923110962],[-79.389595032,38.923213959],[-79.376693726,38.920913696],[-79.368797302,38.924713135],[-79.356796265,38.943611145],[-79.34980011,38.958110809],[-79.363899231,38.967010498],[-79.377494812,38.965911865],[-79.397994995,38.972110748],[-79.409698486,38.973712921],[-79.421592712,38.971912384],[-79.43989563,38.97631073],[-79.463394165,38.974910736],[-79.472198486,38.977912903],[-79.488998413,38.975013733],[-79.496696472,38.976913452],[-79.50819397,38.973613739],[-79.536193848,38.981311798],[-79.758499146,39.031311035],[-79.785499573,39.03881073],[-79.777694702,39.047210693],[-79.778900146,39.055511475],[-79.774299622,39.063411713],[-79.775398254,39.076313019],[-79.788795471,39.096912384],[-79.798797607,39.106910706],[-79.82509613,39.115810394],[-79.824699402,39.10641098],[-79.836097717,39.096412659],[-79.849395752,39.094413757],[-79.862693787,39.072113037],[-79.870292664,39.043113708],[-79.887496948,39.001213074],[-79.892196655,38.980613708],[-79.898696899,38.973011017],[-79.985595703,38.960411072],[-80.081100464,38.947711945],[-80.082397461,38.945812225],[-80.09009552,38.939712524],[-80.087593079,38.935012817],[-80.096694946,38.928012848],[-80.088096619,38.922210693],[-80.093399048,38.910312653],[-80.101493835,38.907913208],[-80.109100342,38.897212982],[-80.098594666,38.841812134],[-80.103797913,38.821910858],[-80.126098633,38.745811462],[-80.236198425,38.743812561],[-80.280395508,38.694911957],[-80.193695068,38.542812347],[-80.18409729,38.525413513],[-80.230697632,38.420711517],[-80.245498657,38.388412476],[-80.116195679,38.473812103],[-80.028694153,38.459312439],[-79.891799927,38.534912109],[-79.863296509,38.551113129],[-79.855697632,38.576213837],[-79.856399536,38.583511353],[-79.843093872,38.607013702],[-79.846496582,38.629611969],[-79.835700989,38.630210876],[-79.830200195,38.6369133],[-79.833396912,38.646812439],[-79.823097229,38.655410767],[-79.814193726,38.676013947],[-79.808197021,38.682811737],[-79.808395386,38.691711426],[-79.799194336,38.701713562],[-79.802093506,38.709812164],[-79.776092529,38.739711761],[-79.767196655,38.735111237],[-79.754997253,38.733810425],[-79.750198364,38.721912384],[-79.739097595,38.71761322],[-79.735397339,38.712310791],[-79.739295959,38.680011749],[-79.723594666,38.680213928],[-79.71849823,38.685012817],[-79.708595276,38.684810638],[-79.70199585,38.679611206],[-79.697296143,38.666313171],[-79.679199219,38.664913177],[-79.678092957,38.681911469],[-79.658493042,38.689411163],[
-79.644294739,38.681110382],[-79.633895874,38.681812286],[-79.626495361,38.677513123],[-79.634994507,38.665611267],[-79.626594543,38.664413452]]]},"properties":null,"id":"Randolph"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.728294373,39.095710754],[-80.749198914,39.099811554],[-80.805999756,39.108310699],[-80.812797546,39.108211517],[-80.811096191,39.099113464],[-80.81879425,39.092212677],[-80.8279953,39.092010498],[-80.837394714,39.082012177],[-80.850898743,39.084411621],[-80.859100342,39.074211121],[-80.876998901,39.07321167],[-80.886695862,39.064311981],[-80.895095825,39.063812256],[-80.897293091,39.057113647],[-80.91859436,39.043712616],[-80.933494568,39.044712067],[-80.952194214,39.038414001],[-80.960197449,39.038013458],[-80.970596313,39.027511597],[-80.98109436,39.026611328],[-80.998397827,39.012313843],[-81.007194519,39.009113312],[-81.028999329,39.012012482],[-81.033996582,39.009513855],[-81.032196045,39.00151062],[-81.035896301,38.992713928],[-81.031295776,38.984413147],[-81.019592285,38.97851181],[-81.007797241,38.968711853],[-81.004295349,38.96131134],[-81.010696411,38.956512451],[-81.002998352,38.932312012],[-81.012893677,38.922710419],[-81.007293701,38.897312164],[-81.003196716,38.846313477],[-81.011695862,38.839813232],[-81.01309967,38.822811127],[-81.034294128,38.809612274],[-81.032699585,38.805110931],[-81.055999756,38.792411804],[-81.051498413,38.779510498],[-81.037193298,38.762813568],[-81.019996643,38.760311127],[-80.999694824,38.750213623],[-80.985694885,38.718914032],[-80.973098755,38.72461319],[-80.849693298,38.792812347],[-80.702400208,38.878513336],[-80.671699524,38.889614105],[-80.665397644,38.881912231],[-80.659194946,38.887813568],[-80.646598816,38.886512756],[-80.631492615,38.897212982],[-80.607597351,38.904613495],[-80.623893738,38.921211243],[-80.71169281,39.003513336],[-80.716995239,39.009410858],[-80.728897095,39.083511353],[-80.728294373,39.095710754]]]},"properties":null,"id":"Gilmer"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.457199097,38.738910675],[-80.451698303,38.748710632],[-80.452293396,38.7619133],[-80.444000244,38.775310516],[-80.457099915,38.775310516],[-80.463493347,38.78421402],[-80.462799072,38.79511261],[-80.47429657,38.794910431],[-80.485298157,38.801113129],[-80.473899841,38.829811096],[-80.490097046,38.844612122],[-80.500595093,38.845111847],[-80.508499146,38.854812622],[-80.522895813,38.862113953],[-80.531700134,38.856212616],[-80.545394897,38.854312897],[-80.558395386,38.867111206],[-80.58089447,38.858310699],[-80.586097717,38.860713959],[-80.592895508,38.872310638],[-80.591094971,38.883712769],[-80.597496033,38.895011902],[-80.607597351,38.904613495],[-80.631492615,38.897212982],[-80.646598816,38.886512756],[-80.659194946,38.887813568],[-80.665397644,38.881912231],[-80.671699524,38.889614105],[-80.702400208,38.878513336],[-80.849693298,38.792812347],[-80.973098755,38.72461319],[-80.985694885,38.718914032],[-80.987197876,38.715511322],[-81.031700134,38.667812347],[-80.976799011,38.625411987],[-80.978393555,38.613113403],[-80.971000671,38.610412599],[-80.970199585,38.593711853],[-80.959793091,38.597110748],[-80.947593689,38.591712952],[-80.939292908,38.5846138],[-80.930892944,38.583213806],[-80.92489624,38.572113037],[-80.913398743,38.563312531],[-80.914596558,38.554912567],[-80.908996582,38.54291153],[-80.893699646,38.537712097],[-80.880599976,38.507411957],[-80.796897888,38.551612854],[-80.781593323,38.556812286],[-80.775497437,38.54731369],[-80.760597229,38.541412354],[-80.749595642,38.526012421],[-80.742294312,38.52671051],[-80.733093262,38.533813477],[-80.704795837,38.533412933],[-80.69619751,38.536113739],[-80.670700073,38.528011322],[-80.651794434,38.526313782],[-80.627998352,38.545612335],[-80.50869751,38.645511627],[-80.457199097,38.738910675]]]},"properties":null,"id":"Braxton"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.881599426,38.103111267],[-80.876792908,38.110313416],[-80.88759613,38.122112274],[-80.882598877,38.133010864],[-80.885894775,38.135913849],[-80.902496338,38.122711182],[-80.910293579,38.132911682],[-80.908493042,38.142711639],[-80.918693542,38.141513824],[-80.927497864,38.15071106],[-80.937896729,38.153312683],[-80.94960022,38.169811249],[-80.950294495,38.176612854],[-80.944396973,38.193511963],[-80.957199097,38.202510834],[-80.964698792,38.201213837],[-80.969200134,38.215312958],[-80.983894348,38.222412109],[-81.007995605,38.221012115],[-81.011299133,38.215110779],[-81.007194519,38.206012726],[-81.019493103,38.206413269],[-81.021598816,38.215312958],[-81.031692505,38.218013763],[-81.027694702,38.22971344],[-81.029693604,38.238113403],[-81.046600342,38.223613739],[-81.057594299,38.222812653],[-81.051193237,38.213012695],[-81.068199158,38.211410522],[-81.073394775,38.196113586],[-81.084793091,38.204013824],[-81.095794678,38.205310822],[-81.109695435,38.212612152],[-81.116294861,38.220813751],[-81.13319397,38.235111237],[-81.14339447,38.236110687],[-81.153297424,38.22631073],[-81.173194885,38.232810974],[-81.189498901,38.231113434],[-81.193496704,38.250011444],[-81.203796387,38.248710632],[-81.220397949,38.261611938],[-81.229095459,38.263813019],[-81.244194031,38.251113892],[-81.31539917,38.180511475],[-81.328994751,38.182312012],[-81.326797485,38.174411774],[-81.331596375,38.166511536],[-81.346595764,38.105712891],[-81.373397827,38.001010895],[-81.379295349,37.968811035],[-81.362197876,37.971912384],[-81.351097107,37.976512909],[-81.343093872,37.970314026],[-81.329399109,37.96661377],[-81.326599121,37.956512451],[-81.298095703,37.9453125],[-81.29599762,37.938213348],[-81.301094055,37.932411194],[-81.297798157,37.910812378],[-81.294898987,37.906513214],[-81.299995422,37.895713806],[-81.295097351,37.889411926],[-81.249794006,37.885810852],[-81.119697571,37.879211426],[-81.073493958,37.875911713],[-81.070594788,37.865711212],[-81.073799133,37.86051178],[-81.097496033,37.860111237],[-81.099594116,37.854412079],[-81.085693359,37.850212097],[-81.073295593,37.853912354],[-81.048995972,37.856113434],[-81.049095154,37.851211548],[-81.058395386,37.842811584],[-81.055496216,37.834312439],[-81.040100098,37.833713531],[-81.028198242,37.829711914],[-80.991500854,37.831012726],[-80.978096008,37.833110809],[-80.964500427,37.831310272],[-80.944396973,37.819213867],[-80.80909729,37.867912292],[-80.800994873,37.868610382],[-80.753997803,37.865413666],[-80.755699158,37.874511719],[-80.810493469,37.948612213],[-80.770095825,37.985012054],[-80.784500122,37.989311218],[-80.789993286,37.996013641],[-80.788200378,38.009811401],[-80.79649353,38.017311096],[-80.799499512,38.026611328],[-80.823493958,38.027412415],[-80.829399109,38.042011261],[-80.812698364,38.047012329],[-80.811599731,38.062713623],[-80.820999146,38.064613342],[-80.832595825,38.059513092],[-80.839996338,38.052810669],[-80.861198425,38.056713104],[-80.87789917,38.065711975],[-80.881095886,38.071113586],[-80.893699646,38.07951355],[-80.884094238,38.083213806],[-80.881599426,38.103111267]]]},"properties":null,"id":"Fayette"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.880599976,38.507411957],[-80.893699646,38.537712097],[-80.908996582,38.54291153],[-80.914596558,38.554912567],[-80.913398743,38.563312531],[-80.92489624,38.572113037],[-80.930892944,38.583213806],[-80.939292908,38.5846138],[-80.947593689,38.591712952],[-80.959793091,38.597110748],[-80.970199585,38.593711853],[-80.971000671,38.610412599],[-80.978393555,38.613113403],[-80.976799011,38.625411987],[-81.031700134,38.667812347],[-81.08379364,38.612613678],[-81.124794006,38.571113586],[-81.132896423,38.564613342],[-81.194099426,38.527610779],[-81.239700317,38.500110626],[-81.281196594,38.473812103],[-81.283294678,38.470211029],[-81.276298523,38.445011139],[-81.254394531,38.375110626],[-81.234893799,38.280712128],[-81.229095459,38.263813019],[-81.202194214,38.277610779],[-81.124794006,38.313713074],[-80.999794006,38.372310638],[-80.91129303,38.414813995],[-80.827400208,38.471813202],[-80.817695618,38.478912354],[-80.825500488,38.494411469],[-80.865196228,38.500610352],[-80.880599976,38.507411957]]]},"properties":null,"id":"Clay"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.456794739,37.987510681],[-81.453994751,37.992511749],[-81.460998535,38.000011444],[-81.468193054,38.016010284],[-81.46689606,38.023410796],[-81.460395813,38.02791214],[-81.464393616,38.036010742],[-81.477096558,38.049812317],[-81.479194641,38.060310364],[-81.493995667,38.066810608],[-81.506195068,38.060813904],[-81.539497375,38.079612732],[-81.534896851,38.094612122],[-81.521697998,38.100914001],[-81.516494751,38.111911774],[-81.534095764,38.121311188],[-81.545799255,38.119510651],[-81.539596558,38.130111694],[-81.5440979,38.1369133],[-81.555496216,38.141113281],[-81.565696716,38.137710571],[-81.590393066,38.154712677],[-81.597000122,38.167011261],[-81.608093262,38.174911499],[-81.625099182,38.178413391],[-81.628997803,38.172611237],[-81.648399353,38.185310364],[-81.644699097,38.194911957],[-81.648597717,38.205211639],[-81.64339447,38.210411072],[-81.643798828,38.220611572],[-81.658599854,38.223312378],[-81.668495178,38.220012665],[-81.686393738,38.218212128],[-81.696495056,38.226913452],[-81.708099365,38.223114014],[-81.723899841,38.222011566],[-81.721496582,38.214710236],[-81.726898193,38.204013824],[-81.73690033,38.198413849],[-81.75,38.200611115],[-81.754798889,38.204914093],[-81.756797791,38.220012665],[-81.765396118,38.227710724],[-81.784095765,38.230312337],[-81.788597107,38.222011566],[-81.785995483,38.214111328],[-81.79309845,38.208011627],[-81.811096191,38.207611084],[-81.813896179,38.193710327],[-81.820495605,38.186511993],[-81.826599121,38.202812195],[-81.832893372,38.209114075],[-81.845298767,38.19821167],[-81.850799561,38.18731308],[-81.851997375,38.17401123],[-81.872894287,38.176013947],[-81.87210083,38.164310455],[-81.88710022,38.167312622],[-81.886398315,38.151012421],[-81.879295349,38.149810791],[-81.881698608,38.136810303],[-81.890098572,38.142311096],[-81.903793335,38.141613007],[-81.916397095,38.149414062],[-81.928695679,38.151412964],[-81.933494568,38.139713287],[-81.945899963,38.134712219],[-81.946792603,38.125411987],[-81.953399658,38.118312836],[-81.945899963,38.108913422],[-81.947593689,38.104110718],[-81.963600159,38.089111328],[-81.968795776,38.076511383],[-81.966499329,38.069511414],[-81.971893311,38.065410614],[-81.971199036,38.049312592],[-81.954200745,38.039310455],[-81.932495117,38.034412384],[-81.917495728,38.035011292],[-81.908798218,38.029212952],[-81.928199768,38.02791214],[-81.93359375,38.025413513],[-81.960800171,38.007511139],[-81.979698181,37.991111755],[-81.976997375,37.982212067],[-81.95009613,37.971912384],[-81.939598083,37.976810455],[-81.923095703,37.976810455],[-81.911499023,37.974212646],[-81.902694702,37.968212128],[-81.901893616,37.961410522],[-81.86089325,37.948413849],[-81.814193726,37.946712494],[-81.799194336,37.935913086],[-81.788894653,37.936511993],[-81.774200439,37.922412872],[-81.77230072,37.913311005],[-81.761497498,37.910812378],[-81.758293152,37.892711639],[-81.759498596,37.885311127],[-81.781799316,37.868713379],[-81.772193909,37.861412048],[-81.76159668,37.849411011],[-81.74609375,37.843013763],[-81.744995117,37.830112457],[-81.730895996,37.823913574],[-81.732299805,37.817211151],[-81.723098755,37.810611725],[-81.707199097,37.813411713],[-81.700996399,37.821212769],[-81.691497803,37.821910858],[-81.678199768,37.830810547],[-81.66759491,37.829711914],[-81.658294678,37.833812714],[-81.644096375,37.823013306],[-81.64629364,37.817710876],[-81.632598877,37.808013916],[-81.631599426,37.803310394],[-81.620697021,37.801212311],[-81.606994629,37.788711548],[-81.59709930
4,37.785511017],[-81.583496094,37.776210785],[-81.576599121,37.763813019],[-81.563293457,37.767612457],[-81.559295654,37.773212433],[-81.548698425,37.777511597],[-81.53049469,37.789211273],[-81.521697998,37.786510468],[-81.51449585,37.791213989],[-81.520095825,37.79731369],[-81.517700195,37.817813873],[-81.532699585,37.825912476],[-81.541793823,37.827911377],[-81.552497864,37.82541275],[-81.561195374,37.84331131],[-81.564300537,37.873813629],[-81.558494568,37.877311707],[-81.561897278,37.888111115],[-81.554397583,37.893512726],[-81.563995361,37.903110504],[-81.5625,37.918613434],[-81.571296692,37.927810669],[-81.55569458,37.946212769],[-81.555793762,37.953811646],[-81.545097351,37.961013794],[-81.539299011,37.970710754],[-81.526992798,37.971313477],[-81.513397217,37.984413147],[-81.506393433,37.987911224],[-81.486793518,37.982112885],[-81.481697083,37.986812592],[-81.462493896,37.98991394],[-81.456794739,37.987510681]]]},"properties":null,"id":"Boone"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.033996582,39.009513855],[-81.060295105,39.008712769],[-81.076393127,39.010612488],[-81.082298279,39.016311646],[-81.093093872,39.016311646],[-81.111000061,39.030212402],[-81.116096497,39.04221344],[-81.12789917,39.044013977],[-81.156394958,39.039710999],[-81.162696838,39.03081131],[-81.230995178,38.970912933],[-81.272293091,38.936210632],[-81.267295837,38.9269104],[-81.277099609,38.92401123],[-81.277397156,38.915611267],[-81.258796692,38.907211304],[-81.256095886,38.898513794],[-81.259597778,38.893112183],[-81.238899231,38.881111145],[-81.241996765,38.873111725],[-81.23589325,38.862613678],[-81.244796753,38.858913422],[-81.245796204,38.852710724],[-81.220397949,38.842712402],[-81.218795776,38.834213257],[-81.222000122,38.805713654],[-81.218597412,38.798110962],[-81.226997375,38.792613983],[-81.209899902,38.785312653],[-81.211799622,38.780212402],[-81.196594238,38.767410278],[-81.18359375,38.747612],[-81.17489624,38.742511749],[-81.169898987,38.73261261],[-81.175895691,38.723114014],[-81.17149353,38.72051239],[-81.171600342,38.706611633],[-81.167098999,38.694911957],[-81.173194885,38.686912537],[-81.171798706,38.67591095],[-81.16179657,38.672012329],[-81.165794373,38.660614014],[-81.158195496,38.644012451],[-81.131599426,38.637710571],[-81.120498657,38.630912781],[-81.102294922,38.638713837],[-81.083297729,38.619213104],[-81.08379364,38.612613678],[-81.031700134,38.667812347],[-80.987197876,38.715511322],[-80.985694885,38.718914032],[-80.999694824,38.750213623],[-81.019996643,38.760311127],[-81.037193298,38.762813568],[-81.051498413,38.779510498],[-81.055999756,38.792411804],[-81.032699585,38.805110931],[-81.034294128,38.809612274],[-81.01309967,38.822811127],[-81.011695862,38.839813232],[-81.003196716,38.846313477],[-81.007293701,38.897312164],[-81.012893677,38.922710419],[-81.002998352,38.932312012],[-81.010696411,38.956512451],[-81.004295349,38.96131134],[-81.007797241,38.968711853],[-81.019592285,38.97851181],[-81.031295776,38.984413147],[-81.035896301,38.992713928],[-81.032196045,39.00151062],[-81.033996582,39.009513855]]]},"properties":null,"id":"Calhoun"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.21849823,38.591812134],[-82.25,38.598712921],[-82.263595581,38.597511292],[-82.274299622,38.593711853],[-82.288696289,38.581512451],[-82.293792725,38.572910309],[-82.293395996,38.557113647],[-82.29599762,38.537811279],[-82.302696228,38.523712158],[-82.304893494,38.494113922],[-82.310592651,38.483112335],[-82.315200806,38.464412689],[-82.323699951,38.449913025],[-82.339393616,38.441413879],[-82.389396667,38.43441391],[-82.404396057,38.439212799],[-82.430793762,38.431110382],[-82.485794067,38.418312073],[-82.507698059,38.410812378],[-82.352798462,38.310112],[-82.341094971,38.306713104],[-82.328895569,38.315113068],[-82.330299377,38.323810577],[-82.309295654,38.329113007],[-82.298492432,38.327613831],[-82.297195435,38.322811127],[-82.286193848,38.319911957],[-82.276100159,38.304813385],[-82.283798218,38.301410675],[-82.291099548,38.304012299],[-82.304496765,38.296413422],[-82.312599182,38.269012451],[-82.311096191,38.256412506],[-82.295097351,38.236812592],[-82.274993896,38.228210449],[-82.26499939,38.228912354],[-82.25919342,38.24641037],[-82.245292664,38.256313324],[-82.247695923,38.261211395],[-82.242996216,38.270011902],[-82.234794617,38.271411896],[-82.219497681,38.280410767],[-82.210899353,38.290111542],[-82.207496643,38.301410675],[-82.211997986,38.310710907],[-82.197998047,38.311012268],[-82.196594238,38.322811127],[-82.183494568,38.32951355],[-82.177696228,38.339611053],[-82.169197083,38.335514069],[-82.144798279,38.342910767],[-82.139694214,38.341712952],[-82.125099182,38.346611023],[-82.117095947,38.344913483],[-82.10949707,38.359012604],[-82.097595215,38.367210388],[-82.077796936,38.370010376],[-82.065597534,38.376213074],[-82.047294617,38.374511719],[-82.05039978,38.404212952],[-82.055099487,38.474514008],[-82.211296082,38.586112976],[-82.21849823,38.591812134]]]},"properties":null,"id":"Cabell"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.162696838,39.03081131],[-81.174995422,39.03521347],[-81.179695129,39.040611267],[-81.200996399,39.038211823],[-81.21169281,39.04511261],[-81.224899292,39.040210724],[-81.228294373,39.032512665],[-81.2397995,39.037910461],[-81.249992371,39.035011292],[-81.250495911,39.050411224],[-81.253799438,39.055412292],[-81.272697449,39.059612274],[-81.257896423,39.069812775],[-81.260498047,39.084911346],[-81.25869751,39.093612671],[-81.2654953,39.107913971],[-81.276100159,39.108211517],[-81.278900146,39.121212006],[-81.289299011,39.132213593],[-81.298294067,39.130111694],[-81.323699951,39.142311096],[-81.328399658,39.151012421],[-81.310493469,39.158611298],[-81.315895081,39.166713715],[-81.305595398,39.171611786],[-81.29599762,39.181312561],[-81.297393799,39.185512543],[-81.407394409,39.134910583],[-81.425895691,39.134811401],[-81.447898865,39.120910645],[-81.498893738,39.085811615],[-81.553398132,39.049411774],[-81.581497192,39.026210785],[-81.532394409,39.005912781],[-81.53729248,38.990913391],[-81.530395508,38.977413177],[-81.548995972,38.967712402],[-81.54989624,38.960113525],[-81.538192749,38.945011139],[-81.526596069,38.941310883],[-81.518493652,38.931613922],[-81.512496948,38.919013977],[-81.502799988,38.917713165],[-81.495994568,38.924613953],[-81.480995178,38.924713135],[-81.457595825,38.932712555],[-81.449699402,38.940311432],[-81.435096741,38.933311462],[-81.415100098,38.920711517],[-81.34299469,38.89591217],[-81.308799744,38.908111572],[-81.277397156,38.915611267],[-81.277099609,38.92401123],[-81.267295837,38.9269104],[-81.272293091,38.936210632],[-81.230995178,38.970912933],[-81.162696838,39.03081131]]]},"properties":null,"id":"Wirt"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.239593506,39.268310547],[-81.24949646,39.277011871],[-81.2602005,39.282012939],[-81.260993958,39.291713715],[-81.282997131,39.296611786],[-81.312095642,39.299812317],[-81.323593139,39.298912057],[-81.328094482,39.311313629],[-81.325897217,39.319812775],[-81.333999634,39.324012756],[-81.341499329,39.31911087],[-81.348495483,39.328613281],[-81.365592957,39.323810577],[-81.370994568,39.341213226],[-81.383293152,39.343013763],[-81.395896912,39.355510712],[-81.406898499,39.388412476],[-81.420593262,39.400413513],[-81.435195923,39.408313751],[-81.456092834,39.409313202],[-81.473693848,39.399711609],[-81.488998413,39.384113312],[-81.513999939,39.366710663],[-81.533599854,39.358612061],[-81.557594299,39.338813782],[-81.560195923,39.317913055],[-81.565093994,39.29391098],[-81.56539917,39.276012421],[-81.569900513,39.268112183],[-81.585296631,39.26871109],[-81.608894348,39.276012421],[-81.621795654,39.273712158],[-81.643798828,39.27721405],[-81.672393799,39.275512695],[-81.682792664,39.271411896],[-81.696296689,39.25661087],[-81.696899414,39.247810364],[-81.691497803,39.227813721],[-81.70249939,39.22051239],[-81.725296021,39.216213226],[-81.732696533,39.207111359],[-81.73589325,39.195911407],[-81.756095886,39.18031311],[-81.756095886,39.1769104],[-81.743293762,39.145511627],[-81.744598389,39.126811981],[-81.74369812,39.103713989],[-81.747299194,39.095413208],[-81.671195984,39.063110352],[-81.581497192,39.026210785],[-81.553398132,39.049411774],[-81.498893738,39.085811615],[-81.447898865,39.120910645],[-81.425895691,39.134811401],[-81.407394409,39.134910583],[-81.297393799,39.185512543],[-81.301597595,39.200210571],[-81.29119873,39.223712921],[-81.277297974,39.234111786],[-81.277099609,39.242210388],[-81.271194458,39.24861145],[-81.266593933,39.26071167],[-81.249198914,39.270812988],[-81.239593506,39.268310547]]]},"properties":null,"id":"Wood"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.222595215,37.510414124],[-81.22479248,37.516010284],[-81.238296509,37.523212433],[-81.245994568,37.530212402],[-81.254096985,37.532310486],[-81.262496948,37.527610779],[-81.285499573,37.540912628],[-81.303497314,37.546310425],[-81.312294006,37.553512573],[-81.316299438,37.562110901],[-81.310592651,37.576713562],[-81.314498901,37.587810516],[-81.396697998,37.706012726],[-81.428398132,37.750110626],[-81.436599731,37.752910614],[-81.444595337,37.761211395],[-81.458496094,37.784313202],[-81.4713974,37.785110474],[-81.475700378,37.781513214],[-81.487594604,37.783210754],[-81.492897034,37.790813446],[-81.51449585,37.791213989],[-81.521697998,37.786510468],[-81.53049469,37.789211273],[-81.548698425,37.777511597],[-81.559295654,37.773212433],[-81.563293457,37.767612457],[-81.576599121,37.763813019],[-81.583496094,37.776210785],[-81.597099304,37.785511017],[-81.606994629,37.788711548],[-81.638298035,37.77791214],[-81.654495239,37.778911591],[-81.674293518,37.773612976],[-81.685195923,37.77571106],[-81.707496643,37.764610291],[-81.747299194,37.762413025],[-81.763793945,37.751811981],[-81.70249939,37.701511383],[-81.722297668,37.698413849],[-81.73349762,37.691711426],[-81.736694336,37.682811737],[-81.746994019,37.680011749],[-81.754600525,37.685512543],[-81.763198853,37.678710937],[-81.775497437,37.678211212],[-81.786300659,37.67011261],[-81.792098999,37.671112061],[-81.802894592,37.66091156],[-81.79989624,37.651611328],[-81.802398682,37.644412994],[-81.795799255,37.631813049],[-81.800697327,37.625312805],[-81.80279541,37.61341095],[-81.81489563,37.600112915],[-81.828094482,37.598011017],[-81.828193665,37.587612152],[-81.837799072,37.574913025],[-81.840499878,37.5662117],[-81.85559845,37.560710907],[-81.855895996,37.549613953],[-81.842193604,37.548511505],[-81.834793091,37.537010193],[-81.825393677,37.534511566],[-81.823799133,37.524311066],[-81.828193665,37.513313293],[-81.818595886,37.507312775],[-81.795593262,37.509113312],[-81.791397095,37.503112793],[-81.779594421,37.50151062],[-81.771598816,37.505210876],[-81.763793945,37.502910614],[-81.752296448,37.506412506],[-81.73639679,37.495811462],[-81.715698242,37.497310638],[-81.685394287,37.505912781],[-81.687194824,37.514511108],[-81.67199707,37.51701355],[-81.654594421,37.523113251],[-81.649093628,37.515613556],[-81.635093689,37.510410309],[-81.621398926,37.520713806],[-81.613098145,37.517311096],[-81.603096008,37.495613098],[-81.5963974,37.491512299],[-81.58769989,37.494411469],[-81.583595276,37.487911224],[-81.57119751,37.487510681],[-81.553894043,37.491611481],[-81.535194397,37.485012054],[-81.52759552,37.476612091],[-81.507095337,37.477012634],[-81.496994019,37.47461319],[-81.492195129,37.480712891],[-81.4815979,37.479511261],[-81.465698242,37.484512329],[-81.45059967,37.476211548],[-81.455497742,37.46931076],[-81.449295044,37.461711884],[-81.438995361,37.457611084],[-81.431495667,37.449611664],[-81.426696777,37.460811615],[-81.416694641,37.456111908],[-81.403594971,37.459312439],[-81.385299683,37.460510254],[-81.379295349,37.44821167],[-81.372192383,37.442913055],[-81.358795166,37.440013885],[-81.346694946,37.434013367],[-81.327796936,37.434711456],[-81.324699402,37.429611206],[-81.31199646,37.424613953],[-81.305793762,37.423110962],[-81.292900085,37.413311005],[-81.28339386,37.419712067],[-81.287895203,37.433513641],[-81.303993225,37.448913574],[-81.306694031,37.457111359],[-81.299499512,37.460811615],[-81.29359436,37.469612122],[-81.278594971,37.472213745],[-81.271697998,37.4
84313965],[-81.26449585,37.489013672],[-81.258399963,37.498710632],[-81.239395142,37.506813049],[-81.222595215,37.510414124]]]},"properties":null,"id":"Wyoming"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-79.82509613,39.115810394],[-79.829193115,39.125511169],[-79.836494446,39.127910614],[-79.832298279,39.155612946],[-79.828094482,39.160312653],[-79.82849884,39.176811218],[-79.8227005,39.185913086],[-79.826393127,39.195011139],[-79.820198059,39.199310303],[-79.813995361,39.211212158],[-79.816398621,39.218212128],[-79.809494019,39.22341156],[-79.809700012,39.235412598],[-79.820999146,39.240810394],[-79.836898804,39.241512299],[-79.84879303,39.246013641],[-79.853294373,39.271312714],[-79.873100281,39.294910431],[-79.895294189,39.299613953],[-79.902099609,39.302810669],[-79.926193237,39.288711548],[-79.936195374,39.296413422],[-79.950195313,39.282012939],[-79.955497742,39.282211304],[-79.960998535,39.269313812],[-79.97089386,39.264614105],[-80.022499084,39.248210907],[-80.149993896,39.238212585],[-80.16559601,39.242012021],[-80.174293518,39.232410431],[-80.224494934,39.171211243],[-80.226493835,39.153411865],[-80.224693298,39.142311096],[-80.22769928,39.113613129],[-80.21799469,39.107913971],[-80.209793091,39.09771347],[-80.19960022,39.098510742],[-80.167800903,39.087612152],[-80.151496887,39.071212769],[-80.144096375,39.056713104],[-80.127799988,39.037811279],[-80.067497253,39.037910461],[-80.054092407,39.035011292],[-80.058097839,39.022911072],[-80.044799805,39.01001358],[-80.054100037,39.005512238],[-80.047698975,38.99641037],[-80.051292419,38.993312836],[-80.053199768,38.975013733],[-80.061599731,38.974811554],[-80.063499451,38.960613251],[-80.073699951,38.951213837],[-80.079696655,38.951412201],[-80.082397461,38.945812225],[-80.081100464,38.947711945],[-79.985595703,38.960411072],[-79.898696899,38.973011017],[-79.892196655,38.980613708],[-79.887496948,39.001213074],[-79.870292664,39.043113708],[-79.862693787,39.072113037],[-79.849395752,39.094413757],[-79.836097717,39.096412659],[-79.824699402,39.10641098],[-79.82509613,39.115810394]]]},"properties":null,"id":"Barbour"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-79.895294189,39.299613953],[-79.894798279,39.435710907],[-79.938194275,39.452213287],[-79.94329834,39.45041275],[-80.010993958,39.417610168],[-80.024894714,39.401512146],[-80.086593628,39.386711121],[-80.097595215,39.40171051],[-80.111694336,39.409812927],[-80.122497559,39.401313782],[-80.197296143,39.392711639],[-80.203796387,39.384910583],[-80.202293396,39.371112823],[-80.20249939,39.280612946],[-80.199699402,39.271110535],[-80.187896729,39.267211914],[-80.16559601,39.242012021],[-80.149993896,39.238212585],[-80.022499084,39.248210907],[-79.97089386,39.264614105],[-79.960998535,39.269313812],[-79.955497742,39.282211304],[-79.950195313,39.282012939],[-79.936195374,39.296413422],[-79.926193237,39.288711548],[-79.902099609,39.302810669],[-79.895294189,39.299613953]]]},"properties":null,"id":"Taylor"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.392593384,38.727611542],[-80.400596619,38.729812622],[-80.41230011,38.727710724],[-80.433197021,38.729312897],[-80.445098877,38.726913452],[-80.457199097,38.738910675],[-80.50869751,38.645511627],[-80.627998352,38.545612335],[-80.651794434,38.526313782],[-80.677696228,38.504013062],[-80.640792847,38.424510956],[-80.661697388,38.412513733],[-80.634597778,38.375411987],[-80.625099182,38.354812622],[-80.61239624,38.366111755],[-80.609596252,38.359012604],[-80.497093201,38.299713135],[-80.43649292,38.267311096],[-80.360099792,38.225811005],[-80.35219574,38.345314026],[-80.330200195,38.335811615],[-80.245498657,38.388412476],[-80.230697632,38.420711517],[-80.18409729,38.525413513],[-80.193695068,38.542812347],[-80.280395508,38.694911957],[-80.288795471,38.690612793],[-80.312095642,38.684013367],[-80.319099426,38.684711456],[-80.32169342,38.694713593],[-80.330993652,38.701213837],[-80.335494995,38.720413208],[-80.344795227,38.720813751],[-80.353096008,38.729812622],[-80.364898682,38.731010437],[-80.388793945,38.729812622],[-80.392593384,38.727611542]]]},"properties":null,"id":"Webster"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.507698059,38.410812378],[-82.531196594,38.404911041],[-82.550895691,38.403213501],[-82.577796936,38.409011841],[-82.59349823,38.421512604],[-82.597198486,38.411911011],[-82.594795227,38.399311066],[-82.599395752,38.389610291],[-82.593399048,38.379512787],[-82.597595215,38.364311218],[-82.597900391,38.345012665],[-82.590194702,38.340610504],[-82.576194763,38.326610565],[-82.5727005,38.312412262],[-82.582496643,38.298610687],[-82.578399658,38.28081131],[-82.574699402,38.274513245],[-82.574798584,38.263313293],[-82.585296631,38.245910645],[-82.604698181,38.247810364],[-82.61239624,38.235111237],[-82.608695984,38.22341156],[-82.599197388,38.217910767],[-82.599197388,38.19751358],[-82.612297058,38.170913696],[-82.619995117,38.169113159],[-82.638397217,38.171211243],[-82.644393921,38.16601181],[-82.638893127,38.156211853],[-82.635894775,38.137710571],[-82.621696472,38.1328125],[-82.619895935,38.12141037],[-82.6065979,38.120811462],[-82.585800171,38.106910706],[-82.585296631,38.093513489],[-82.573493958,38.081912994],[-82.566093445,38.080711365],[-82.559593201,38.072811127],[-82.550895691,38.070213318],[-82.544799805,38.05431366],[-82.537399292,38.044811249],[-82.534599304,38.032112122],[-82.525894165,38.026111603],[-82.523094177,38.013912201],[-82.508598328,38.001411438],[-82.486694336,37.997112274],[-82.483596802,37.984512329],[-82.472198486,37.986713409],[-82.465194702,37.984210968],[-82.469398499,37.973110199],[-82.481300354,37.972812653],[-82.482498169,37.962913513],[-82.471992493,37.959911346],[-82.48639679,37.945613861],[-82.497596741,37.945812225],[-82.491394043,37.935813904],[-82.496696472,37.927513123],[-82.482398987,37.926811218],[-82.487594604,37.919013977],[-82.475097656,37.911113739],[-82.467697144,37.914012909],[-82.452095032,37.90871048],[-82.437896729,37.899711609],[-82.432693481,37.890712738],[-82.421798706,37.885810852],[-82.418998718,37.873710632],[-82.411697388,37.865211487],[-82.422798157,37.863513947],[-82.415100098,37.856212616],[-82.409599304,37.848613739],[-82.394393921,37.842811584],[-82.403694153,37.862911224],[-82.30619812,37.943710327],[-82.290496826,37.955612183],[-82.274597168,37.97271347],[-82.244598389,37.99981308],[-82.235595703,38.003112793],[-82.216995239,38.020011902],[-82.205497742,38.023311615],[-82.205497742,38.029411316],[-82.192199707,38.04731369],[-82.206993103,38.050312042],[-82.210693359,38.057312012],[-82.205993652,38.065811157],[-82.224494934,38.070411682],[-82.234992981,38.076812744],[-82.241096497,38.097511292],[-82.263893127,38.109111786],[-82.267700195,38.114711761],[-82.257499695,38.132213593],[-82.285797119,38.144313812],[-82.278495789,38.160713196],[-82.272399902,38.169113159],[-82.271697998,38.178913116],[-82.27759552,38.204711914],[-82.266593933,38.211711884],[-82.268997192,38.217510223],[-82.26499939,38.228912354],[-82.274993896,38.228210449],[-82.295097351,38.236812592],[-82.311096191,38.256412506],[-82.312599182,38.269012451],[-82.304496765,38.296413422],[-82.291099548,38.304012299],[-82.283798218,38.301410675],[-82.276100159,38.304813385],[-82.286193848,38.319911957],[-82.297195435,38.322811127],[-82.298492432,38.327613831],[-82.309295654,38.329113007],[-82.330299377,38.323810577],[-82.328895569,38.315113068],[-82.341094971,38.306713104],[-82.352798462,38.310112],[-82.507698059,38.410812378]]]},"properties":null,"id":"Wayne"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.619499207,39.449310303],[-80.632598877,39.456512451],[-80.641395569,39.466712952],[-80.671295166,39.466411591],[-80.670295715,39.474212646],[-80.680000305,39.477611542],[-80.685997009,39.47051239],[-80.693397522,39.473812103],[-80.710693359,39.475711823],[-80.720397949,39.480911255],[-80.723197937,39.491210937],[-80.734596251,39.492012026],[-80.740898132,39.511611938],[-80.784095764,39.536411285],[-80.783195496,39.547710419],[-80.81149292,39.561012268],[-80.820098877,39.554710388],[-80.827194214,39.562911987],[-80.831497192,39.576812744],[-80.842697144,39.583610535],[-80.849098206,39.591011047],[-80.850898743,39.60131073],[-80.885993958,39.580310822],[-80.933700562,39.600212097],[-80.94379425,39.606910706],[-80.970397949,39.590110779],[-80.996795654,39.56911087],[-81.023696899,39.552513123],[-81.038398743,39.540412903],[-81.071098328,39.515411377],[-81.100799561,39.486812592],[-81.121498108,39.457813263],[-81.107398987,39.463111877],[-81.095695496,39.457611084],[-81.07849884,39.467510223],[-81.060699463,39.463813782],[-81.054100037,39.467712402],[-81.031799316,39.468212128],[-81.025596619,39.463111877],[-81.023094177,39.441810608],[-81.015296936,39.410610199],[-81.007499695,39.350311279],[-81.002197266,39.341510773],[-80.994499207,39.341011047],[-80.986999512,39.346813202],[-80.958198547,39.340213776],[-80.946800232,39.353713989],[-80.941299438,39.364513397],[-80.942893982,39.378410339],[-80.934898376,39.388011932],[-80.918899536,39.384010315],[-80.910095215,39.375110626],[-80.898300171,39.37361145],[-80.894996644,39.367912292],[-80.88369751,39.360713959],[-80.885696411,39.351512909],[-80.88469696,39.330711365],[-80.891395569,39.326313019],[-80.891494751,39.314910889],[-80.884796143,39.30891037],[-80.888397217,39.29511261],[-80.829193115,39.34331131],[-80.733398438,39.395412445],[-80.713798523,39.429813385],[-80.698493958,39.43221283],[-80.690696716,39.426811218],[-80.676895142,39.428012848],[-80.658699036,39.433013916],[-80.648498535,39.443012238],[-80.632194519,39.443611145],[-80.619499207,39.449310303]]]},"properties":null,"id":"Tyler"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.277397156,38.915611267],[-81.308799744,38.908111572],[-81.34299469,38.89591217],[-81.415100098,38.920711517],[-81.435096741,38.933311462],[-81.449699402,38.940311432],[-81.457595825,38.932712555],[-81.480995178,38.924713135],[-81.495994568,38.924613953],[-81.502799988,38.917713165],[-81.51159668,38.904613495],[-81.526992798,38.901912689],[-81.528999329,38.891010284],[-81.521697998,38.82321167],[-81.528999329,38.750110626],[-81.533493042,38.730113983],[-81.538795471,38.698112488],[-81.546592712,38.672412872],[-81.537597656,38.646213531],[-81.522193909,38.612312317],[-81.507598877,38.582710266],[-81.470695496,38.546813965],[-81.461196899,38.549510956],[-81.348495483,38.541213989],[-81.303497314,38.534812927],[-81.274299622,38.521812439],[-81.220596313,38.53181076],[-81.194099426,38.527610779],[-81.132896423,38.564613342],[-81.124794006,38.571113586],[-81.08379364,38.612613678],[-81.083297729,38.619213104],[-81.102294922,38.638713837],[-81.120498657,38.630912781],[-81.131599426,38.637710571],[-81.158195496,38.644012451],[-81.165794373,38.660614014],[-81.16179657,38.672012329],[-81.171798706,38.67591095],[-81.173194885,38.686912537],[-81.167098999,38.694911957],[-81.171600342,38.706611633],[-81.17149353,38.72051239],[-81.175895691,38.723114014],[-81.169898987,38.73261261],[-81.17489624,38.742511749],[-81.18359375,38.747612],[-81.196594238,38.767410278],[-81.211799622,38.780212402],[-81.209899902,38.785312653],[-81.226997375,38.792613983],[-81.218597412,38.798110962],[-81.222000122,38.805713654],[-81.218795776,38.834213257],[-81.220397949,38.842712402],[-81.245796204,38.852710724],[-81.244796753,38.858913422],[-81.23589325,38.862613678],[-81.241996765,38.873111725],[-81.238899231,38.881111145],[-81.259597778,38.893112183],[-81.256095886,38.898513794],[-81.258796692,38.907211304],[-81.277397156,38.915611267]]]},"properties":null,"id":"Roane"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.888397217,39.29511261],[-80.884796143,39.30891037],[-80.891494751,39.314910889],[-80.891395569,39.326313019],[-80.88469696,39.330711365],[-80.885696411,39.351512909],[-80.88369751,39.360713959],[-80.894996644,39.367912292],[-80.898300171,39.37361145],[-80.910095215,39.375110626],[-80.918899536,39.384010315],[-80.934898376,39.388011932],[-80.942893982,39.378410339],[-80.941299438,39.364513397],[-80.946800232,39.353713989],[-80.958198547,39.340213776],[-80.986999512,39.346813202],[-80.994499207,39.341011047],[-81.002197266,39.341510773],[-81.007499695,39.350311279],[-81.016098022,39.35131073],[-81.024795532,39.346813202],[-81.035697937,39.345813751],[-81.043495178,39.337810516],[-81.043495178,39.330612183],[-81.060195923,39.323513031],[-81.08139801,39.330513],[-81.102996826,39.323413849],[-81.112495422,39.324012756],[-81.12159729,39.314311981],[-81.141700745,39.313510895],[-81.158599854,39.310211182],[-81.165596008,39.312011719],[-81.172096252,39.305713654],[-81.190093994,39.313613892],[-81.190093994,39.306812286],[-81.214897156,39.308612823],[-81.22480011,39.301311493],[-81.231498718,39.289710999],[-81.231895447,39.27381134],[-81.239593506,39.268310547],[-81.249198914,39.270812988],[-81.266593933,39.26071167],[-81.271194458,39.24861145],[-81.277099609,39.242210388],[-81.277297974,39.234111786],[-81.29119873,39.223712921],[-81.301597595,39.200210571],[-81.297393799,39.185512543],[-81.29599762,39.181312561],[-81.305595398,39.171611786],[-81.315895081,39.166713715],[-81.310493469,39.158611298],[-81.328399658,39.151012421],[-81.323699951,39.142311096],[-81.298294067,39.130111694],[-81.289299011,39.132213593],[-81.278900146,39.121212006],[-81.276100159,39.108211517],[-81.2654953,39.107913971],[-81.25869751,39.093612671],[-81.260498047,39.084911346],[-81.257896423,39.069812775],[-81.272697449,39.059612274],[-81.253799438,39.055412292],[-81.250495911,39.050411224],[-81.249992371,39.035011292],[-81.2397995,39.037910461],[-81.228294373,39.032512665],[-81.224899292,39.040210724],[-81.21169281,39.04511261],[-81.200996399,39.038211823],[-81.179695129,39.040611267],[-81.174995422,39.03521347],[-81.162696838,39.03081131],[-81.156394958,39.039710999],[-81.12789917,39.044013977],[-81.116096497,39.04221344],[-81.111000061,39.030212402],[-81.093093872,39.016311646],[-81.082298279,39.016311646],[-81.076393127,39.010612488],[-81.060295105,39.008712769],[-81.033996582,39.009513855],[-81.028999329,39.012012482],[-81.007194519,39.009113312],[-80.998397827,39.012313843],[-80.98109436,39.026611328],[-80.970596313,39.027511597],[-80.960197449,39.038013458],[-80.952194214,39.038414001],[-80.933494568,39.044712067],[-80.91859436,39.043712616],[-80.897293091,39.057113647],[-80.895095825,39.063812256],[-80.886695862,39.064311981],[-80.876998901,39.07321167],[-80.859100342,39.074211121],[-80.850898743,39.084411621],[-80.837394714,39.082012177],[-80.8279953,39.092010498],[-80.81879425,39.092212677],[-80.811096191,39.099113464],[-80.812797546,39.108211517],[-80.849494934,39.171413422],[-80.913795471,39.272911072],[-80.888397217,39.29511261]]]},"properties":null,"id":"Ritchie"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.604698181,38.247810364],[-82.585296631,38.245910645],[-82.574798584,38.263313293],[-82.574699402,38.274513245],[-82.578399658,38.28081131],[-82.582496643,38.298610687],[-82.5727005,38.312412262],[-82.576194763,38.326610565],[-82.590194702,38.340610504],[-82.597900391,38.345012665],[-82.597595215,38.364311218],[-82.593399048,38.379512787],[-82.599395752,38.389610291],[-82.594795227,38.399311066],[-82.597198486,38.411911011],[-82.59349823,38.421512604],[-82.600799561,38.437713623],[-82.603897095,38.458511353],[-82.609992981,38.470912933],[-82.624198914,38.479610443],[-82.636993408,38.484210968],[-82.656494141,38.49641037],[-82.665794373,38.506011963],[-82.742393494,38.451812744],[-82.770195008,38.432613376],[-82.779396057,38.414310455],[-82.78679657,38.412311554],[-82.793395996,38.401611328],[-82.808197021,38.395210266],[-82.809593201,38.386810303],[-82.817497253,38.374011993],[-82.785194397,38.365211487],[-82.773193359,38.363113403],[-82.77179718,38.358512878],[-82.781196594,38.34431076],[-82.785995483,38.303710937],[-82.793296814,38.250110626],[-82.79309845,38.244411469],[-82.786895752,38.244613647],[-82.763595581,38.235012054],[-82.754295349,38.237411499],[-82.73690033,38.237812042],[-82.724594116,38.243412018],[-82.721794128,38.248710632],[-82.695495605,38.262313843],[-82.63759613,38.266811371],[-82.628196716,38.26871109],[-82.609695435,38.264011383],[-82.604598999,38.255413055],[-82.604698181,38.247810364]]]},"properties":null,"id":"Boyd"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.817497253,38.374011993],[-82.912796021,38.39881134],[-82.928894043,38.397010803],[-82.938293457,38.401313782],[-82.958198547,38.394611359],[-82.970199585,38.393913269],[-82.980697632,38.400211334],[-82.987098694,38.41021347],[-83.001495361,38.411510468],[-83.001792908,38.419311523],[-83.007896423,38.425811768],[-83.051895142,38.465213776],[-83.06489563,38.466110229],[-83.089294434,38.480113983],[-83.121398926,38.485912323],[-83.135795593,38.49571228],[-83.147994995,38.497413635],[-83.159095764,38.503913879],[-83.166893005,38.504112244],[-83.176696777,38.503112793],[-83.183998108,38.496513367],[-83.187698364,38.480010986],[-83.186897278,38.470413208],[-83.196998596,38.464412689],[-83.199493408,38.454914093],[-83.213195801,38.454612732],[-83.208496094,38.442012787],[-83.220596313,38.432113647],[-83.236198425,38.4269104],[-83.23449707,38.410511017],[-83.237197876,38.398014069],[-83.232093811,38.383613586],[-83.234199524,38.380313873],[-83.234100342,38.359912872],[-83.239295959,38.35641098],[-83.231300354,38.339412689],[-83.266693115,38.326313019],[-83.276794434,38.323913574],[-83.292297363,38.329013824],[-83.319793701,38.331813812],[-83.330200195,38.321411133],[-83.340499878,38.315513611],[-83.32749939,38.291110992],[-83.33429718,38.286613464],[-83.316894531,38.267513275],[-83.318595886,38.261211395],[-83.308799744,38.252811432],[-83.312294006,38.243011475],[-83.306793213,38.234313965],[-83.306900024,38.226512909],[-83.297599792,38.21761322],[-83.300796509,38.210411072],[-83.285293579,38.212013245],[-83.285797119,38.206310272],[-83.275695801,38.203010559],[-83.272895813,38.19601059],[-83.26210022,38.189910889],[-83.254592896,38.193813324],[-83.242195129,38.191711426],[-83.232696533,38.19751358],[-83.221794128,38.210613251],[-83.223197937,38.221710205],[-83.218696594,38.234012604],[-83.203598022,38.246212006],[-83.185798645,38.253810883],[-83.176696777,38.266410828],[-83.164398193,38.266010284],[-83.134994507,38.241912842],[-83.118995667,38.233711243],[-83.111793517,38.235610964],[-83.103096008,38.227012634],[-83.091896057,38.227310181],[-83.075897217,38.207210541],[-83.062393188,38.19751358],[-83.060195923,38.191810608],[-83.048194885,38.191913605],[-83.041496277,38.198810577],[-83.027297974,38.195510864],[-83.01259613,38.204410553],[-83.009399414,38.193611145],[-82.990898132,38.190612793],[-82.989494324,38.179710388],[-82.976196289,38.172412872],[-82.958496094,38.173912048],[-82.944099426,38.168811798],[-82.924095154,38.174613953],[-82.897094727,38.183410645],[-82.882293701,38.195911407],[-82.853797913,38.19021225],[-82.838493347,38.202812195],[-82.824493408,38.207710266],[-82.813293457,38.21931076],[-82.823699951,38.229011536],[-82.810897827,38.238311768],[-82.794494629,38.240310669],[-82.79309845,38.244411469],[-82.793296814,38.250110626],[-82.785995483,38.303710937],[-82.781196594,38.34431076],[-82.77179718,38.358512878],[-82.773193359,38.363113403],[-82.785194397,38.365211487],[-82.817497253,38.374011993]]]},"properties":null,"id":"Carter"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.815292358,38.570713043],[-82.841995239,38.588314056],[-82.847396851,38.595710754],[-82.855194092,38.617412567],[-82.857093811,38.632110596],[-82.8565979,38.648513794],[-82.861999512,38.667011261],[-82.875198364,38.683712006],[-82.877494812,38.6912117],[-82.869895935,38.728713989],[-82.871894836,38.741111755],[-82.876098633,38.747913361],[-82.89339447,38.75661087],[-82.928993225,38.748512268],[-82.943199158,38.743312836],[-82.969299316,38.728610992],[-82.979393005,38.726013184],[-83.003593445,38.729812622],[-83.026893616,38.72751236],[-83.038093567,38.6994133],[-83.029296875,38.687511444],[-83.041496277,38.663612366],[-83.051193237,38.662712097],[-83.049499512,38.651210785],[-83.051795959,38.640411377],[-83.063796997,38.638111115],[-83.073394775,38.6328125],[-83.069999695,38.621112823],[-83.08039856,38.612213135],[-83.077392578,38.609111786],[-83.073898315,38.590812683],[-83.081298828,38.587612152],[-83.098693848,38.564212799],[-83.10269928,38.553512573],[-83.124595642,38.534912109],[-83.137992859,38.535411835],[-83.146697998,38.530212402],[-83.150497437,38.519111633],[-83.155593872,38.518611908],[-83.166893005,38.504112244],[-83.159095764,38.503913879],[-83.147994995,38.497413635],[-83.135795593,38.49571228],[-83.121398926,38.485912323],[-83.089294434,38.480113983],[-83.06489563,38.466110229],[-83.051895142,38.465213776],[-83.007896423,38.425811768],[-83.001792908,38.419311523],[-83.001495361,38.411510468],[-82.987098694,38.41021347],[-82.980697632,38.400211334],[-82.970199585,38.393913269],[-82.958198547,38.394611359],[-82.938293457,38.401313782],[-82.928894043,38.397010803],[-82.912796021,38.39881134],[-82.817497253,38.374011993],[-82.809593201,38.386810303],[-82.808197021,38.395210266],[-82.793395996,38.401611328],[-82.78679657,38.412311554],[-82.779396057,38.414310455],[-82.770195008,38.432613376],[-82.742393494,38.451812744],[-82.665794373,38.506011963],[-82.674499512,38.514610291],[-82.689598083,38.535511017],[-82.700798035,38.544612885],[-82.729194641,38.55891037],[-82.764900208,38.560512543],[-82.779495239,38.559013367],[-82.794593811,38.561210632],[-82.815292358,38.570713043]]]},"properties":null,"id":"Greenup"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.300292969,39.104011536],[-80.458297729,39.139812469],[-80.467193604,39.145313263],[-80.480400085,39.145111084],[-80.487594604,39.148212433],[-80.507797241,39.149211884],[-80.52129364,39.14591217],[-80.536094666,39.1471138],[-80.546592712,39.152313232],[-80.550292969,39.159111023],[-80.563400269,39.161312103],[-80.57699585,39.16721344],[-80.590293884,39.168811798],[-80.604598999,39.155910492],[-80.616096497,39.154411316],[-80.624099731,39.142612457],[-80.637001038,39.13791275],[-80.650497437,39.137813568],[-80.656394958,39.141010284],[-80.664398193,39.131511688],[-80.681694031,39.120311737],[-80.681495667,39.112911224],[-80.703895569,39.090812683],[-80.728294373,39.095710754],[-80.728897095,39.083511353],[-80.716995239,39.009410858],[-80.71169281,39.003513336],[-80.623893738,38.921211243],[-80.607597351,38.904613495],[-80.597496033,38.895011902],[-80.591094971,38.883712769],[-80.592895508,38.872310638],[-80.586097717,38.860713959],[-80.58089447,38.858310699],[-80.558395386,38.867111206],[-80.545394897,38.854312897],[-80.531700134,38.856212616],[-80.522895813,38.862113953],[-80.508499146,38.854812622],[-80.500595093,38.845111847],[-80.490097046,38.844612122],[-80.473899841,38.829811096],[-80.485298157,38.801113129],[-80.47429657,38.794910431],[-80.462799072,38.79511261],[-80.463493347,38.78421402],[-80.457099915,38.775310516],[-80.444000244,38.775310516],[-80.452293396,38.7619133],[-80.451698303,38.748710632],[-80.457199097,38.738910675],[-80.445098877,38.726913452],[-80.433197021,38.729312897],[-80.41230011,38.727710724],[-80.400596619,38.729812622],[-80.392593384,38.727611542],[-80.409500122,38.768211365],[-80.387496948,38.872211456],[-80.380096436,38.887313843],[-80.36819458,38.887813568],[-80.361099243,38.900310516],[-80.3618927,38.905513763],[-80.348396301,38.916812897],[-80.349197388,38.921012878],[-80.339599609,38.93221283],[-80.346298218,38.941513062],[-80.334999084,38.947013855],[-80.328399658,38.954612732],[-80.317993164,38.958610535],[-80.319892883,38.969512939],[-80.32559967,38.974212646],[-80.334495544,38.996612549],[-80.318695068,39.012710571],[-80.305198669,39.061210632],[-80.300292969,39.104011536]]]},"properties":null,"id":"Lewis"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.197296143,39.392711639],[-80.252998352,39.421512604],[-80.28339386,39.435710907],[-80.293296814,39.43441391],[-80.333396912,39.441013336],[-80.377593994,39.449314117],[-80.49029541,39.468811035],[-80.494400024,39.461112976],[-80.51210022,39.458610535],[-80.517997742,39.449611664],[-80.519195557,39.434711456],[-80.532096863,39.433513641],[-80.544494629,39.428310394],[-80.537597656,39.419712067],[-80.539398193,39.410312653],[-80.547798157,39.403713226],[-80.546699524,39.396411896],[-80.538597107,39.385913849],[-80.539794922,39.373413086],[-80.544197083,39.367210388],[-80.532699585,39.360813141],[-80.533500671,39.355010986],[-80.542793274,39.352413177],[-80.546592712,39.344413757],[-80.553794861,39.340213776],[-80.561798096,39.329410553],[-80.565093994,39.320213318],[-80.582199097,39.316310883],[-80.586196899,39.303512573],[-80.593193054,39.3019104],[-80.603500366,39.291313171],[-80.596595764,39.286212921],[-80.578796387,39.282711029],[-80.578399658,39.276111603],[-80.585899353,39.270210266],[-80.58719635,39.261310577],[-80.569496155,39.251113892],[-80.561599731,39.24281311],[-80.550697327,39.237911224],[-80.547798157,39.229011536],[-80.560394287,39.231113434],[-80.565994263,39.221912384],[-80.554496765,39.21421051],[-80.546897888,39.216510773],[-80.526695251,39.209911346],[-80.531799316,39.194812775],[-80.550895691,39.186412811],[-80.557197571,39.190311432],[-80.562194824,39.178710937],[-80.575500488,39.17521286],[-80.591400146,39.182113647],[-80.590293884,39.168811798],[-80.57699585,39.16721344],[-80.563400269,39.161312103],[-80.550292969,39.159111023],[-80.546592712,39.152313232],[-80.536094666,39.1471138],[-80.52129364,39.14591217],[-80.507797241,39.149211884],[-80.487594604,39.148212433],[-80.480400085,39.145111084],[-80.467193604,39.145313263],[-80.458297729,39.139812469],[-80.300292969,39.104011536],[-80.249298096,39.100811005],[-80.240493774,39.112911224],[-80.22769928,39.113613129],[-80.224693298,39.142311096],[-80.226493835,39.153411865],[-80.224494934,39.171211243],[-80.174293518,39.232410431],[-80.16559601,39.242012021],[-80.187896729,39.267211914],[-80.199699402,39.271110535],[-80.20249939,39.280612946],[-80.202293396,39.371112823],[-80.203796387,39.384910583],[-80.197296143,39.392711639]]]},"properties":null,"id":"Harrison"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-80.544494629,39.428310394],[-80.557693481,39.432910919],[-80.559494019,39.439311981],[-80.578193665,39.434612274],[-80.577796936,39.444511414],[-80.583099365,39.451412201],[-80.596694946,39.448612213],[-80.605796814,39.443611145],[-80.613395691,39.450313568],[-80.619499207,39.449310303],[-80.632194519,39.443611145],[-80.648498535,39.443012238],[-80.658699036,39.433013916],[-80.676895142,39.428012848],[-80.690696716,39.426811218],[-80.698493958,39.43221283],[-80.713798523,39.429813385],[-80.733398438,39.395412445],[-80.829193115,39.34331131],[-80.888397217,39.29511261],[-80.913795471,39.272911072],[-80.849494934,39.171413422],[-80.812797546,39.108211517],[-80.805999756,39.108310699],[-80.749198914,39.099811554],[-80.728294373,39.095710754],[-80.703895569,39.090812683],[-80.681495667,39.112911224],[-80.681694031,39.120311737],[-80.664398193,39.131511688],[-80.656394958,39.141010284],[-80.650497437,39.137813568],[-80.637001038,39.13791275],[-80.624099731,39.142612457],[-80.616096497,39.154411316],[-80.604598999,39.155910492],[-80.590293884,39.168811798],[-80.591400146,39.182113647],[-80.575500488,39.17521286],[-80.562194824,39.178710937],[-80.557197571,39.190311432],[-80.550895691,39.186412811],[-80.531799316,39.194812775],[-80.526695251,39.209911346],[-80.546897888,39.216510773],[-80.554496765,39.21421051],[-80.565994263,39.221912384],[-80.560394287,39.231113434],[-80.547798157,39.229011536],[-80.550697327,39.237911224],[-80.561599731,39.24281311],[-80.569496155,39.251113892],[-80.58719635,39.261310577],[-80.585899353,39.270210266],[-80.578399658,39.276111603],[-80.578796387,39.282711029],[-80.596595764,39.286212921],[-80.603500366,39.291313171],[-80.593193054,39.3019104],[-80.586196899,39.303512573],[-80.582199097,39.316310883],[-80.565093994,39.320213318],[-80.561798096,39.329410553],[-80.553794861,39.340213776],[-80.546592712,39.344413757],[-80.542793274,39.352413177],[-80.533500671,39.355010986],[-80.532699585,39.360813141],[-80.544197083,39.367210388],[-80.539794922,39.373413086],[-80.538597107,39.385913849],[-80.546699524,39.396411896],[-80.547798157,39.403713226],[-80.539398193,39.410312653],[-80.537597656,39.419712067],[-80.544494629,39.428310394]]]},"properties":null,"id":"Doddridge"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.435287476,39.035293579],[-82.424697876,39.138011932],[-82.49319458,39.14081192],[-82.540298462,39.144512177],[-82.537094116,39.1810112],[-82.574493408,39.185512543],[-82.611000061,39.185611725],[-82.651496887,39.189613342],[-82.650993347,39.202312469],[-82.762893677,39.207813263],[-82.767196655,39.168010712],[-82.785896301,39.168811798],[-82.796699524,39.0662117],[-82.798698425,39.028511047],[-82.801498413,39.006313324],[-82.807098389,38.94821167],[-82.769599915,38.946212769],[-82.753494263,38.944011688],[-82.758499146,38.886310577],[-82.761894226,38.869911194],[-82.760696411,38.854911804],[-82.650093079,38.849113464],[-82.575593031,38.844512999],[-82.574394226,38.853591919],[-82.49949646,38.848693848],[-82.455184937,38.84469223],[-82.439399719,38.98481369],[-82.435287476,39.035293579]]]},"properties":null,"id":"Jackson"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.047798157,39.550613403],[-82.159194946,39.556312561],[-82.166595459,39.474514008],[-82.167999268,39.466213226],[-82.279594421,39.472610474],[-82.282699585,39.449611664],[-82.289497375,39.38401413],[-82.298599243,39.294910431],[-82.2602005,39.29291153],[-82.268699646,39.203811646],[-82.189399719,39.199211121],[-82.058700562,39.192512512],[-81.945198059,39.187610626],[-81.756095886,39.18031311],[-81.73589325,39.195911407],[-81.732696533,39.207111359],[-81.725296021,39.216213226],[-81.721694946,39.269412994],[-81.81829834,39.272212982],[-81.816596985,39.301811218],[-81.835594177,39.303512573],[-81.83480072,39.317310333],[-81.853492737,39.317913055],[-81.844894409,39.4494133],[-81.875495911,39.451511383],[-81.940597534,39.454212189],[-82.056098938,39.460010529],[-82.047393799,39.548610687],[-82.047798157,39.550613403]]]},"properties":null,"id":"Athens"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.21849823,38.591812134],[-82.19379425,38.593112946],[-82.181297302,38.599910736],[-82.175796509,38.606712341],[-82.172096252,38.619693756],[-82.176994324,38.643493652],[-82.184997559,38.658691406],[-82.190406799,38.686313629],[-82.182594299,38.707511902],[-82.189796448,38.737392426],[-82.195594788,38.752494812],[-82.201499939,38.760494232],[-82.216995239,38.768894196],[-82.221694946,38.7864151],[-82.21509552,38.798110962],[-82.190292358,38.815513611],[-82.176399231,38.818412781],[-82.162597656,38.824012756],[-82.144996643,38.840511322],[-82.139396667,38.863513947],[-82.144996643,38.882991791],[-82.142799377,38.898212433],[-82.130393982,38.907711029],[-82.111999512,38.933113098],[-82.10899353,38.9453125],[-82.097694397,38.960613251],[-82.098587036,38.963993073],[-82.09589386,38.998893738],[-82.096694946,39.002994537],[-82.246795654,39.00989151],[-82.274093628,39.010692596],[-82.323890686,39.014091492],[-82.322990417,39.026893616],[-82.32849884,39.028194427],[-82.435287476,39.035293579],[-82.439399719,38.98481369],[-82.455184937,38.84469223],[-82.49949646,38.848693848],[-82.574394226,38.853591919],[-82.575593031,38.844512999],[-82.581993103,38.779212952],[-82.500198364,38.773513794],[-82.484298706,38.771713257],[-82.486595154,38.743011475],[-82.467102051,38.741012573],[-82.472496033,38.682312012],[-82.354507446,38.676113129],[-82.361595154,38.586112976],[-82.357894897,38.585010529],[-82.292999268,38.583011627],[-82.288696289,38.581512451],[-82.274299622,38.593711853],[-82.263595581,38.597511292],[-82.25,38.598712921],[-82.21849823,38.591812134]]]},"properties":null,"id":"Gallia"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.288696289,38.581512451],[-82.292999268,38.583011627],[-82.357894897,38.585010529],[-82.361595154,38.586112976],[-82.354507446,38.676113129],[-82.472496033,38.682312012],[-82.467102051,38.741012573],[-82.486595154,38.743011475],[-82.484298706,38.771713257],[-82.500198364,38.773513794],[-82.581993103,38.779212952],[-82.575593031,38.844512999],[-82.650093079,38.849113464],[-82.65839386,38.771011353],[-82.662796021,38.749111176],[-82.695396423,38.750011444],[-82.701599121,38.690811157],[-82.706695557,38.677711487],[-82.763893127,38.680011749],[-82.766098022,38.644611359],[-82.74079895,38.598011017],[-82.748794556,38.59431076],[-82.815292358,38.570713043],[-82.794593811,38.561210632],[-82.779495239,38.559013367],[-82.764900208,38.560512543],[-82.729194641,38.55891037],[-82.700798035,38.544612885],[-82.689598083,38.535511017],[-82.674499512,38.514610291],[-82.665794373,38.506011963],[-82.656494141,38.49641037],[-82.636993408,38.484210968],[-82.624198914,38.479610443],[-82.609992981,38.470912933],[-82.603897095,38.458511353],[-82.600799561,38.437713623],[-82.59349823,38.421512604],[-82.577796936,38.409011841],[-82.550895691,38.403213501],[-82.531196594,38.404911041],[-82.507698059,38.410812378],[-82.485794067,38.418312073],[-82.430793762,38.431110382],[-82.404396057,38.439212799],[-82.389396667,38.43441391],[-82.339393616,38.441413879],[-82.323699951,38.449913025],[-82.315200806,38.464412689],[-82.310592651,38.483112335],[-82.304893494,38.494113922],[-82.302696228,38.523712158],[-82.29599762,38.537811279],[-82.293395996,38.557113647],[-82.293792725,38.572910309],[-82.288696289,38.581512451]]]},"properties":null,"id":"Lawrence"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.756095886,39.18031311],[-81.945198059,39.187610626],[-82.058700562,39.192512512],[-82.189399719,39.199211121],[-82.268699646,39.203811646],[-82.306297302,39.205413818],[-82.322990417,39.026893616],[-82.323890686,39.014091492],[-82.274093628,39.010692596],[-82.246795654,39.00989151],[-82.096694946,39.002994537],[-82.09589386,38.998893738],[-82.098587036,38.963993073],[-82.097694397,38.960613251],[-82.089393616,38.975711823],[-82.06879425,38.985012054],[-82.051399231,38.99471283],[-82.036697388,39.024913788],[-82.019798279,39.029911041],[-82.003593445,39.028511047],[-81.991798401,39.018913269],[-81.984893799,39.0078125],[-81.9815979,38.995010376],[-81.974494934,38.992412567],[-81.950996399,38.995910645],[-81.932693481,38.986812592],[-81.91759491,38.965610504],[-81.908996582,38.948711395],[-81.900398254,38.937011719],[-81.900398254,38.924911499],[-81.926696777,38.901313782],[-81.927497864,38.892612457],[-81.907897949,38.878112793],[-81.888694763,38.874610901],[-81.858894348,38.890213013],[-81.84929657,38.901012421],[-81.845596313,38.911411285],[-81.845298767,38.922813416],[-81.838096619,38.937110901],[-81.824996948,38.946613312],[-81.806900024,38.942611694],[-81.793395996,38.930213928],[-81.774093628,38.92301178],[-81.75919342,38.9269104],[-81.756195068,38.93441391],[-81.778793335,38.955913544],[-81.781692505,38.963611603],[-81.776199341,38.979911804],[-81.774597168,38.992610931],[-81.765098572,39.003112793],[-81.765296936,39.017211914],[-81.772399902,39.025611877],[-81.803894043,39.048511505],[-81.812095642,39.061012268],[-81.813697815,39.078811646],[-81.807296753,39.083911896],[-81.779594421,39.078113556],[-81.76159668,39.083713531],[-81.747299194,39.095413208],[-81.74369812,39.103713989],[-81.744598389,39.126811981],[-81.743293762,39.145511627],[-81.756095886,39.1769104],[-81.756095886,39.18031311]]]},"properties":null,"id":"Meigs"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.588195801,39.587013245],[-81.586097717,39.663013458],[-81.594696045,39.664310455],[-81.63759613,39.665512085],[-81.643096924,39.666912079],[-81.639793396,39.752613068],[-81.917793274,39.764812469],[-81.964698792,39.76581192],[-82.003898621,39.768211365],[-82.034294128,39.76871109],[-82.076599121,39.77091217],[-82.080192566,39.727710724],[-82.024200439,39.724712372],[-82.039596558,39.55021286],[-82.047798157,39.550613403],[-82.047393799,39.548610687],[-82.056098938,39.460010529],[-81.940597534,39.454212189],[-81.875495911,39.451511383],[-81.844894409,39.4494133],[-81.825798035,39.449211121],[-81.823295593,39.48991394],[-81.821899414,39.494113922],[-81.783493042,39.492610931],[-81.783798218,39.489513397],[-81.760993958,39.487812042],[-81.749694824,39.482013702],[-81.708496094,39.480812073],[-81.705093384,39.530212402],[-81.721794128,39.532012939],[-81.723197937,39.54901123],[-81.717597961,39.561912537],[-81.716392517,39.581611633],[-81.709197998,39.586112976],[-81.696395874,39.586811066],[-81.687393188,39.590511322],[-81.588195801,39.587013245]]]},"properties":null,"id":"Morgan"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.076599121,39.77091217],[-82.072998047,39.815811157],[-82.155197144,39.820011139],[-82.169998169,39.821712494],[-82.164596558,39.878513336],[-82.162796021,39.909412384],[-82.218193054,39.912513733],[-82.30619812,39.916213989],[-82.412895203,39.922111511],[-82.418197632,39.927913666],[-82.462799072,39.930412292],[-82.463996887,39.922710419],[-82.471992493,39.837711334],[-82.458198547,39.836212158],[-82.396400452,39.832611084],[-82.403999329,39.744613647],[-82.366096497,39.742511749],[-82.374595642,39.654911041],[-82.379798889,39.596813202],[-82.312599182,39.592811584],[-82.262794495,39.590610504],[-82.263595581,39.562110901],[-82.159194946,39.556312561],[-82.047798157,39.550613403],[-82.039596558,39.55021286],[-82.024200439,39.724712372],[-82.080192566,39.727710724],[-82.076599121,39.77091217]]]},"properties":null,"id":"Perry"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.289497375,39.38401413],[-82.42049408,39.391212463],[-82.51449585,39.395812988],[-82.517593384,39.369510651],[-82.634399414,39.375610352],[-82.635696411,39.361610413],[-82.685195923,39.363811493],[-82.748596191,39.368213654],[-82.762893677,39.207813263],[-82.650993347,39.202312469],[-82.651496887,39.189613342],[-82.611000061,39.185611725],[-82.574493408,39.185512543],[-82.537094116,39.1810112],[-82.540298462,39.144512177],[-82.49319458,39.14081192],[-82.424697876,39.138011932],[-82.435287476,39.035293579],[-82.32849884,39.028194427],[-82.322990417,39.026893616],[-82.306297302,39.205413818],[-82.268699646,39.203811646],[-82.2602005,39.29291153],[-82.298599243,39.294910431],[-82.289497375,39.38401413]]]},"properties":null,"id":"Vinton"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-81.038398743,39.540412903],[-81.037094116,39.571613312],[-81.045692444,39.572410583],[-81.13659668,39.575710297],[-81.246299744,39.579013824],[-81.264099121,39.576812744],[-81.264297485,39.605312347],[-81.283096313,39.606712341],[-81.283500671,39.591812134],[-81.356292725,39.593513489],[-81.356292725,39.600914001],[-81.394897461,39.601512909],[-81.394699097,39.614810944],[-81.412994385,39.615913391],[-81.414199829,39.630210876],[-81.450996399,39.631111145],[-81.451896667,39.645412445],[-81.473197937,39.646011353],[-81.473495483,39.583610535],[-81.525398254,39.584712982],[-81.588195801,39.587013245],[-81.687393188,39.590511322],[-81.696395874,39.586811066],[-81.709197998,39.586112976],[-81.716392517,39.581611633],[-81.717597961,39.561912537],[-81.723197937,39.54901123],[-81.721794128,39.532012939],[-81.705093384,39.530212402],[-81.708496094,39.480812073],[-81.749694824,39.482013702],[-81.760993958,39.487812042],[-81.783798218,39.489513397],[-81.783493042,39.492610931],[-81.821899414,39.494113922],[-81.823295593,39.48991394],[-81.825798035,39.449211121],[-81.844894409,39.4494133],[-81.853492737,39.317913055],[-81.83480072,39.317310333],[-81.835594177,39.303512573],[-81.816596985,39.301811218],[-81.81829834,39.272212982],[-81.721694946,39.269412994],[-81.725296021,39.216213226],[-81.70249939,39.22051239],[-81.691497803,39.227813721],[-81.696899414,39.247810364],[-81.696296689,39.25661087],[-81.682792664,39.271411896],[-81.672393799,39.275512695],[-81.643798828,39.27721405],[-81.621795654,39.273712158],[-81.608894348,39.276012421],[-81.585296631,39.26871109],[-81.569900513,39.268112183],[-81.56539917,39.276012421],[-81.565093994,39.29391098],[-81.560195923,39.317913055],[-81.557594299,39.338813782],[-81.533599854,39.358612061],[-81.513999939,39.366710663],[-81.488998413,39.384113312],[-81.473693848,39.399711609],[-81.456092834,39.409313202],[-81.435195923,39.408313751],[-81.420593262,39.400413513],[-81.406898499,39.388412476],[-81.395896912,39.355510712],[-81.383293152,39.343013763],[-81.370994568,39.341213226],[-81.350799561,39.344612122],[-81.320098877,39.361011505],[-81.296798706,39.374713898],[-81.28099823,39.379413605],[-81.269897461,39.386112213],[-81.241798401,39.390312195],[-81.223594666,39.386013031],[-81.211296082,39.394210815],[-81.211196899,39.402511597],[-81.205200195,39.410812378],[-81.18989563,39.424510956],[-81.185096741,39.431510925],[-81.163497925,39.4412117],[-81.133399963,39.445713043],[-81.121498108,39.457813263],[-81.100799561,39.486812592],[-81.071098328,39.515411377],[-81.038398743,39.540412903]]]},"properties":null,"id":"Washington"},
{"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-82.604698181,38.247810364],[-82.604598999,38.255413055],[-82.609695435,38.264011383],[-82.628196716,38.26871109],[-82.63759613,38.266811371],[-82.695495605,38.262313843],[-82.721794128,38.248710632],[-82.724594116,38.243412018],[-82.73690033,38.237812042],[-82.754295349,38.237411499],[-82.763595581,38.235012054],[-82.786895752,38.244613647],[-82.79309845,38.244411469],[-82.794494629,38.240310669],[-82.810897827,38.238311768],[-82.823699951,38.229011536],[-82.813293457,38.21931076],[-82.824493408,38.207710266],[-82.838493347,38.202812195],[-82.853797913,38.19021225],[-82.882293701,38.195911407],[-82.897094727,38.183410645],[-82.924095154,38.174613953],[-82.915893555,38.172912598],[-82.916297913,38.163211823],[-82.907997131,38.156414032],[-82.90599823,38.132713318],[-82.899398804,38.129112244],[-82.89679718,38.12071228],[-82.888999939,38.112010956],[-82.904899597,38.103912354],[-82.910400391,38.096012115],[-82.919692993,38.096912384],[-82.92779541,38.086513519],[-82.941497803,38.076713562],[-82.942298889,38.063713074],[-82.95009613,38.059810638],[-82.952896118,38.052810669],[-82.961296082,38.049312592],[-82.969200134,38.051612854],[-82.982093811,38.048912048],[-82.984298706,38.043712616],[-83.002197266,38.032913208],[-83.002693176,38.023910522],[-83.021995544,38.006412506],[-83.019699097,37.999011993],[-83.024299622,37.993213654],[-83.02179718,37.981811523],[-83.014198303,37.979412079],[-83.007896423,37.968013763],[-82.999099731,37.96761322],[-82.992599487,37.961513519],[-82.978599548,37.974411011],[-82.976699829,37.984210968],[-82.963195801,37.994911194],[-82.948493958,38.000312805],[-82.939094543,37.998912811],[-82.874694824,37.977611542],[-82.831695557,37.967811584],[-82.73589325,37.916110992],[-82.676399231,37.892112732],[-82.613197327,37.879310608],[-82.608596802,37.887413025],[-82.59589386,37.891010284],[-82.59879303,37.898712158],[-82.594993591,37.90921402],[-82.601097107,37.912811279],[-82.601898193,37.920413971],[-82.594398499,37.922012329],[-82.590499878,37.930812836],[-82.581497192,37.930412292],[-82.581092834,37.95331192],[-82.569694519,37.956111908],[-82.565193176,37.952510834],[-82.554199219,37.959213257],[-82.541595459,37.957511902],[-82.534698486,37.962711334],[-82.497596741,37.945812225],[-82.48639679,37.945613861],[-82.471992493,37.959911346],[-82.482498169,37.962913513],[-82.481300354,37.972812653],[-82.469398499,37.973110199],[-82.465194702,37.984210968],[-82.472198486,37.986713409],[-82.483596802,37.984512329],[-82.486694336,37.997112274],[-82.508598328,38.001411438],[-82.523094177,38.013912201],[-82.525894165,38.026111603],[-82.534599304,38.032112122],[-82.537399292,38.044811249],[-82.544799805,38.05431366],[-82.550895691,38.070213318],[-82.559593201,38.072811127],[-82.566093445,38.080711365],[-82.573493958,38.081912994],[-82.585296631,38.093513489],[-82.585800171,38.106910706],[-82.6065979,38.120811462],[-82.619895935,38.12141037],[-82.621696472,38.1328125],[-82.635894775,38.137710571],[-82.638893127,38.156211853],[-82.644393921,38.16601181],[-82.638397217,38.171211243],[-82.619995117,38.169113159],[-82.612297058,38.170913696],[-82.599197388,38.19751358],[-82.599197388,38.217910767],[-82.608695984,38.22341156],[-82.61239624,38.235111237],[-82.604698181,38.247810364]]]},"properties":null,"id":"Lawrence"}
]}
71
covid.py
Normal file
71
covid.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import time
import json
import psycopg2
import psycopg2.extensions
from psycopg2.extras import Json
import re
import pandas as pd
import requests
import xmltodict
import datetime
from tabulate import tabulate

allobs = []

states = ['wv', 'oh', 'va', 'ky']
ohcounties = ['-LW-', '-GL-', '-JC-', '-MS-', '-AT-', '-PY-', '-WS-', '-MG-', '-VN-']
vacounties = ['-DC-', '-BC-']
kycounties = ['-LR-', '-CT-', '-GP-', '-BD-']
# Station-number filters keyed by state code (WV reports are kept without filtering)
countyfilters = {'oh': ohcounties, 'va': vacounties, 'ky': kycounties}
datewanted = datetime.date.today().strftime("%m/%d/%Y")

try:
    for state in states:
        url = f'https://data.cocorahs.org/export/exportreports.aspx?state={state}&Format=XML&Date={datewanted}&responsefields=all'
        response = requests.get(url)
        response.raise_for_status()  # Check for HTTP errors
        data = xmltodict.parse(response.content.decode('utf-8'))  # Explicitly decode as UTF-8

        try:
            reports = data['Cocorahs']['DailyPrecipReports']['DailyPrecipReport']
            # Handle case where reports might be a single dict or a list
            if isinstance(reports, dict):
                reports = [reports]

            for report in reports:
                if state == 'wv':
                    allobs.append(report)
                else:
                    for county in countyfilters[state]:
                        if county in report['StationNumber']:
                            allobs.append(report)
        except (KeyError, TypeError) as e:
            print(f"Error processing data for state {state}: {e}")
            continue

    # Process observations
    finalobs = []
    for obs in allobs:
        tempob = [
            obs.get('DateTimeStamp', ''),
            obs.get('StationNumber', ''),
            obs.get('StationName', ''),
            obs.get('TotalPrecipAmt', ''),
            obs.get('NewSnowDepth', ''),
            obs.get('TotalSnowDepth', ''),
            obs.get('Notes', '')
        ]
        finalobs.append(tempob)

    # Write to file with UTF-8 encoding
    with open('/var/www/html/work/today.txt', 'w', encoding='utf-8') as f:
        f.write(tabulate(
            finalobs,
            headers=["Date/Time of Ob (Z)", "Station Number", "Station Name",
                     "New Precip", "New Snow", "Snow Depth", "Comments"],
            tablefmt='plain'  # Changed to 'plain' for simpler text output
        ))

except requests.RequestException as e:
    print(f"Error fetching data: {e}")
except Exception as e:
    print(f"Unexpected error: {e}")

144
db.html
Normal file
144
db.html
Normal file
@@ -0,0 +1,144 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Wunderground obs</title>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
|
||||
|
||||
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.js" integrity="sha512-WNLxfP/8cVYL9sj8Jnp6et0BkubLP31jhTG9vhL/F5uEZmg5wEzKoXp1kJslzPQWwPT1eyMiSxlKCgzHLOTOTQ==" crossorigin="anonymous"></script>
|
||||
<link href="https://unpkg.com/tabulator-tables@4.7.2/dist/css/tabulator.min.css" rel="stylesheet">
|
||||
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/luxon@2.3.1/build/global/luxon.min.js"></script>
|
||||
<script type="text/javascript" src="https://unpkg.com/tabulator-tables@4.7.2/dist/js/tabulator.min.js"></script>
|
||||
<button onclick="reloadData()">Data autorefreshes every 5 minutes, click to refresh now</button>
|
||||
<span>QPE 00L is direct from WU; the other QPE values are derived and may be off if the time settings on an individual PWS are incorrect.</span>
|
||||
<input type="checkbox" id="cwa" name="cwa" value="RLX" onchange="filters()" checked>
|
||||
<label for="cwa">RLX only</label><br>
|
||||
<div id="wunderobs"></div>
|
||||
|
||||
<script>
|
||||
var dataurl = 'db.php'
|
||||
|
||||
|
||||
function googleMap(cell, formatterParams){
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + cell.getData().lat + "+" + cell.getData().lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
function calculateApparentTemperature(temperature, dewPoint, windSpeed) {
|
||||
// Heat Index calculation for temperatures above 80°F (26.7°C)
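// Note: the standard NWS (Rothfusz) heat index regression is defined in terms of relative humidity;
// this version feeds dew point (dp) into those coefficients instead, so treat the result as an approximation.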
|
||||
function heatIndex(temp, dp) {
|
||||
if (temp < 80) return temp;
|
||||
|
||||
let hi = 0.5 * (temp + 61.0 + ((temp - 68.0) * 1.2) + (dp * 0.094));
|
||||
|
||||
if (hi > 79) {
|
||||
hi = -42.379 +
|
||||
2.04901523 * temp +
|
||||
10.14333127 * dp -
|
||||
0.22475541 * temp * dp -
|
||||
6.83783 * Math.pow(10, -3) * Math.pow(temp, 2) -
|
||||
5.481717 * Math.pow(10, -2) * Math.pow(dp, 2) +
|
||||
1.22874 * Math.pow(10, -3) * Math.pow(temp, 2) * dp +
|
||||
8.5282 * Math.pow(10, -4) * temp * Math.pow(dp, 2) -
|
||||
1.99 * Math.pow(10, -6) * Math.pow(temp, 2) * Math.pow(dp, 2);
|
||||
|
||||
// Adjustments for high temperatures
|
||||
if (dp < 13) {
|
||||
hi -= ((13 - dp) / 4) * Math.sqrt((17 - Math.abs(temp - 95)) / 17);
|
||||
}
|
||||
if (dp > 85) {
|
||||
hi += ((dp - 85) / 10) * ((87 - temp) / 5);
|
||||
}
|
||||
}
|
||||
|
||||
return hi;
|
||||
}
|
||||
|
||||
// Wind Chill calculation for temperatures below 50°F (10°C)
|
||||
function windChill(temp, wind) {
|
||||
if (temp > 50 || wind < 3) return temp;
|
||||
|
||||
let wc = 35.74 + 0.6215 * temp - 35.75 * Math.pow(wind, 0.16) + 0.4275 * temp * Math.pow(wind, 0.16);
|
||||
return wc;
|
||||
}
|
||||
|
||||
let apparentTemp;
|
||||
|
||||
if (temperature > 80) {
|
||||
// High temperature scenario, use heat index
|
||||
apparentTemp = heatIndex(temperature, dewPoint);
|
||||
} else if (temperature < 50 && windSpeed > 3) {
|
||||
// Low temperature scenario, use wind chill
|
||||
apparentTemp = windChill(temperature, windSpeed);
|
||||
} else {
|
||||
// Moderate temperature, just return the actual temperature
|
||||
apparentTemp = temperature;
|
||||
}
|
||||
|
||||
// Ensure the result is within reasonable bounds
|
||||
apparentTemp = Math.min(140, Math.max(-20, apparentTemp));
|
||||
|
||||
return apparentTemp.toFixed(1);
|
||||
}
|
||||
|
||||
function reloadData() {
|
||||
table.replaceData(dataurl);
|
||||
}
|
||||
|
||||
var table = new Tabulator("#wunderobs", {
|
||||
responsiveLayout:true,
|
||||
tooltipsHeader:true,
|
||||
columns:[
|
||||
{title:"Station", field:"stationid", formatter:"link", formatterParams:{urlPrefix:"https://www.wunderground.com/dashboard/pws/", target:"_blank"}},
|
||||
{title:"Time (UTC)", field:"lastob"},
|
||||
{title:"T", field:"tempf"},
|
||||
{title:"ApT",formatter:function(cell) {
|
||||
var apparentT = calculateApparentTemperature(cell.getData().tempf, cell.getData().dewpt, cell.getData().windspd);
|
||||
console.log(apparentT)
|
||||
return apparentT
|
||||
|
||||
}},
|
||||
{title:"Td", field:"dewpt"},
|
||||
{title:"QPE 00L", field:"preciptotal",formatter:"money",headerTooltip:"Since Midnight"},
|
||||
{title:"24hr QPE", field:"rain24",formatter:"money"},
|
||||
{title:"6hr QPE", field:"rain6",formatter:"money"},
|
||||
{title:"3hr QPE", field:"rain3",formatter:"money"},
|
||||
{title:"Winddir", field:"winddir"},
|
||||
{title:"Speed", field:"windspd",headerTooltip:"Mph"},
|
||||
{title:"Gust", field:"windgust",headerTooltip:"Mph"},
|
||||
{title:"PK 24 Gust", field:"windmax",headerTooltip:"Mph"},
|
||||
{title:"MaxT", field:"maxt",headerTooltip:"Last 24hrs"},
|
||||
{title:"MinT", field:"mint",headerTooltip:"Last 24hrs"},
|
||||
{title:"Elev", field:"elev",responsive:2},
|
||||
{title:"Lon", field:"lon",visible:false},
|
||||
{title:"City", field:"adm1"},
|
||||
{title:"State", field:"adm2"},
|
||||
{title:"County", field:"county"},
|
||||
{title:"Baro", field:"pressure",responsive:2},
|
||||
{title:"Location", field:"neighborhood", formatter:"link", formatterParams:{url: googleMap, target:"_blank"}}
|
||||
],
|
||||
});
|
||||
|
||||
table.setData(dataurl);
|
||||
|
||||
function filters() {
|
||||
var y = document.getElementById("cwa").checked;
|
||||
if (y) {
|
||||
dataurl = 'db.php'
|
||||
table.replaceData(dataurl)
|
||||
table.addFilter("cwa", "=", 'RLX');
|
||||
|
||||
}
|
||||
if (!y) {
|
||||
dataurl = 'db.php?outside=yes'
|
||||
table.replaceData(dataurl)
|
||||
table.removeFilter("cwa", "=", 'RLX');
|
||||
}
|
||||
}
|
||||
// {title:"24hr QPE", field:"rain24", formatterParms:{precision:2}},
|
||||
var timeout = setInterval(reloadData, 300000);
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
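The apparent-temperature switch used in db.html (heat index above 80°F, wind chill below 50°F with wind over 3 mph, otherwise the air temperature, clamped to -20..140) can be sketched server-side as well. A minimal Python sketch, assuming Fahrenheit and mph; the wind chill formula is the same one used above, while the heat index is left as an injectable placeholder since the JS version only approximates it:

def wind_chill(temp_f, wind_mph):
    # NWS wind chill formula; only defined for cold, windy conditions
    if temp_f > 50 or wind_mph < 3:
        return temp_f
    return 35.74 + 0.6215 * temp_f - 35.75 * wind_mph ** 0.16 + 0.4275 * temp_f * wind_mph ** 0.16

def apparent_temperature(temp_f, dewpt_f, wind_mph, heat_index=lambda t, td: t):
    if temp_f > 80:
        apt = heat_index(temp_f, dewpt_f)   # hot side: heat index
    elif temp_f < 50 and wind_mph > 3:
        apt = wind_chill(temp_f, wind_mph)  # cold side: wind chill
    else:
        apt = temp_f                        # in between: plain air temperature
    return max(-20, min(140, apt))          # same bounds clamp as the JS version

print(apparent_temperature(28.0, 20.0, 15.0))  # example: wind chill case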
179
db.php
Normal file
179
db.php
Normal file
@@ -0,0 +1,179 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
|
||||
// No GET parameters: return the latest observations for the RLX CWA as JSON
|
||||
|
||||
|
||||
if (empty($_GET)) {
|
||||
try {
|
||||
$query = "
|
||||
SELECT
|
||||
stationid,
|
||||
lat,
|
||||
lon,
|
||||
tempf,
|
||||
dewpt,
|
||||
preciptotal,
|
||||
winddir,
|
||||
windspd,
|
||||
windgust,
|
||||
elev,
|
||||
adm1,
|
||||
adm2,
|
||||
neighborhood,
|
||||
maxt,
|
||||
mint,
|
||||
pressure,
|
||||
lastob,
|
||||
county,
|
||||
rain24,
|
||||
rain3,
|
||||
rain6,
|
||||
windmax,
|
||||
cwa
|
||||
FROM (
|
||||
SELECT DISTINCT ON (stationid) *
|
||||
FROM wusites
|
||||
WHERE active = TRUE
|
||||
AND cwa = 'RLX'
|
||||
AND lastob BETWEEN timezone('utc', NOW()) - INTERVAL '0.5 hours'
|
||||
AND timezone('utc', NOW())
|
||||
) p
|
||||
ORDER BY lastob DESC
|
||||
";
|
||||
|
||||
$result = pg_query($dbconn, $query);
|
||||
if ($result === false) {
|
||||
throw new Exception('Query failed: ' . pg_last_error());
|
||||
}
|
||||
|
||||
$results = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$results[] = $line;
|
||||
}
|
||||
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode($results);
|
||||
|
||||
pg_free_result($result);
|
||||
} catch (Exception $e) {
|
||||
if (isset($result)) {
|
||||
pg_free_result($result);
|
||||
}
|
||||
header('Content-Type: application/json');
|
||||
http_response_code(500);
|
||||
echo json_encode(['error' => $e->getMessage()]);
|
||||
exit;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (isset($_GET['outsideold'])) {
|
||||
$query = "SELECT stationid, lat, lon, tempf, dewpt,preciptotal,winddir,windspd,windgust,elev,adm1,adm2,neighborhood,maxt,mint,pressure,lastob,county,rain24,rain3,rain6,windmax,cwa FROM (SELECT DISTINCT ON (stationid) * FROM wusites WHERE (active = TRUE) and lastob BETWEEN timezone('utc', now()) - INTERVAL '.5 HOURS'AND timezone('utc', now())) p ORDER BY lastob desc;";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in HTML
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
|
||||
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
|
||||
}
|
||||
|
||||
if (isset($_GET['outside'])) {
|
||||
try {
|
||||
$query = "
|
||||
SELECT
|
||||
stationid,
|
||||
lat,
|
||||
lon,
|
||||
tempf,
|
||||
dewpt,
|
||||
preciptotal,
|
||||
winddir,
|
||||
windspd,
|
||||
windgust,
|
||||
elev,
|
||||
adm1,
|
||||
adm2,
|
||||
neighborhood,
|
||||
maxt,
|
||||
mint,
|
||||
pressure,
|
||||
lastob,
|
||||
county,
|
||||
rain24,
|
||||
rain3,
|
||||
rain6,
|
||||
windmax,
|
||||
cwa
|
||||
FROM (
|
||||
SELECT DISTINCT ON (stationid) *
|
||||
FROM wusites
|
||||
WHERE active = TRUE
|
||||
AND lastob BETWEEN timezone('utc', NOW()) - INTERVAL '0.5 hours'
|
||||
AND timezone('utc', NOW())
|
||||
) p
|
||||
ORDER BY lastob DESC
|
||||
";
|
||||
|
||||
$result = pg_query($dbconn, $query);
|
||||
if ($result === false) {
|
||||
throw new Exception('Query failed: ' . pg_last_error());
|
||||
}
|
||||
|
||||
$results = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$results[] = $line;
|
||||
}
|
||||
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode($results);
|
||||
|
||||
pg_free_result($result);
|
||||
} catch (Exception $e) {
|
||||
if (isset($result)) {
|
||||
pg_free_result($result);
|
||||
}
|
||||
header('Content-Type: application/json');
|
||||
http_response_code(500);
|
||||
echo json_encode(['error' => $e->getMessage()]);
|
||||
exit;
|
||||
}
|
||||
}
|
||||
|
||||
pg_close($dbconn);
|
||||
|
||||
/*
|
||||
|
||||
// Performing SQL query
|
||||
$query = "SELECT stationid, lat, lon, tempf, dewpt,preciptotal,winddir,windspd,windgust,elev,adm1,adm2,neighborhood,maxt,mint,pressure,lastob,county,rain24,rain3,rain6,windmax,cwa FROM (SELECT DISTINCT ON (stationid) * FROM wusites WHERE (active = TRUE) and lastob BETWEEN timezone('utc', now()) - INTERVAL '.5 HOURS'AND timezone('utc', now())) p ORDER BY lastob desc;";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in HTML
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
|
||||
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
|
||||
*/
|
||||
|
||||
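Both db.php endpoints rely on DISTINCT ON (stationid) plus a half-hour lastob window. A hedged psycopg2 sketch of the same query, using the wusites table and column names shown above and local credentials as assumptions; the inner ORDER BY is added here so "latest row per station" is deterministic, which the PHP version leaves unspecified:

import json
import psycopg2

def latest_obs(rlx_only=True):
    sql = """
        SELECT stationid, tempf, dewpt, windspd, lastob
        FROM (
            SELECT DISTINCT ON (stationid) *
            FROM wusites
            WHERE active = TRUE
              AND (%(rlx_only)s = FALSE OR cwa = 'RLX')
              AND lastob BETWEEN timezone('utc', NOW()) - INTERVAL '0.5 hours'
                             AND timezone('utc', NOW())
            ORDER BY stationid, lastob DESC
        ) p
        ORDER BY lastob DESC
    """
    with psycopg2.connect(host='localhost', dbname='nws', user='nws', password='nws') as conn:
        with conn.cursor() as cur:
            cur.execute(sql, {'rlx_only': rlx_only})
            cols = [d[0] for d in cur.description]
            return [dict(zip(cols, row)) for row in cur.fetchall()]

if __name__ == '__main__':
    print(json.dumps(latest_obs(), default=str)[:500])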
16
fire.php
Normal file
16
fire.php
Normal file
@@ -0,0 +1,16 @@
<?php
// Connecting, selecting database
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
    or die('Could not connect: ' . pg_last_error());

// No GET parameters: return active wildfires from the last 36 hours as a GeoJSON FeatureCollection
if (empty($_GET)) {
    $result = pg_query_params($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection', 'features',
            json_agg(json_build_object('type', 'Feature',
                'geometry', ST_AsGeoJSON(geom)::json,
                'properties', json_build_object('incname', incname, 'discovery', discovery,
                    'modified', modified, 'age', age, 'dailyacres', dailyacres, 'type', type,
                    'contained', contained, 'personnel', personnel)) ORDER BY modified ASC))
         FROM fire
         WHERE type = $1 AND contained <> 100 AND modified > now() - interval '36 hours'",
        array('WF')) or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    header('Content-Type: application/json');
    echo($resultArray[0]['json_build_object']);
}
126
fire.py
Normal file
126
fire.py
Normal file
@@ -0,0 +1,126 @@
|
||||
import requests
|
||||
import json
|
||||
import psycopg2
|
||||
from datetime import datetime
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
|
||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
||||
|
||||
# Database connection details (keep these secure and configurable)
|
||||
DB_HOST = 'localhost'
|
||||
DB_DATABASE = 'nws'
|
||||
DB_USER = 'nws'
|
||||
DB_PASSWORD = 'nws'
|
||||
|
||||
# API URL
|
||||
LIVING_ATLAS_URL = "https://services9.arcgis.com/RHVPKKiFTONKtxq3/ArcGIS/rest/services/USA_Wildfires_v1/FeatureServer/0/query?where=1%3D1&geometry=%7B%22spatialReference%22:%7B%22latestWkid%22:3857,%22wkid%22:102100%7D,%22xmin%22:-9222738.841522107,%22ymin%22:4457648.21239309,%22xmax%22:-9009938.154776277,%22ymax%22:4723649.070825376%7D&geometryType=esriGeometryEnvelope&spatialRelationship=intersects&inSR=3857&returnGeometry=true&returnQueryGeometry=true&outFields=IrwinID,IncidentName,POOState,ModifiedOnDateTime,FireDiscoveryDateTime,FireDiscoveryAge,IncidentTypeCategory,CalculatedAcres,DailyAcres,DiscoveryAcres,PercentContained,TotalIncidentPersonnel&f=json"
|
||||
|
||||
|
||||
S = requests.Session()
|
||||
S.verify = False # Be cautious about disabling SSL verification in production
|
||||
|
||||
|
||||
def livingatlas(url):
|
||||
try:
|
||||
result = S.get(url, timeout=10) # Added timeout to prevent indefinite hanging
|
||||
result.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
|
||||
data = result.json() # Use .json() directly, it handles json.loads() and error handling
|
||||
|
||||
conn = None # Initialize conn outside the loop for broader scope
|
||||
cursor = None
|
||||
try:
|
||||
conn = psycopg2.connect(host=DB_HOST, database=DB_DATABASE, user=DB_USER, password=DB_PASSWORD)
|
||||
cursor = conn.cursor()
|
||||
|
||||
for feature in data.get('features', []): # Safely access features
|
||||
attributes = feature.get('attributes', {})
|
||||
geometry = feature.get('geometry', {})
|
||||
|
||||
incid = attributes.get('IrwinID')
|
||||
incname = attributes.get('IncidentName')
|
||||
state = attributes.get('POOState')
|
||||
modified_timestamp = attributes.get('ModifiedOnDateTime')
|
||||
discoverytime_timestamp = attributes.get('FireDiscoveryDateTime')
|
||||
discoveryage = attributes.get('FireDiscoveryAge')
|
||||
inctype = attributes.get('IncidentTypeCategory')
|
||||
calcacres = attributes.get('CalculatedAcres')
|
||||
dailyacres = attributes.get('DailyAcres')
|
||||
discoveryacres = attributes.get('DiscoveryAcres')
|
||||
contained = attributes.get('PercentContained')
|
||||
personnel = attributes.get('TotalIncidentPersonnel')
|
||||
lat = geometry.get('y')
|
||||
lon = geometry.get('x')
|
||||
|
||||
discoverytime = datetime.fromtimestamp(discoverytime_timestamp/1000) if discoverytime_timestamp else None
|
||||
modified = datetime.fromtimestamp(modified_timestamp/1000) if modified_timestamp else None
|
||||
|
||||
print(incid, incname, state, modified, discoverytime, discoveryage, inctype, calcacres, dailyacres, discoveryacres, contained, personnel, lat, lon)
|
||||
|
||||
|
||||
sql_insert = """
|
||||
INSERT INTO fire (
|
||||
incid, incname, state, modified, discovery, age, type,
|
||||
calcacres, dailyacres, discoveryacres, contained, personnel, lat, lon
|
||||
) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
||||
ON CONFLICT (incid) DO UPDATE SET
|
||||
incname = %s,
|
||||
state = %s,
|
||||
modified = %s,
|
||||
discovery = %s,
|
||||
age = %s,
|
||||
type = %s,
|
||||
calcacres = %s,
|
||||
dailyacres = %s,
|
||||
discoveryacres = %s,
|
||||
contained = %s,
|
||||
personnel = %s,
|
||||
lat = %s,
|
||||
lon = %s
|
||||
"""
|
||||
vals = (
|
||||
incid, incname, state, modified, discoverytime, discoveryage, inctype,
|
||||
calcacres, dailyacres, discoveryacres, contained, personnel, lat, lon,
|
||||
incname, state, modified, discoverytime, discoveryage, inctype,
|
||||
calcacres, dailyacres, discoveryacres, contained, personnel, lat, lon
|
||||
)
|
||||
|
||||
cursor.execute(sql_insert, vals)
|
||||
conn.commit() # Commit after each successful insert/update
|
||||
|
||||
except psycopg2.Error as db_error:
|
||||
if conn:
|
||||
conn.rollback() # Rollback transaction on error
|
||||
print(f"Database error: {db_error}")
|
||||
finally:
|
||||
if cursor:
|
||||
cursor.close()
|
||||
if conn:
|
||||
conn.close()
|
||||
|
||||
except requests.exceptions.RequestException as req_error: # Catch broader request exceptions
|
||||
print(f"API Request error: {req_error}")
|
||||
except json.JSONDecodeError as json_error:
|
||||
print(f"JSON Decode error: {json_error}. Response text was: {result.text if 'result' in locals() else 'No response received'}") # More informative JSON error
|
||||
|
||||
|
||||
livingatlas(LIVING_ATLAS_URL)
|
||||
|
||||
|
||||
conn = None # Re-initialize conn for the geometry update outside the livingatlas function
|
||||
cursor = None
|
||||
try:
|
||||
conn = psycopg2.connect(host=DB_HOST, database=DB_DATABASE, user=DB_USER, password=DB_PASSWORD)
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("UPDATE public.fire SET geom = ST_SetSRID(ST_MakePoint(lon, lat), 4326) WHERE (lat IS NOT NULL AND lon IS NOT NULL AND geom IS NULL)")
|
||||
conn.commit()
|
||||
except psycopg2.Error as db_error:
|
||||
if conn:
|
||||
conn.rollback()
|
||||
print(f"Database error during geometry update: {db_error}")
|
||||
finally:
|
||||
if cursor:
|
||||
cursor.close()
|
||||
if conn:
|
||||
conn.close()
|
||||
|
||||
print("Script execution completed.")
|
||||
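The upsert in fire.py binds every value twice because the DO UPDATE SET list repeats a placeholder per column. Postgres's EXCLUDED pseudo-row removes that duplication; a sketch with the same assumed fire table and column names:

import psycopg2

UPSERT = """
    INSERT INTO fire (incid, incname, state, modified, discovery, age, type,
                      calcacres, dailyacres, discoveryacres, contained, personnel, lat, lon)
    VALUES (%(incid)s, %(incname)s, %(state)s, %(modified)s, %(discovery)s, %(age)s, %(type)s,
            %(calcacres)s, %(dailyacres)s, %(discoveryacres)s, %(contained)s, %(personnel)s,
            %(lat)s, %(lon)s)
    ON CONFLICT (incid) DO UPDATE SET
        incname = EXCLUDED.incname,
        state = EXCLUDED.state,
        modified = EXCLUDED.modified,
        discovery = EXCLUDED.discovery,
        age = EXCLUDED.age,
        type = EXCLUDED.type,
        calcacres = EXCLUDED.calcacres,
        dailyacres = EXCLUDED.dailyacres,
        discoveryacres = EXCLUDED.discoveryacres,
        contained = EXCLUDED.contained,
        personnel = EXCLUDED.personnel,
        lat = EXCLUDED.lat,
        lon = EXCLUDED.lon
"""

def upsert_fire(conn, record: dict):
    # record keys mirror the named placeholders above, so each value is bound exactly once
    with conn.cursor() as cur:
        cur.execute(UPSERT, record)
    conn.commit()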
104
followup.py
Normal file
104
followup.py
Normal file
@@ -0,0 +1,104 @@
|
||||
import json
|
||||
import psycopg2
|
||||
from datetime import datetime, timezone
|
||||
import re
|
||||
from typing import List, Tuple
|
||||
|
||||
# Use context manager for connection
|
||||
def get_db_connection():
|
||||
return psycopg2.connect(
|
||||
host='localhost',
|
||||
database='nws',
|
||||
user='nws',
|
||||
password='nws'
|
||||
)
|
||||
|
||||
def decode_vtec(pvtec_string: str) -> List[str]:
|
||||
pattern = r"/([A-Z]+)\.([A-Z]+)\.([A-Z]+)\.([A-Z]+)\.([A-Z])\.([0-9]+)\.([0-9A-Z]+)T([0-9A-Z]+)-([0-9A-Z]+)T([0-9A-Z]+)/"
|
||||
match = re.match(pattern, pvtec_string)
|
||||
return (
|
||||
[match.group(1), match.group(2), match.group(4), match.group(5), match.group(6)]
|
||||
if match
|
||||
else (print("P-VTEC string format not recognized.") or [])
|
||||
)
|
||||
|
||||
def run_warning_checks():
|
||||
with get_db_connection() as conn:
|
||||
with conn.cursor() as cursor:
|
||||
current_time = datetime.utcnow()
|
||||
|
||||
# Batch process EXP/CAN warnings
|
||||
cursor.execute("""
|
||||
SELECT issue, endtime, etin, pil, canexp, warnexpired, svstype, ugc,
|
||||
vtectext, warntype, year, office, sig, index
|
||||
FROM warntracker
|
||||
WHERE svstype IN ('EXP', 'CAN')
|
||||
AND workedon IS NOT TRUE
|
||||
""")
|
||||
|
||||
expired_can_warnings = cursor.fetchall()
|
||||
for warning in expired_can_warnings:
|
||||
issue, endtime, etin, _, _, _, _, ugc, vtectext, warntype, year, office, _, index = warning
|
||||
|
||||
# Combine related updates into single query
|
||||
cursor.execute("""
|
||||
UPDATE warntracker
|
||||
SET canxugc = array(
|
||||
SELECT unnest(canxugc)
|
||||
UNION
|
||||
SELECT unnest(%s)
|
||||
),
|
||||
warnexpired = CASE WHEN %s < %s THEN TRUE ELSE warnexpired END,
|
||||
workedon = TRUE
|
||||
WHERE etin = %s
|
||||
AND svstype = 'NEW'
|
||||
AND year = %s
|
||||
AND warntype = %s
|
||||
AND office = %s
|
||||
""", (ugc, endtime, current_time, etin, year, warntype, office))
|
||||
|
||||
# Batch process CON warnings
|
||||
cursor.execute("""
|
||||
SELECT issue, endtime, etin, pil, canexp, warnexpired, svstype, ugc,
|
||||
vtectext, warntype, year, workedon, office, index
|
||||
FROM warntracker
|
||||
WHERE svstype = 'CON'
|
||||
AND current_timestamp > issue
|
||||
AND (EXTRACT(EPOCH FROM (current_timestamp - endtime))/60) < 120
|
||||
AND canexp IS NOT TRUE
|
||||
AND workedon IS NOT TRUE
|
||||
""")
|
||||
|
||||
continuing_warnings = cursor.fetchall()
|
||||
for warning in continuing_warnings:
|
||||
issue, endtime, etin, _, _, _, _, ugc, _, warntype, _, _, office, index = warning
|
||||
|
||||
# Combine updates into fewer queries
|
||||
cursor.execute("""
|
||||
UPDATE warntracker
|
||||
SET followup = TRUE,
|
||||
followups = COALESCE(followups, 0) + 1,
|
||||
workedon = TRUE
|
||||
WHERE etin = %s
|
||||
AND svstype = 'NEW'
|
||||
AND warntype = %s
|
||||
AND office = %s
|
||||
""", (etin, warntype, office))
|
||||
|
||||
cursor.execute("""
|
||||
UPDATE warntracker
|
||||
SET canexp = TRUE,
|
||||
warnexpired = CASE WHEN %s < %s THEN TRUE ELSE warnexpired END,
|
||||
workedon = TRUE
|
||||
WHERE canxugc @> %s
|
||||
AND etin = %s
|
||||
""", (endtime, current_time, ugc, etin))
|
||||
|
||||
# Final cleanup in one query
|
||||
cursor.execute("UPDATE warntracker SET canexp = TRUE WHERE canxugc @> ugc")
|
||||
|
||||
conn.commit()
|
||||
|
||||
if __name__ == "__main__":
|
||||
svrinfo = [] # Consider removing if not used
|
||||
run_warning_checks()
|
||||
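A short usage sketch for decode_vtec() above, assuming followup.py is importable on the path; the P-VTEC string below is a made-up example, not a real product. The function returns the product class, action, phenomenon, significance, and event tracking number, skipping the issuing office group:

from followup import decode_vtec  # assumption: followup.py is on the import path

sample = "/O.NEW.KRLX.SV.W.0045.250601T2130Z-250601T2230Z/"  # invented example string
product_class, action, phenomenon, significance, etn = decode_vtec(sample)
print(action, phenomenon, significance, etn)  # NEW SV W 0045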
47
gengeo.py
Normal file
47
gengeo.py
Normal file
@@ -0,0 +1,47 @@
import psycopg2
import json

# Connect to your PostgreSQL database
conn = psycopg2.connect(
    host="localhost",
    database="nws",
    user="nws",
    password="nws"
)

# Create a cursor object
cur = conn.cursor()

# Execute the SQL query
cur.execute("""
    SELECT json_build_object(
        'type', 'FeatureCollection',
        'features', json_agg(
            json_build_object(
                'type', 'Feature',
                'geometry', ST_AsGeoJSON(geom)::json,
                'properties', json_build_object(
                    'county', countyname,
                    'state', state,
                    'lat', lat,
                    'lon', lon
                )
            )
        )
    )
    FROM county
    WHERE cwa = 'RLX';
""")

# Fetch the result (psycopg2 parses the json column into a Python dict)
geojson_result = cur.fetchone()[0]

# Write the GeoJSON result to a file
with open("rlxtest.json", "w") as outfile:
    json.dump(geojson_result, outfile, indent=2)

# Close the cursor and connection
cur.close()
conn.close()
48
geo.py
Normal file
48
geo.py
Normal file
@@ -0,0 +1,48 @@
# You can convert a panoid to lat/lon using a free API call:
# https://maps.googleapis.com/maps/api/streetview/metadata?pano=PANOID&key=YOURAPIKEY

#https://maps.googleapis.com/maps/api/streetview/metadata?pano=onUr8119UohoEeRXfBNArQ&key=AIzaSyDNmQaLwMoVluAJ8PMIZZyMUfp3hlbsndw

import requests
import os
import json
import webbrowser


S = requests.Session()
apikey = 'AIzaSyDNmQaLwMoVluAJ8PMIZZyMUfp3hlbsndw'


def geocheat(panoidurl):
    query = requests.utils.urlparse(panoidurl).query
    params = dict(x.split('=') for x in query.split('&'))

    if 'panoid' in params:
        panoid = params['panoid']

        # Reuse the apikey variable instead of hard-coding the key a second time
        url = 'https://maps.googleapis.com/maps/api/streetview/metadata?pano=' + panoid + '&key=' + apikey
        dataresponse = json.loads(S.get(url).text)
        #r = requests.get(url, timeout=3)
        #if r.status_code == 200:
        lat = dataresponse['location']['lat']
        lon = dataresponse['location']['lng']

        #print(lat,lon)
        # print(r.content)
        #print(dataresponse)
        mapurl = "https://maps.google.com/maps?q=loc:" + str(lat) + "+" + str(lon)

        #os.system("start \"\" + mapurl)
        webbrowser.open(mapurl, new=1)


while True:
    cheatme = input("Enter URL with panoid: ")
    geocheat(cheatme)
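The query string above is split by hand with split('&') and split('='), which breaks on URL-encoded values or bare keys. A small standard-library sketch of the same panoid extraction using urllib.parse (the example URL is invented):

from urllib.parse import urlparse, parse_qs

def extract_panoid(url):
    # parse_qs handles percent-encoding and repeated keys for us
    params = parse_qs(urlparse(url).query)
    values = params.get('panoid')
    return values[0] if values else None

print(extract_panoid("https://example.com/maps?ll=38.35,-81.63&panoid=abc123XYZ&z=17"))  # abc123XYZ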
63
getonerain.py
Normal file
63
getonerain.py
Normal file
@@ -0,0 +1,63 @@
import time
import requests
import json
import geojson
from shapely.geometry import Polygon, LineString, Point
from shapely import wkt

import psycopg2
import psycopg2.extensions
from psycopg2.extras import Json

conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()

# Bounding box of CWA

url = 'https://wvdhsem.onerain.com/export/map/geojson/?method=sites&view=fb358463-0d34-42b9-b86e-32aee53d3fd2'

S = requests.Session()

response = S.get(url).json()
alltings = []
for p in response['features']:
    # Keep only non-USGS sites that are currently active
    if p['properties']['system'] != 'USGS' and p['properties']['status'] == 1:
        sitetype = 'unknown'
        siteid = p['properties']['site_id']
        sitename = p['properties']['name']
        lon = p['geometry']['coordinates'][0]
        lat = p['geometry']['coordinates'][1]
        if "MET" in sitename:
            sitetype = 'MET'
        if "Rain" in sitename:
            sitetype = 'Rain'

        tings = [siteid, sitename, lat, lon, sitetype]
        alltings.append(tings)
        sql = 'INSERT INTO onerainsites (siteid, sitename, lat, lon, sitetype) VALUES (%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING'
        cursor.execute(sql, tings)

conn.commit()
print(alltings)

cursor.close()
conn.close()


def del_none(d):
    # Recursively remove keys whose value is None (nested dicts included)
    for key, value in list(d.items()):
        if value is None:
            del d[key]
        elif isinstance(value, dict):
            del_none(value)
    return d  # For convenience
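The del_none helper defined just above is never called in this script; a hedged usage example of what it is for, dropping null values from nested dicts before encoding (the sample dict below is made up):

feature_props = {"site_id": 4321, "name": "Example Rain Gauge", "offline_reason": None,
                 "meta": {"owner": None, "county": "Kanawha"}}
print(del_none(feature_props))
# {'site_id': 4321, 'name': 'Example Rain Gauge', 'meta': {'county': 'Kanawha'}}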
1
gif.js
Submodule
1
gif.js
Submodule
Submodule gif.js added at b580b10cd8
69
gif2.php
Normal file
69
gif2.php
Normal file
@@ -0,0 +1,69 @@
|
||||
<?php
|
||||
include "AnimGif.php";
|
||||
error_reporting(E_ALL);
|
||||
ini_set('display_errors', 'on');
|
||||
// Create an array containing file paths, resource var (initialized with imagecreatefromXXX),
|
||||
// image URLs or even binary code from image files.
|
||||
// All sorted in order to appear.
|
||||
|
||||
//$array=json_decode($_POST['camarray']);
|
||||
|
||||
// Use an array containing file paths, resource vars (initialized with imagecreatefromXXX),
|
||||
// image URLs or binary image data.
|
||||
|
||||
//$elements = $_POST['data'];
|
||||
$elements = $_POST['data'];
|
||||
$numimages = $_POST['images'];
|
||||
$delay = $_POST['delay'];
|
||||
$lastdelay = $_POST['lastdelay'];
|
||||
|
||||
$numimages = $numimages - 2;
|
||||
//$elements = json_decode($elements);
|
||||
|
||||
// Or: load images from a dir (sorted, skipping .files):
|
||||
//$elements = "./camdata/177/";
|
||||
|
||||
//$elements = array(
|
||||
// "http://wx.stoat.org/camdata/177/8d55c896b3f5acbafcccc9293e2a82d5.png",
|
||||
// "http://wx.stoat.org/camdata/177/e2cebde9c999352225b6653696c02a98.png" // image file path
|
||||
//);
|
||||
|
||||
|
||||
// Optionally: set different durations (in 1/100s units) for each frame
|
||||
|
||||
//$durations = array(20);
|
||||
$durations = array_fill(0, $numimages, $delay);
|
||||
array_push($durations, $lastdelay);
|
||||
|
||||
|
||||
// Or: you can leave off repeated values from the end:
|
||||
//$durations = array(20, 30, 10); // use 10 for the rest
|
||||
// Or: use 'null' anywhere to re-apply the previous delay:
|
||||
//$durations = array(250, null, null, 500);
|
||||
$anim = new GifCreator\AnimGif();
|
||||
$anim->create($elements, $durations);
|
||||
|
||||
// Or: using the default 100ms even delay:
|
||||
//$anim->create($elements);
|
||||
|
||||
// Or: loop 5 times, then stop:
|
||||
//$anim->create($frames, $durations, 5); // default: infinite looping
|
||||
|
||||
$gif = $anim->get();
|
||||
|
||||
//header("Content-Type: text/plain");
|
||||
//$gif = file_get_contents('test.png');
|
||||
//echo var_dump($_POST['elements']);
|
||||
//echo 'hi';
|
||||
echo base64_encode($gif);
|
||||
//echo var_dump($elements);
|
||||
//echo 'test';
|
||||
|
||||
exit;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
?>
|
||||
418
gokycams.py
Normal file
418
gokycams.py
Normal file
@@ -0,0 +1,418 @@
|
||||
|
||||
import psycopg2
|
||||
import requests
|
||||
import re
|
||||
import logging
|
||||
import time # Import the time module
|
||||
import ast
|
||||
import json
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
|
||||
def get_initial_session_data(session, first_request_zx="fgd9osntni3x"):
|
||||
"""
|
||||
Makes the first POST request to get the SID and the dynamic gsessionid.
|
||||
"""
|
||||
url = f"https://firestore.googleapis.com/google.firestore.v1.Firestore/Listen/channel?database=projects%2Fkytc-goky%2Fdatabases%2F(default)&VER=8&RID=65930&CVER=22&X-HTTP-Session-Id=gsessionid&%24httpHeaders=X-Goog-Api-Client%3Agl-js%2F%20fire%2F9.6.10%0D%0AContent-Type%3Atext%2Fplain%0D%0AX-Firebase-GMPID%3A1%3A911478978941%3Aweb%3Ab965a6c158ee5c4d17b414%0D%0A&zx={first_request_zx}&t=1"
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:139.0) Gecko/20100101 Firefox/139.0",
|
||||
"Accept": "*/*", "Accept-Language": "en-US,en;q=0.5", "content-type": "application/x-www-form-urlencoded",
|
||||
"Sec-Fetch-Dest": "empty", "Sec-Fetch-Mode": "cors", "Sec-Fetch-Site": "cross-site", "Priority": "u=4",
|
||||
"Referer": "https://goky.ky.gov/"
|
||||
}
|
||||
body = "count=2&ofs=0&req0___data__=%7B%22database%22%3A%22projects%2Fkytc-goky%2Fdatabases%2F(default)%22%2C%22addTarget%22%3A%7B%22query%22%3A%7B%22structuredQuery%22%3A%7B%22from%22%3A%5B%7B%22collectionId%22%3A%22realtime%22%7D%5D%2C%22orderBy%22%3A%5B%7B%22field%22%3A%7B%22fieldPath%22%3A%22__name__%22%7D%2C%22direction%22%3A%22ASCENDING%22%7D%5D%7D%2C%22parent%22%3A%22projects%2Fkytc-goky%2Fdatabases%2F(default)%2Fdocuments%22%7D%2C%22targetId%22%3A2%7D%7D&req1___data__=%7B%22database%22%3A%22projects%2Fkytc-goky%2Fdatabases%2F(default)%22%2C%22addTarget%22%3A%7B%22query%22%3A%7B%22structuredQuery%22%3A%7B%22from%22%3A%5B%7B%22collectionId%22%3A%22tweets%22%7D%5D%2C%22orderBy%22%3A%5B%7B%22field%22%3A%7B%22fieldPath%22%3A%22__name__%22%7D%2C%22direction%22%3A%22ASCENDING%22%7D%5D%7D%2C%22parent%22%3A%22projects%2Fkytc-goky%2Fdatabases%2F(default)%2Fdocuments%22%7D%2C%22targetId%22%3A4%7D%7D"
|
||||
|
||||
#logging.info(f"Attempting to get initial session data from: {url}")
|
||||
actual_gsessionid = None
|
||||
sid = None
|
||||
|
||||
try:
|
||||
response = session.post(url, headers=headers, data=body, timeout=15) # Timeout for connect and initial response
|
||||
response.raise_for_status()
|
||||
|
||||
match = re.search(r'\[0,\s*\["c",\s*"([^"]+)"', response.text)
|
||||
if match:
|
||||
sid = match.group(1)
|
||||
#logging.info(f"Successfully obtained SID: {sid}")
|
||||
else:
|
||||
#logging.error(f"Could not parse SID from response body: {response.text[:500]}")
|
||||
return None
|
||||
|
||||
for header_name_original_case in response.headers:
|
||||
if header_name_original_case.lower() == 'x-http-session-id':
|
||||
actual_gsessionid = response.headers[header_name_original_case]
|
||||
logging.info(f"Found '{header_name_original_case}' in response headers: {actual_gsessionid}")
|
||||
break
|
||||
|
||||
if not actual_gsessionid:
|
||||
logging.warning("Dynamic gsessionid (X-HTTP-Session-Id) not found in first response headers.")
|
||||
|
||||
return {"sid": sid, "dynamic_gsessionid": actual_gsessionid}
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logging.error(f"Error during initial session data request: {e}")
|
||||
return None
|
||||
|
||||
def get_raw_roadway_datastream(session, sid, gsessionid_to_use, zx_for_data_req, read_duration_limit=2.0):
|
||||
"""
|
||||
Makes the second GET request in streaming mode and reads data for a limited duration.
|
||||
Returns the raw text content received within that duration.
|
||||
"""
|
||||
url = f"https://firestore.googleapis.com/google.firestore.v1.Firestore/Listen/channel?database=projects%2Fkytc-goky%2Fdatabases%2F(default)&gsessionid={gsessionid_to_use}&VER=8&RID=rpc&SID={sid}&CI=0&AID=0&TYPE=xmlhttp&zx={zx_for_data_req}&t=1"
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:139.0) Gecko/20100101 Firefox/139.0",
|
||||
"Accept": "*/*", "Accept-Language": "en-US,en;q=0.5", "Sec-Fetch-Dest": "empty",
|
||||
"Sec-Fetch-Mode": "cors", "Sec-Fetch-Site": "cross-site", "Priority": "u=4",
|
||||
"Referer": "https://goky.ky.gov/"
|
||||
}
|
||||
|
||||
#logging.info(f"Attempting to get roadway data stream with SID: {sid}, gsessionid: {gsessionid_to_use}, zx: {zx_for_data_req}")
|
||||
#logging.info(f"Will attempt to read stream for a maximum of {read_duration_limit} seconds.")
|
||||
|
||||
received_data_bytes = []
|
||||
raw_text_data = None
|
||||
|
||||
try:
|
||||
# Initial timeout for connection and first byte, stream=True to control body download
|
||||
with session.get(url, headers=headers, stream=True, timeout=(10, 30)) as response: # (connect_timeout, read_timeout_for_first_byte)
|
||||
response.raise_for_status()
|
||||
|
||||
start_time = time.time()
|
||||
for chunk in response.iter_content(chunk_size=8192): # Read in 8KB chunks
|
||||
if chunk: # filter out keep-alive new chunks
|
||||
received_data_bytes.append(chunk)
|
||||
|
||||
if time.time() - start_time > read_duration_limit:
|
||||
#logging.info(f"Read duration limit of {read_duration_limit} seconds reached. Stopping stream read.")
|
||||
break
|
||||
|
||||
# After loop (timeout or stream ended), decode collected bytes
|
||||
# The server might send UTF-8, response.encoding might guess, or we assume utf-8
|
||||
encoding = response.encoding if response.encoding else 'utf-8'
|
||||
raw_text_data = b"".join(received_data_bytes).decode(encoding, errors='replace')
|
||||
#logging.info(f"Successfully fetched and processed stream. Total data length: {len(raw_text_data)} characters.")
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logging.error(f"Error during roadway data stream request: {e}")
|
||||
# If there was an error but some data was received before it, try to decode that.
|
||||
if received_data_bytes:
|
||||
try:
|
||||
raw_text_data = b"".join(received_data_bytes).decode('utf-8', errors='replace') # Fallback to utf-8
|
||||
logging.warning(f"Request error occurred, but returning partially received data: {len(raw_text_data)} characters.")
|
||||
except Exception as decode_err:
|
||||
logging.error(f"Could not decode partially received data after request error: {decode_err}")
|
||||
raw_text_data = None # Ensure it's None if decoding fails
|
||||
else:
|
||||
raw_text_data = None
|
||||
except Exception as e_generic: # Catch any other unexpected errors
|
||||
logging.error(f"An unexpected error occurred during stream processing: {e_generic}")
|
||||
if received_data_bytes and not raw_text_data: # If an error happened after collecting some data but before decoding
|
||||
try:
|
||||
raw_text_data = b"".join(received_data_bytes).decode('utf-8', errors='replace')
|
||||
logging.warning(f"Unexpected error, but returning partially received data: {len(raw_text_data)} characters.")
|
||||
except Exception as decode_err:
|
||||
logging.error(f"Could not decode partially received data after unexpected error: {decode_err}")
|
||||
raw_text_data = None
|
||||
else:
|
||||
raw_text_data = None
|
||||
|
||||
return raw_text_data
|
||||
|
||||
|
||||
# --- Helper functions for Firestore value types ---
|
||||
def _parse_firestore_value(value_dict):
|
||||
if not isinstance(value_dict, dict):
|
||||
return value_dict
|
||||
if "stringValue" in value_dict:
|
||||
return value_dict["stringValue"]
|
||||
elif "integerValue" in value_dict:
|
||||
try:
|
||||
return int(value_dict["integerValue"])
|
||||
except ValueError:
|
||||
return value_dict["integerValue"]
|
||||
elif "doubleValue" in value_dict:
|
||||
try:
|
||||
return float(value_dict["doubleValue"])
|
||||
except ValueError:
|
||||
return value_dict["doubleValue"]
|
||||
elif "booleanValue" in value_dict:
|
||||
return value_dict["booleanValue"]
|
||||
elif "timestampValue" in value_dict:
|
||||
return value_dict["timestampValue"]
|
||||
elif "geoPointValue" in value_dict:
|
||||
return value_dict["geoPointValue"]
|
||||
elif "mapValue" in value_dict:
|
||||
return _parse_firestore_fields(value_dict["mapValue"].get("fields", {}))
|
||||
elif "arrayValue" in value_dict:
|
||||
values = value_dict["arrayValue"].get("values", [])
|
||||
return [_parse_firestore_value(v) for v in values]
|
||||
elif "nullValue" in value_dict:
|
||||
return None
|
||||
else:
|
||||
return value_dict
|
||||
|
||||
def _parse_firestore_fields(fields_dict):
|
||||
if not isinstance(fields_dict, dict):
|
||||
return {}
|
||||
py_dict = {}
|
||||
for field_name, firestore_value_wrapper in fields_dict.items():
|
||||
py_dict[field_name] = _parse_firestore_value(firestore_value_wrapper)
|
||||
return py_dict
|
||||
|
||||
def _parse_single_message_payload(list_payload_string, message_number_for_debug=""):
|
||||
"""
|
||||
Parses the actual "[[...]]" list payload of a single Firestore message.
|
||||
Returns a list of processed change objects, or None if parsing fails.
|
||||
"""
|
||||
# Pre-process for common JSON-to-Python literal mismatches
|
||||
# This is a bit of a shotgun approach, be careful if these strings
|
||||
# could legitimately appear as string *values*.
|
||||
# Ensure there are word boundaries to avoid replacing "trueValue" with "TrueValue"
|
||||
|
||||
# Using a more careful replacement with regular expressions might be better,
|
||||
# but for a quick test:
|
||||
processed_string = list_payload_string.replace(": true", ": True")
|
||||
processed_string = processed_string.replace(": false", ": False")
|
||||
processed_string = processed_string.replace(": null", ": None")
|
||||
# If they can appear at the start of an array value without a key:
|
||||
processed_string = processed_string.replace("[true", "[True")
|
||||
processed_string = processed_string.replace(", true", ", True")
|
||||
processed_string = processed_string.replace("[false", "[False")
|
||||
processed_string = processed_string.replace(", false", ", False")
|
||||
processed_string = processed_string.replace("[null", "[None")
|
||||
processed_string = processed_string.replace(", null", ", None")
|
||||
|
||||
|
||||
|
||||
|
||||
try:
|
||||
# Use the pre-processed string
|
||||
parsed_outer_list = ast.literal_eval(processed_string) # <--- USE PROCESSED STRING
|
||||
if not isinstance(parsed_outer_list, list):
|
||||
print(f"DEBUG: PAYLOAD_ERROR (Message {message_number_for_debug}) - Expected a list after ast.literal_eval, got {type(parsed_outer_list)}")
|
||||
return None
|
||||
except (ValueError, SyntaxError) as e:
|
||||
error_msg = getattr(e, 'msg', str(e))
|
||||
#print(f"DEBUG: PAYLOAD_ERROR (Message {message_number_for_debug}) - Error parsing list payload string with ast.literal_eval: {error_msg}")
|
||||
|
||||
error_line_num = getattr(e, 'lineno', None)
|
||||
error_col_num = getattr(e, 'offset', None)
|
||||
error_text_line = getattr(e, 'text', None)
|
||||
|
||||
# Use the *original* string for error reporting context if pre-processing was done,
|
||||
# or the processed_string if not, to ensure line numbers match the string ast saw.
|
||||
string_for_error_context = processed_string
|
||||
|
||||
if error_line_num is not None and error_text_line is not None:
|
||||
#print(f"DEBUG: PAYLOAD_ERROR Detail: Line {error_line_num}, Column {error_col_num if error_col_num is not None else '(unknown)'} (of the current {len(string_for_error_context)}-byte payload for Message {message_number_for_debug}).")
|
||||
#print(f"DEBUG: PAYLOAD_ERROR Offending line from parsed string: {error_text_line.rstrip()}") # This is the line ast.literal_eval saw
|
||||
if error_col_num is not None:
|
||||
pointer = " " * (error_col_num - 1) + "^"
|
||||
print(f"DEBUG: PAYLOAD_ERROR {pointer}")
|
||||
|
||||
lines_of_payload = string_for_error_context.splitlines()
|
||||
context_window_size = 2
|
||||
actual_error_line_idx = error_line_num - 1
|
||||
|
||||
display_start_idx = max(0, actual_error_line_idx - context_window_size)
|
||||
display_end_idx = min(len(lines_of_payload), actual_error_line_idx + context_window_size + 1)
|
||||
|
||||
#print(f"DEBUG: PAYLOAD_ERROR Context from payload (Message {message_number_for_debug}, around its line {error_line_num}):")
|
||||
for i in range(display_start_idx, display_end_idx):
|
||||
current_line_content = lines_of_payload[i] if i < len(lines_of_payload) else "[End of Payload]"
|
||||
line_prefix = " "
|
||||
if i == actual_error_line_idx:
|
||||
line_prefix = ">>"
|
||||
#print(f"DEBUG: PAYLOAD_ERROR {line_prefix} L{i+1:04d}: {current_line_content.rstrip()}")
|
||||
if i == actual_error_line_idx and error_col_num is not None:
|
||||
context_pointer_prefix = f"DEBUG: PAYLOAD_ERROR {line_prefix} L{i+1:04d}: " # Recalculate prefix length
|
||||
context_pointer = " " * (len(context_pointer_prefix) + error_col_num -1) + "^"
|
||||
print(context_pointer)
|
||||
else:
|
||||
print(f"DEBUG: PAYLOAD_ERROR (Message {message_number_for_debug}) Problematic segment (first 200 chars of string given to ast.literal_eval): '{string_for_error_context[:200]}...'")
|
||||
return None
|
||||
|
||||
# --- Rest of the processing loop (unchanged) ---
|
||||
processed_changes_for_this_message = []
|
||||
for item_array in parsed_outer_list:
|
||||
# ... (same processing logic as before)
|
||||
if not isinstance(item_array, list) or len(item_array) != 2:
|
||||
print(f"DEBUG: PAYLOAD_WARN - Skipping malformed item in outer list: {item_array}")
|
||||
continue
|
||||
|
||||
change_events_list = item_array[1]
|
||||
if not isinstance(change_events_list, list):
|
||||
print(f"DEBUG: PAYLOAD_WARN - Expected list of change events, got {type(change_events_list)}")
|
||||
continue
|
||||
|
||||
for event_obj in change_events_list:
|
||||
if not isinstance(event_obj, dict):
|
||||
print(f"DEBUG: PAYLOAD_WARN - Expected dict for change event, got {type(event_obj)}")
|
||||
continue
|
||||
if "documentChange" in event_obj:
|
||||
doc_change = event_obj["documentChange"]
|
||||
doc = doc_change.get("document", {})
|
||||
parsed_document = {
|
||||
"type": "document_change",
|
||||
"document_name": doc.get("name"),
|
||||
"fields": _parse_firestore_fields(doc.get("fields", {})),
|
||||
"createTime": doc.get("createTime"),
|
||||
"updateTime": doc.get("updateTime"),
|
||||
"targetIds": doc_change.get("targetIds"),
|
||||
}
|
||||
processed_changes_for_this_message.append(parsed_document)
|
||||
elif "targetChange" in event_obj:
|
||||
processed_changes_for_this_message.append({
|
||||
"type": "target_change",
|
||||
"change": event_obj["targetChange"]
|
||||
})
|
||||
elif "documentDelete" in event_obj:
|
||||
doc_delete = event_obj["documentDelete"]
|
||||
processed_changes_for_this_message.append({
|
||||
"type": "document_delete",
|
||||
"document_name": doc_delete.get("document"),
|
||||
"removedTargetIds": doc_delete.get("removedTargetIds"),
|
||||
"readTime": doc_delete.get("readTime")
|
||||
})
|
||||
elif "documentRemove" in event_obj:
|
||||
doc_remove = event_obj["documentRemove"]
|
||||
processed_changes_for_this_message.append({
|
||||
"type": "document_remove",
|
||||
"document_name": doc_remove.get("document"),
|
||||
"removedTargetIds": doc_remove.get("removedTargetIds"),
|
||||
"readTime": doc_remove.get("readTime")
|
||||
})
|
||||
elif "filter" in event_obj:
|
||||
processed_changes_for_this_message.append({
|
||||
"type": "filter_update",
|
||||
"filter_details": event_obj["filter"]
|
||||
})
|
||||
return processed_changes_for_this_message
|
||||
|
||||
|
||||
# --- Main function to parse the entire stream of messages ---
|
||||
def parse_all_firestore_messages(full_stream_string):
|
||||
"""
|
||||
Parses a string containing one or more length-prefixed Firestore messages.
|
||||
"""
|
||||
full_stream_string = full_stream_string.strip() # Strip entire input first
|
||||
all_parsed_changes = []
|
||||
current_offset = 0
|
||||
total_stream_length = len(full_stream_string)
|
||||
message_count = 0
|
||||
|
||||
while current_offset < total_stream_length:
|
||||
remaining_stream_chunk = full_stream_string[current_offset:]
|
||||
if not remaining_stream_chunk.strip():
|
||||
break
|
||||
|
||||
newline_idx_in_chunk = remaining_stream_chunk.find('\n')
|
||||
if newline_idx_in_chunk == -1:
|
||||
if remaining_stream_chunk.strip():
|
||||
print(f"DEBUG: STREAM_ERROR - Malformed stream: No newline found for length prefix at main offset {current_offset}. Remaining data (first 100 chars): '{remaining_stream_chunk[:100].strip()}...'")
|
||||
break
|
||||
|
||||
message_length_str = remaining_stream_chunk[:newline_idx_in_chunk].strip()
|
||||
try:
|
||||
expected_payload_length = int(message_length_str)
|
||||
if expected_payload_length < 0:
|
||||
print(f"DEBUG: STREAM_ERROR - Invalid negative length prefix '{message_length_str}' at main offset {current_offset}.")
|
||||
break
|
||||
except ValueError:
|
||||
print(f"DEBUG: STREAM_ERROR - Invalid length prefix '{message_length_str}' at main offset {current_offset}. Cannot convert to int. Chunk starts: '{remaining_stream_chunk[:100]}...'")
|
||||
break
|
||||
|
||||
payload_start_in_chunk = newline_idx_in_chunk + 1
|
||||
if payload_start_in_chunk + expected_payload_length > len(remaining_stream_chunk):
|
||||
print(f"DEBUG: STREAM_ERROR - Truncated payload for message (prefix: {message_length_str}) at main offset {current_offset}. Expected {expected_payload_length} payload bytes, but only {len(remaining_stream_chunk) - payload_start_in_chunk} available after newline.")
|
||||
break
|
||||
|
||||
actual_list_payload_string = remaining_stream_chunk[payload_start_in_chunk :
|
||||
payload_start_in_chunk + expected_payload_length]
|
||||
message_count += 1
|
||||
# print(f"DEBUG: STREAM - Parsing message #{message_count}, prefix: {message_length_str}, payload_len: {expected_payload_length}, main_offset: {current_offset}")
|
||||
|
||||
parsed_changes_from_this_message = _parse_single_message_payload(actual_list_payload_string, str(message_count))
|
||||
|
||||
if parsed_changes_from_this_message is None:
|
||||
print(f"DEBUG: STREAM_ERROR - Halting processing due to error in message #{message_count} (prefix: {message_length_str}).")
|
||||
break
|
||||
|
||||
all_parsed_changes.extend(parsed_changes_from_this_message)
|
||||
current_offset += (len(message_length_str) + 1 + expected_payload_length) # +1 for the original newline before stripping the prefix
|
||||
|
||||
print(f"DEBUG: STREAM - Finished processing. Parsed {message_count} messages. Total changes found: {len(all_parsed_changes)}.")
|
||||
return all_parsed_changes
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
zx_for_sid_request = "fgd9osntni3x"
|
||||
zx_for_data_request = "kxqo6qqmk3y4"
|
||||
stream_read_time_seconds = 1.0
|
||||
|
||||
with requests.Session() as http_session:
|
||||
session_init_data = get_initial_session_data(http_session, first_request_zx=zx_for_sid_request)
|
||||
|
||||
if session_init_data and session_init_data["sid"]:
|
||||
sid = session_init_data["sid"]
|
||||
gsessionid_to_use = session_init_data["dynamic_gsessionid"]
|
||||
|
||||
if not gsessionid_to_use:
|
||||
logging.warning("Dynamic gsessionid was not found in headers. Using a STALE example gsessionid. THIS WILL LIKELY FAIL.")
|
||||
gsessionid_to_use = "7VNzPJC3suxWdG7nZ4kBzftyc1O2nD9xZb-U0AEQD6w" # Fallback, likely stale
|
||||
|
||||
raw_data = get_raw_roadway_datastream(
|
||||
http_session,
|
||||
sid,
|
||||
gsessionid_to_use,
|
||||
zx_for_data_req=zx_for_data_request,
|
||||
read_duration_limit=stream_read_time_seconds
|
||||
)
|
||||
|
||||
if raw_data is not None: # Check for None explicitly, as empty string is valid data
|
||||
|
||||
parsed_data = parse_all_firestore_messages(raw_data)
|
||||
#data = json.loads(parsed_data)
|
||||
cams = {}
|
||||
for i in parsed_data:
|
||||
try:
|
||||
if i['fields']['type'] == 'rwis':
|
||||
if i['fields']['location']['longitude'] > -83.90:
|
||||
needed = {'url': i['fields']['source']['imageUrl'],'route': i['fields']['source']['route'],'cameraKey': i['fields']['source']['cameraKey'],'milepost': i['fields']['source']['mile_post'],'id': i['fields']['source']['id'],'lon': i['fields']['location']['longitude'],'lat': i['fields']['location']['latitude']}
|
||||
key = len(cams) + 1
|
||||
cams[key] = needed
|
||||
except (KeyError, TypeError):
|
||||
continue
|
||||
#print(cams)
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
for i in cams:
|
||||
#print(cams[i])
|
||||
values = (cams[i]['url'],cams[i]['route'] + " MM" + str(cams[i]['milepost']) ,cams[i]['cameraKey'],cams[i]['lat'],cams[i]['lon'],'normal',True,10)
|
||||
sql = 'INSERT INTO cams (url, description, bloomsky,lat,lon,method,active,interval) values (%s, %s, %s,%s,%s,%s,%s, %s) on conflict (bloomsky) do update set url = EXCLUDED.url'
|
||||
cursor.execute(sql,values)
|
||||
|
||||
|
||||
conn.commit()
|
||||
|
||||
|
||||
updates = ['UPDATE public.cams SET geom = ST_SetSRID(ST_MakePoint(lon, lat), 4326)',
|
||||
'UPDATE public.cams SET county = county.countyname from public.county WHERE ST_Contains(county.geom,cams.geom)',
|
||||
'UPDATE public.cams SET pzone = pzone.state_zone from public.pzone WHERE ST_Contains(pzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET fzone = fzone.state_zone from public.fzone WHERE ST_Contains(fzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET cwa = fzone.cwa from public.fzone WHERE ST_Contains(fzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET zonename = pzone.shortname from public.pzone WHERE ST_Contains(pzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET keephours = 400 WHERE keephours is null',
|
||||
"""UPDATE public.cams SET method = 'normal' WHERE method is null""",
|
||||
'UPDATE public.cams SET state = county.state from public.county WHERE ST_Contains(county.geom,cams.geom)']
|
||||
|
||||
for i in updates:
|
||||
cursor.execute(i)
|
||||
conn.commit()
|
||||
conn.close()
|
||||
else:
|
||||
print("Failed to retrieve any roadway data stream content, or an error occurred before data could be processed.")
|
||||
print(f" Attempted with SID='{sid}', gsessionid='{gsessionid_to_use}', zx for data='{zx_for_data_request}'")
|
||||
else:
|
||||
print("Could not obtain initial session data (SID and/or gsessionid). Script cannot continue.")
|
||||
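parse_all_firestore_messages() above walks a stream of length-prefixed payloads: each message is "<length>\n<payload>", where the length counts payload characters only. A toy, self-contained sketch of that framing on a synthetic stream (not real Firestore output):

def split_messages(stream: str):
    # Return the raw payload of each "<length>\n<payload>" message in order
    messages, offset = [], 0
    stream = stream.strip()
    while offset < len(stream):
        newline = stream.find('\n', offset)
        if newline == -1:
            break  # no length prefix left
        length = int(stream[offset:newline].strip())
        payload = stream[newline + 1:newline + 1 + length]
        messages.append(payload)
        offset = newline + 1 + length
    return messages

demo = "5\nhello" + "13\n" + '[[1,["abc"]]]'
print(split_messages(demo))  # ['hello', '[[1,["abc"]]]']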
66
gr2power.py
Normal file
66
gr2power.py
Normal file
@@ -0,0 +1,66 @@
import requests
import json
import psycopg2
import psycopg2.extensions
from datetime import datetime, timezone
import re
from shapely import wkt

def generate_gr2_triangle_placefile(filename="power_outages_gr2.txt"):
    """
    Generates a GR2Analyst placefile using Triangles and Text for power outages.

    Args:
        filename (str): The name of the placefile to create.
    """
    conn = None  # defined up front so the finally block is safe even if the connect call fails
    try:
        conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
        cursor = conn.cursor()

        cursor.execute("SELECT startguess,outagen,lastchange,st_astext(realareageom),lat,lon,cause, outagen FROM power WHERE cwa = 'RLX' and active = true")  # Include outagen in select
        outages = cursor.fetchall()

        with open(filename, "w") as pf:
            pf.write("Refresh: 1\n")
            pf.write("Threshold: 999 nautical_miles\n")
            pf.write("Title: Power Outages (RLX CWA) - Triangles\n")
            pf.write("Font: 1, 11, 0, \"Courier New\"\n")

            for outage in outages:
                startguess, outagen_num_db, lastchange, realareageom_wkt, lat, lon, cause, outagen_display_num = outage  # Get outagen as outagen_display_num

                # Format hover text (optional for triangles, but can add later if needed)
                hover_text = f"Outage #{outagen_display_num}\\n"  # Use outagen_display_num for display
                hover_text += f"Cause: {cause}\\n"
                hover_text += f"Last Update: {lastchange.strftime('%Y-%m-%d %H:%M UTC')}"

                # Triangle vertices - create a small triangle around the outage point
                triangle_lat_offset = 0.02  # Adjust for triangle size (latitude offset)
                triangle_lon_offset = 0.03  # Adjust for triangle size (longitude offset)

                pf.write("Color: 255 0 0\n")  # Red for the outage triangle (reset here since the previous label switched to blue)
                pf.write("Triangles:\n")
                pf.write(f" {lat - triangle_lat_offset}, {lon - triangle_lon_offset}\n")  # Vertex 1: Southwest
                pf.write(f" {lat + triangle_lat_offset}, {lon - triangle_lon_offset}\n")  # Vertex 2: Northwest
                pf.write(f" {lat}, {lon + triangle_lon_offset}\n")  # Vertex 3: East
                pf.write("End:\n")

                # Blue text label for outage number
                pf.write("Color: 0 0 255\n")  # Blue for text
                pf.write(f"Text: {lat}, {lon}, 1, \"Outage #{outagen_display_num}\", \"{hover_text}\"\n")  # Include hover text

            pf.write("End:\n")

        print(f"GR2Analyst Triangle placefile '{filename}' generated successfully.")

    except psycopg2.Error as db_error:
        print(f"Database error: {db_error}")
    except IOError as io_error:
        print(f"File I/O error: {io_error}")
    finally:
        if conn:
            conn.close()

if __name__ == "__main__":
    generate_gr2_triangle_placefile()
    # generate_gr2_triangle_placefile("my_outages_triangle.txt")
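Each outage above becomes a small triangle around its point, offset by fixed degree amounts. A tiny sketch of that vertex math, using the same 0.02/0.03 offsets; the sample point is made up:

def triangle_vertices(lat, lon, dlat=0.02, dlon=0.03):
    # Same three corners the placefile writer emits for each outage
    return [
        (lat - dlat, lon - dlon),  # southwest corner
        (lat + dlat, lon - dlon),  # northwest corner
        (lat, lon + dlon),         # eastern point
    ]

for vlat, vlon in triangle_vertices(38.35, -81.63):
    print(f" {vlat}, {vlon}")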
24
ham.php
Normal file
24
ham.php
Normal file
@@ -0,0 +1,24 @@
<?php
// Connecting, selecting database
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
    or die('Could not connect: ' . pg_last_error());

// Performing SQL query
//$query = "SELECT distinct on (camid) camid, filepath FROM camdb order by camid,dateutc desc";
$query = "SELECT hour, band, freq FROM ham";
$result = pg_query($query) or die('Query failed: ' . pg_last_error());

// Collect the rows and emit them as JSON
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
header('Content-Type: application/json');
echo json_encode($array);

// Free resultset
pg_free_result($result);

// Closing connection
pg_close($dbconn);
?>
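ham.php returns flat rows of {hour, band, freq}; index.html folds them client-side into one line per UTC hour, with hour 25 used as a "currently operating" flag. A hedged Python sketch of that same grouping (the sample rows are invented):

from collections import defaultdict

rows = [
    {"hour": 0, "band": "40m SSB", "freq": "7.260 MHz"},
    {"hour": 0, "band": "2m FM", "freq": "146.52 MHz"},
    {"hour": 25, "band": "40m SSB", "freq": "7.260 MHz"},
]

schedule = defaultdict(list)
for row in rows:
    schedule[row["hour"]].append(f'{row["band"]} / {row["freq"]}')

for hour in range(24):
    label = f"{hour:02d}-{(hour + 1):02d}Z"
    print(label, "; ".join(schedule.get(hour, ["No plan yet!"])))
print("Currently Operating:", "; ".join(schedule.get(25, [])))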
62
hamadmin.html
Normal file
62
hamadmin.html
Normal file
@@ -0,0 +1,62 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>SRD Update</title>
|
||||
|
||||
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css">
|
||||
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<p>Form for updating bands that are currently being monitored/worked</p>
|
||||
<p>Fill out these as pairs then click submit, you need to fill 1 pair as a minimum:</p>
|
||||
<form action="hamupdate.php" method="post">
|
||||
<div class="form-group row">
|
||||
<label for="band[]" class="col-4 col-form-label">Band / Mode 1</label>
|
||||
<div class="col-8">
|
||||
<input id="band[]" name="band[]" type="text" class="form-control">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="band[]" class="col-4 col-form-label">Freq / Comments 1</label>
|
||||
<div class="col-8">
|
||||
<input id="band[]" name="band[]" type="text" class="form-control">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="band[]" class="col-4 col-form-label">Band / Mode 2</label>
|
||||
<div class="col-8">
|
||||
<input id="band[]" name="band[]" type="text" class="form-control">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="band[]" class="col-4 col-form-label">Freq / Comments 2</label>
|
||||
<div class="col-8">
|
||||
<input id="band[]" name="band[]" type="text" class="form-control">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="band[]" class="col-4 col-form-label">Band / Mode 3</label>
|
||||
<div class="col-8">
|
||||
<input id="band[]" name="band[]" type="text" class="form-control">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<label for="band[]" class="col-4 col-form-label">Freq / Comments 3</label>
|
||||
<div class="col-8">
|
||||
<input id="band[]" name="band[]" type="text" class="form-control">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row">
|
||||
<div class="offset-4 col-8">
|
||||
<button name="submit" type="submit" class="btn btn-primary">Submit</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
</body>
|
||||
|
||||
|
||||
<script>
|
||||
</script>
|
||||
</html>
|
||||
167
index.html
Normal file
167
index.html
Normal file
@@ -0,0 +1,167 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>WX4RLX - NWS Charleston WV SKYWARN</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="description" content="WX4RLX SKYWARN">
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<style>
|
||||
body {background-color:#ffffff;background-repeat:no-repeat;background-position:top left;background-attachment:fixed;}
|
||||
h1{font-family:Arial, sans-serif;color:#000000;background-color:#ffffff;}
|
||||
p {font-family:Georgia, serif;font-size:14px;font-style:normal;font-weight:normal;color:#000000;background-color:#ffffff;}
|
||||
.table_component {
|
||||
overflow: auto;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.table_component table {
|
||||
border: 1px solid #000000;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
table-layout: auto;
|
||||
border-collapse: collapse;
|
||||
border-spacing: 1px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.table_component caption {
|
||||
caption-side: top;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.table_component th {
|
||||
border: 1px solid #000000;
|
||||
background-color: #eceff1;
|
||||
color: #000000;
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
.table_component td {
|
||||
border: 1px solid #000000;
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
.table_component tr:nth-child(even) td {
|
||||
background-color: #ababab;
|
||||
color: #000000;
|
||||
}
|
||||
|
||||
.table_component tr:nth-child(odd) td {
|
||||
background-color: #ffffff;
|
||||
color: #000000;
|
||||
}
|
||||
|
||||
.current {
|
||||
font-size: 20px;
|
||||
border: 3px black;
|
||||
border-style: solid;
|
||||
background: black;
|
||||
color: yellow;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
||||
|
||||
|
||||
<meta content="text/html;charset=utf-8" http-equiv="Content-Type">
|
||||
<meta content="utf-8" http-equiv="encoding">
|
||||
|
||||
</head>
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
function ham() {
|
||||
$.getJSON('ham.php', function(data){
|
||||
|
||||
var t = "";
|
||||
var current = "";
|
||||
for (var j=0;j<24;j++) {
|
||||
tr = "<tr>"
|
||||
hour = j.toString().padStart(2,"0") + "-" + (j+1).toString().padStart(2,"0") + "Z";
|
||||
var band = "";
|
||||
var freq = "";
|
||||
for (var i=0;i<data.length;i++) {
|
||||
if (j==data[i]["hour"]) {
|
||||
band += data[i]["band"];
|
||||
band += "<br>";
|
||||
if (data[i]["freq"] != null) {
|
||||
freq += data[i]["freq"];
|
||||
freq += "<br>";
|
||||
}}}
|
||||
|
||||
tr += "<td>";
|
||||
tr += hour;
|
||||
tr += "</td>";
|
||||
tr += "<td>";
|
||||
if (band == "") {
|
||||
band = "No plan yet!";
|
||||
}
|
||||
tr += band;
|
||||
tr += "</td>";
|
||||
tr += "<td>";
|
||||
tr += freq;
|
||||
tr += "</td>";
|
||||
tr += "</tr>"
|
||||
t += tr
|
||||
}
|
||||
|
||||
for (var i=0;i<data.length;i++) {
|
||||
if (data[i]["hour"] == 25) {
|
||||
current += data[i]["band"] + " / " + data[i]["freq"];
|
||||
current += "<br>";
|
||||
}}
|
||||
|
||||
|
||||
console.log(t)
|
||||
document.getElementById("current").innerHTML = "Currently Operating: " + current;
|
||||
document.getElementById("#datatable").innerHTML += t;
|
||||
});
|
||||
}
|
||||
ham();
|
||||
|
||||
</script>
|
||||
|
||||
|
||||
<body>
|
||||
<h1>NWS Charleston WV SKYWARN Recognition Day (SRD) 2024</h1>
|
||||
<p>SKYWARN Recognition Day 2024 will run from 00Z 7 December until 00Z 8 December</p>
|
||||
<p>For SKYWARN Recognition Day 2024 we will be using the office callsign WX4RLX and operating from the National Weather Service office in South Charleston at the West Virginia Regional Technology Park.</p>
|
||||
<p>If you plan to participate in SRD, please register on the national registration page <a href="https://www.weather.gov/crh/skywarnrecognition" target="_blank">here!</a></p>
|
||||
<p>If you are interested in coming to our office to participate, email <a href="mailto:John.Peck@noaa.gov">John.Peck@noaa.gov</a> for more information. You can get an idea of what time slots need more coverage by looking at the operator schedule at the bottom of this page </p>
|
||||
<p>We are located in the West Virginia Regional Technology Park, 1754 Hendrickson Dr, South Charleston, WV</p>
|
||||
<img src="wx4rlx.jpg">
|
||||
<p class="current" id="current">None</p>
|
||||
|
||||
<p>The black banner above will have info on our current operations.<br>
The table below gives a rough idea of where we might be operating, but it is certainly not set in stone.<br>
If you are scheduled to operate at the office and would like something specific in this schedule, please email <a href="mailto:John.Peck@noaa.gov">John.Peck@noaa.gov</a>.<br>
It may not be updated once we are operating, especially after WX6C goes home at 10 AM.</p>
|
||||
<div class="table_component" role="region" tabindex="0">
|
||||
<table id="#datatable">
|
||||
<caption><b>Tentative Operating Schedule</b></caption>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Time</th>
|
||||
<th>Bands / Modes</th>
|
||||
<th>Frequencies / Comments</th>
|
||||
</tr>
|
||||
</thead>
|
||||
|
||||
</table>
|
||||
|
||||
|
||||
<h1>Other ways to contact us:</h1>
|
||||
<p>Winlink: WX4RLX / <a href="mailto:wx4rlx@winlink.org">wx4rlx@winlink.org</a></p>
|
||||
<p>X: <a href="https://x.com/nwscharlestonwv" target="_blank">https://x.com/nwscharlestonwv</a></p>
|
||||
<p>Facebook: <a href="https://facebook.com/nwscharlestonwv" target="_blank">https://facebook.com/nwscharlestonwv</a></p>
|
||||
<p><b>Operator Schedule (if you would like to be added to this schedule contact: <a href="mailto:John.Peck@noaa.gov">John.Peck@noaa.gov</a>)</b></p>
|
||||
<iframe src="https://docs.google.com/spreadsheets/d/e/2PACX-1vTbueFNfIPMpZ1JM3U8UEgBiF_-WTDW-5_zjRaDaoea6H-kh1GwnhparbkjoFwwpXQYYmPB5Qu2_FpJ/pubhtml?gid=297489095&single=true&widget=true&headers=false" width="900" height="800"></iframe>
|
||||
<p><b>Repeaters we can access from the office:</b></p>
|
||||
<img src='repeaters.png'>
|
||||
<p>Our logs will be uploaded to LoTW and QRZ within 7 days of event conclusion</p>
|
||||
</body>
|
||||
|
||||
|
||||
</html>
|
||||
53
individualcam.php
Normal file
53
individualcam.php
Normal file
@@ -0,0 +1,53 @@
|
||||
<?php
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
$camid = pg_escape_string($dbconn, $_GET['camid'] ?? '');

// Number of images to return; cast to int so it is safe to interpolate into the query.
$camimages = isset($_GET['camimages']) ? (int) $_GET['camimages'] : 20;
if ($camimages < 1) {
    $camimages = 20;
}

if (isset($_GET['dtg'])) {
    // Images for this camera taken before the supplied end time
    $endtime = pg_escape_string($dbconn, $_GET['dtg']);
    $query = "SELECT camid, filepath, date_trunc('second', dateutc) as dateutc FROM camdb where camid = '{$camid}' and dateutc < '{$endtime}' order by dateutc desc limit {$camimages}";
} else {
    // No end time supplied: return the most recent images for this camera
    $query = "SELECT camid, filepath, date_trunc('second', dateutc) as dateutc FROM camdb where camid = '{$camid}' order by dateutc desc limit {$camimages}";
}
|
||||
|
||||
|
||||
|
||||
|
||||
//$query = "SELECT camid, filepath, date_trunc('second', dateutc) as dateutc FROM camdb where camid = '{$camid}' order by dateutc desc limit 20";
|
||||
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
395
ky511.py
Normal file
395
ky511.py
Normal file
@@ -0,0 +1,395 @@
|
||||
import requests
|
||||
import json
|
||||
import traceback # For detailed error reporting
|
||||
import psycopg2 # For PostgreSQL interaction
|
||||
from psycopg2 import extras
|
||||
import os
|
||||
from datetime import datetime
|
||||
import pytz # <--- Import pytz for timezone handling
|
||||
|
||||
# --- Database Configuration (Use Environment Variables!) ---
|
||||
DB_NAME = os.environ.get("PG_DBNAME", "nws")
|
||||
DB_USER = os.environ.get("PG_USER", "nws")
|
||||
DB_PASSWORD = os.environ.get("PG_PASSWORD", "nws")
|
||||
DB_HOST = os.environ.get("PG_HOST", "localhost")
|
||||
DB_PORT = os.environ.get("PG_PORT", "5432")
|
||||
DB_SCHEMA = "ky511" # Specify schema if used, otherwise set to None or remove usage
|
||||
DB_TABLE = "ky511"
|
||||
TABLE_NAME_QUALIFIED = f"{DB_SCHEMA}.{DB_TABLE}" if DB_SCHEMA else DB_TABLE
|
||||
|
||||
# --- Looker Studio API Configuration ---
|
||||
# ... (URL, HEADERS, COOKIES, PAYLOADS remain the same - ensure they are valid/fresh) ...
|
||||
URL = "https://lookerstudio.google.com/batchedDataV2?appVersion=20250324_0406"
|
||||
# HEADERS, COOKIES, payload_req1, payload_req2, combined_payload - keep as provided
|
||||
# (Make sure HEADERS/COOKIES are up-to-date for authentication)
|
||||
HEADERS = { "authority": "lookerstudio.google.com", "method": "POST", "path": "/batchedDataV2?appVersion=20250324_0406", "scheme": "https", "accept": "application/json, text/plain, */*", "accept-encoding": "gzip, deflate, br, zstd", "accept-language": "en-US,en;q=0.9", "cache-control": "no-cache", "origin": "https://lookerstudio.google.com", "pragma": "no-cache", "priority": "u=1, i", "referer": "https://lookerstudio.google.com/reporting/1413fcfb-1416-4e56-8967-55f8e9f30ec8/page/p_pbm4eo88qc", "sec-ch-ua": '"Google Chrome";v="135", "Not-A.Brand";v="8", "Chromium";v="135"', "sec-ch-ua-mobile": "?0", "sec-ch-ua-platform": '"Windows"', "sec-fetch-dest": "empty", "sec-fetch-mode": "cors", "sec-fetch-site": "same-origin", "x-client-data": "CIS2yQEIorbJAQipncoBCMHbygEIk6HLAQiFoM0BCP6lzgEIvdXOAQjJ4M4BCIbizgEIu+fOAQjS6M4BCKzpzgE=", "x-rap-xsrf-token": "AImk1AIPFDSOkwsENSc2sLLwBANEib1ysQ:1744403672035", "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36", "Content-Type": "application/json", }
|
||||
COOKIES = { "SID": "g.a000vwi83sZx47d1q9Po8BgPR3jBX7lpsrzWSuPIKyp6RycUEctVzyy0oo1gNvQbwZa53AWllwACgYKAVQSARMSFQHGX2MiUpZ3JZlRTzUS5L-5fPmgpBoVAUF8yKprZzPlNM_X83070Ct35bq_0076", "__Secure-1PSID": "g.a000vwi83sZx47d1q9Po8BgPR3jBX7lpsrzWSuPIKyp6RycUEctVKNqZm8qWU9ZC2DlRVPn0igACgYKAfQSARMSFQHGX2MiYGW66m_L_p1vlqiuEg-IrxoVAUF8yKrgRbA9tcA4VinZix0qlexX0076", "__Secure-3PSID": "g.a000vwi83sZx47d1q9Po8BgPR3jBX7lpsrzWSuPIKyp6RycUEctVnGF-cJAl9Wr1rJ-NOCW63wACgYKAZASARMSFQHGX2Mi1gYcRRnI8v2AdvofwvFG5BoVAUF8yKooSw-opCZ1-vxmkQXCB7330076", "HSID": "AJifEcy5MKbQSoqIz", "SSID": "AB87p4RIXK_USto4D", "APISID": "fQw9oTJbNnptFKdr/AIXxJuvoAk3qrIeWi", "SAPISID": "3zFHAXrGoL_XNe0X/A7AafTXBL2NRj7N2h", "__Secure-1PAPISID": "3zFHAXrGoL_XNe0X/A7AafTXBL2NRj7N2h", "__Secure-3PAPISID": "3zFHAXrGoL_XNe0X/A7AafTXBL2NRj7N2h", "AEC": "AVcja2e2MbzHDDL2yqJDMgUeDL-S3BEFB_yK293aRhQwshqv22Bd7IQ9mvg", "S": "maestro=IUwVdLn0rPZ27SY26uYPEonS9u1J9oQi8YzVJpqIc-w", "_gid": "GA1.3.1639311329.1744393279", "NID": "523=n5TR7moG8fZ7ZefkfCPQgx3aIuzhv_Vqk6CI0ah9HTWaoS-D4CtVwh6nIKaFY3hPLBPf_vf3THtu0nnJXc2ZaD5i3MpiIqOD8zBQbDuCp5G8rB3an7Jt7wdVuE8S3326_SF6FEBti9omWc2wNU43MbtEXSC7g4YP2sVzLohrGiUi_FHMIRDHj8OUKtDg9HjD3U3RzLRZk-IlyQyoRrwz0Ubf7IKxTsMvt57VWJvB2gqODqJJvaGWUdASl5lvPuTR7hlNkDyZwRC9rr-T2XtGUa10wUS_5bERpv6A2wTXXquQodkB25oDZvnhoS7FmO-SSAV-fweVu9tGlQ566w1yfBS0iANk8_iSCqhqFx6R1fyB9Nwxyf_g6ncw6nOmIHrtsIFiwAGJgwno2WRNey5nEgZbd5O-ew3z2K_aiPLvgeJCvY82L5swZctp7ZJtPFgGqXuAj3i6KtDhsTtizx0PbPpb7bvZ2nJ-SxlkR4s84fp_NxcXZKEdQfzOzoL6nAQMG9Kh28t_yKvcordnv25-55J_n_yzPDH78dTcLBzWS5dEcLP_Tt7HlSevSbP_2-NNoBGhM76vmMyIsMS0FucwPPKExrF6pwC3kc4g-4gruLlIWEHSnYodVcXQxD7Y2pD-0MBD7O8s-fCBDhlk1OgWfTHC1JnyrkUIMuqzj5-_LbuRIDPbN8YjVF0lO7jeyX0I9JjHHU3qc9EvhZ5LqpZKpRgTl5U4Prsmgq9K0o3nqvNoRMAbSTLX2kKGfhZKxtZT-Ezzrs_jLuZELFL8u98joYHS6v-guEvjD2Kg_XeWgRz3zAKBojSdtJnSuD7qjsnrV6IagTb8Yoazd7Gz3g-mlz8XTtSx1v7sStbFLnpMUlk", "_ga": "GA1.1.1275703331.1743917675", "_ga_LPCKLD3Z7X": "GS1.1.1744403669.9.1.1744403672.0.0.0", "RAP_XSRF_TOKEN": "AImk1AIPFDSOkwsENSc2sLLwBANEib1ysQ:1744403672035", "__Secure-1PSIDTS": "sidts-CjIB7pHpta3VWPdkJx7cNCUjUpy6c-d9WA3bvc3_9icXKqMA3kaW8Nh6_NJ-S4_OkQRZtRAA", "__Secure-3PSIDTS": "sidts-CjIB7pHpta3VWPdkJx7cNCUjUpy6c-d9WA3bvc3_9icXKqMA3kaW8Nh6_NJ-S4_OkQRZtRAA", "SIDCC": "AKEyXzUvhhICXSbNTp0dL7c4St5hDPC_ghsZ_PlMfsv7M8YVMlV8EibT-8IAsh7PmcPAtY38PJY", "__Secure-1PSIDCC": "AKEyXzUDzbYtACzfEgsC_j1S2ay0dayt4PwLSUoOjlS1xMBQ9FeL52NBdnlZBn_KWMM4a8jFaNv5", "__Secure-3PSIDCC": "AKEyXzVCxfyVIsm9apiA7JdTK3UKhcQUNLBOKQFxN8QDJyeLPojNFUBUs_K_X0xOR2vzZsBBEk5V", "_gat": "1", "_ga_S4FJY0X3VX": "GS1.1.1744401723.8.1.1744404147.0.0.0", }
|
||||
payload_req1 = { "requestContext": {"reportContext": {"reportId": "1413fcfb-1416-4e56-8967-55f8e9f30ec8", "pageId": "p_pbm4eo88qc", "mode": 1, "componentId": "cd-rdkny9a9qc", "displayType": "simple-table", "actionId": "crossFilters"}, "requestMode": 0}, "datasetSpec": {"dataset": [{"datasourceId": "c2fc8cdd-46bb-454c-bf09-90ebfd4067d7", "revisionNumber": 0, "parameterOverrides": []}], "queryFields": [{"name": "qt_3nwfu9yq1c", "datasetNs": "d0", "tableNs": "t0", "resultTransformation": {"analyticalFunction": 0, "isRelativeToBase": False}, "dataTransformation": {"sourceFieldName": "_Event_Id_"}}, {"name": "qt_8yjok4izsc", "datasetNs": "d0", "tableNs": "t0", "resultTransformation": {"analyticalFunction": 0, "isRelativeToBase": False}, "dataTransformation": {"sourceFieldName": "_DateTime_EST_"}}, {"name": "qt_sfkc163arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_KYTC_Type_"}}, {"name": "qt_4e66idhbrc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_Incident_Source_"}}, {"name": "qt_re76qrqe2c", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_District_", "aggregation": 0}}, {"name": "qt_tfkc163arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_County_Name_"}}, {"name": "qt_ufkc163arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_Route_Label_"}}, {"name": "qt_vfkc163arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_BMP_Initial_"}}, {"name": "qt_o7kc163arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_EMP_Initial_"}}, {"name": "qt_p7kc163arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_Description_"}}], "sortData": [{"sortColumn": {"name": "qt_8yjok4izsc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_DateTime_EST_"}}, "sortDir": 1}], "includeRowsCount": True, "relatedDimensionMask": {"addDisplay": False, "addUniqueId": False, "addLatLong": False}, "paginateInfo": {"startRow": 1, "rowsCount": 500}, "dsFilterOverrides": [], "filters": [{"filterDefinition": {"filterExpression": {"include": False, "conceptType": 0, "concept": {"ns": "t0", "name": "qt_exs3vib9qc"}, "filterConditionType": "PT", "stringValues": ["Shoulder"], "numberValues": [], "queryTimeTransformation": {"dataTransformation": {"sourceFieldName": "_Source_Type_"}}}}, "dataSubsetNs": {"datasetNs": "d0", "tableNs": "t0", "contextNs": "c0"}, "version": 3}, {"filterDefinition": {"filterExpression": {"include": True, "conceptType": 0, "concept": {"name": "qt_vwmdhfhbrc", "ns": "t0"}, "queryTimeTransformation": {"dataTransformation": {"sourceFieldName": "_Incident_Source_"}}, "filterConditionType": "IN", "stringValues": ["KYTC Reported"]}}, "dataSubsetNs": {"datasetNs": "d0", "tableNs": "t0", "contextNs": "c0"}, "version": 3, "isCanvasFilter": True}, {"filterDefinition": {"filterExpression": {"include": True, "conceptType": 0, "concept": {"name": "qt_kjyfx83arc", "ns": "t0"}, "queryTimeTransformation": {"dataTransformation": {"sourceFieldName": "_County_Name_"}}, "filterConditionType": "IN", "stringValues": ["Boyd", "Carter", "Fleming", "Greenup"]}}, "dataSubsetNs": {"datasetNs": "d0", "tableNs": "t0", "contextNs": "c0"}, "version": 3, "isCanvasFilter": True}, {"filterDefinition": {"filterExpression": {"include": True, "conceptType": 0, "concept": {"name": "qt_uiren73arc", "ns": "t0"}, "queryTimeTransformation": 
{"dataTransformation": {"sourceFieldName": "_KYTC_Type_"}}, "filterConditionType": "IN", "stringValues": ["Weather"]}}, "dataSubsetNs": {"datasetNs": "d0", "tableNs": "t0", "contextNs": "c0"}, "version": 3, "isCanvasFilter": True}], "features": [], "dateRanges": [], "contextNsCount": 1, "calculatedField": [], "needGeocoding": False, "geoFieldMask": [], "multipleGeocodeFields": [], "timezone": "America/New_York"}, "role": "main", "retryHints": {"useClientControlledRetry": True, "isLastRetry": False, "retryCount": 0, "originalRequestId": "cd-rdkny9a9qc_0_0"} }
|
||||
payload_req2 = { "requestContext": {"reportContext": {"reportId": "1413fcfb-1416-4e56-8967-55f8e9f30ec8", "pageId": "p_pbm4eo88qc", "mode": 1, "componentId": "cd-3aflvp88qc", "displayType": "google-map", "actionId": "crossFilters"}, "requestMode": 0}, "datasetSpec": {"dataset": [{"datasourceId": "c2fc8cdd-46bb-454c-bf09-90ebfd4067d7", "revisionNumber": 0, "parameterOverrides": []}], "queryFields": [{"name": "qt_pbotw53arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_Incident_Id_"}}, {"name": "qt_qbotw53arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_KYTC_Type_"}}, {"name": "qt_7grvqe4arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "qt_7grvqe4arc", "textFormula": "CONCAT(t0._KYTC_Type_, \" - \", t0._Route_Label_, \" - \", t0._BMP_Initial_)", "sourceType": 1, "frontendTextFormula": "CONCAT(t0._KYTC_Type_,\" - \",t0._Route_Label_,\" - \",t0._BMP_Initial_)", "formulaOutputDataType": 0}}, {"name": "qt_5prae63arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_SHAPE_", "aggregation": 16, "outputGeoType": 1}}], "sortData": [{"sortColumn": {"name": "qt_pbotw53arc", "datasetNs": "d0", "tableNs": "t0", "dataTransformation": {"sourceFieldName": "_Incident_Id_"}}, "sortDir": 1}], "includeRowsCount": True, "relatedDimensionMask": {"addDisplay": False, "addUniqueId": False, "addLatLong": True}, "paginateInfo": {"startRow": 1, "rowsCount": 100000}, "dsFilterOverrides": [], "filters": [{"filterDefinition": {"filterExpression": {"include": False, "conceptType": 0, "concept": {"ns": "t0", "name": "qt_exs3vib9qc"}, "filterConditionType": "PT", "stringValues": ["Shoulder"], "numberValues": [], "queryTimeTransformation": {"dataTransformation": {"sourceFieldName": "_Source_Type_"}}}}, "dataSubsetNs": {"datasetNs": "d0", "tableNs": "t0", "contextNs": "c0"}, "version": 3}, {"filterDefinition": {"filterExpression": {"include": True, "conceptType": 0, "concept": {"name": "qt_vwmdhfhbrc", "ns": "t0"}, "queryTimeTransformation": {"dataTransformation": {"sourceFieldName": "_Incident_Source_"}}, "filterConditionType": "IN", "stringValues": ["KYTC Reported"]}}, "dataSubsetNs": {"datasetNs": "d0", "tableNs": "t0", "contextNs": "c0"}, "version": 3, "isCanvasFilter": True}, {"filterDefinition": {"filterExpression": {"include": True, "conceptType": 0, "concept": {"name": "qt_kjyfx83arc", "ns": "t0"}, "queryTimeTransformation": {"dataTransformation": {"sourceFieldName": "_County_Name_"}}, "filterConditionType": "IN", "stringValues": ["Boyd", "Carter", "Fleming", "Greenup"]}}, "dataSubsetNs": {"datasetNs": "d0", "tableNs": "t0", "contextNs": "c0"}, "version": 3, "isCanvasFilter": True}, {"filterDefinition": {"filterExpression": {"include": True, "conceptType": 0, "concept": {"name": "qt_uiren73arc", "ns": "t0"}, "queryTimeTransformation": {"dataTransformation": {"sourceFieldName": "_KYTC_Type_"}}, "filterConditionType": "IN", "stringValues": ["Weather"]}}, "dataSubsetNs": {"datasetNs": "d0", "tableNs": "t0", "contextNs": "c0"}, "version": 3, "isCanvasFilter": True}], "features": [], "dateRanges": [], "contextNsCount": 1, "calculatedField": [], "needGeocoding": False, "geoFieldMask": [], "geoVertices": 100000, "multipleGeocodeFields": [], "timezone": "America/New_York"}, "role": "main", "retryHints": {"useClientControlledRetry": True, "isLastRetry": False, "retryCount": 0, "originalRequestId": "cd-3aflvp88qc_0_0"} }
|
||||
combined_payload = {"dataRequest": [payload_req1, payload_req2]}
|
||||
|
||||
# --- Key Mappings and Constants ---
|
||||
KEY_RENAME_MAP = { 'qt_3nwfu9yq1c': 'id', 'qt_8yjok4izsc': 'dtg', 'qt_4e66idhbrc': 'source', 'qt_ufkc163arc': 'route', 'qt_p7kc163arc': 'remark' }
|
||||
COORDINATE_KEY_OLD = 'qt_5prae63arc'
|
||||
LONGITUDE_KEY_NEW = 'lon'
|
||||
LATITUDE_KEY_NEW = 'lat'
|
||||
BMP_KEY_ORIGINAL = 'qt_vfkc163arc'
|
||||
COUNTY_KEY_ORIGINAL = 'qt_tfkc163arc'
|
||||
|
||||
# --- Define the US Eastern Timezone ---
|
||||
# This timezone correctly handles switches between EST and EDT
|
||||
eastern_tz = pytz.timezone('America/New_York')
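# Illustrative example of the localize/convert flow used further below:
#   naive = datetime(2025, 7, 1, 14, 30)             # naive 2:30 PM local time (EDT in July)
#   aware = eastern_tz.localize(naive, is_dst=None)  # 2025-07-01 14:30:00-04:00
#   aware.astimezone(pytz.utc)                       # 2025-07-01 18:30:00+00:00
# With is_dst=None, pytz raises AmbiguousTimeError/NonExistentTimeError for the
# clock-change hours instead of silently guessing an offset.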
|
||||
|
||||
# --- Helper Function (Column-to-Row) ---
|
||||
# ... (process_table_dataset function remains the same) ...
|
||||
def process_table_dataset(table_dataset):
|
||||
if not table_dataset: return []
|
||||
try:
|
||||
column_info = table_dataset.get('columnInfo', [])
|
||||
columns_data = table_dataset.get('column', [])
|
||||
num_records = table_dataset.get('totalCount', 0)
|
||||
if num_records == 0 and columns_data:
|
||||
first_col_data = columns_data[0]
|
||||
data_key = next((k for k in first_col_data if isinstance(first_col_data[k], dict) and 'values' in first_col_data[k]), None)
|
||||
if data_key: num_records = len(first_col_data[data_key].get('values', []))
|
||||
if not column_info or not columns_data or num_records == 0: return []
|
||||
column_names = [info.get('name', f'unknown_col_{i}') for i, info in enumerate(column_info)]
|
||||
processed_events = []
|
||||
for i in range(num_records):
|
||||
event = {}
|
||||
for j, col_name in enumerate(column_names):
|
||||
if j >= len(columns_data): event[col_name] = None; continue
|
||||
col_data_dict = columns_data[j]
|
||||
value = None
|
||||
data_key = next((k for k in col_data_dict if isinstance(col_data_dict[k], dict) and 'values' in col_data_dict[k]), None)
|
||||
if data_key:
|
||||
values_list = col_data_dict[data_key].get('values', [])
|
||||
if i < len(values_list): value = values_list[i]
|
||||
else: value = None
|
||||
else: value = None
|
||||
event[col_name] = value
|
||||
processed_events.append(event)
|
||||
return processed_events
|
||||
except Exception as e: print(f"Error processing table dataset: {e}"); traceback.print_exc(); return []
|
||||
|
||||
|
||||
# --- Data Fetching and Processing Function ---
|
||||
def fetch_and_process_data():
|
||||
print("Sending combined request...")
|
||||
try:
|
||||
response = requests.post(URL, headers=HEADERS, cookies=COOKIES, json=combined_payload)
|
||||
response.raise_for_status()
|
||||
print(f"Request successful! Status Code: {response.status_code}")
|
||||
cleaned_text = response.text.lstrip(")]}'")
|
||||
response_data = json.loads(cleaned_text)
|
||||
|
||||
table_data_list, map_data_list = [], []
|
||||
if 'dataResponse' in response_data and isinstance(response_data['dataResponse'], list) and len(response_data['dataResponse']) >= 2:
|
||||
try:
|
||||
table_subset = response_data['dataResponse'][0].get('dataSubset', [{}])[0]
|
||||
table_dataset = table_subset.get('dataset', {}).get('tableDataset')
|
||||
if table_dataset: table_data_list = process_table_dataset(table_dataset)
|
||||
except IndexError: pass
|
||||
try:
|
||||
map_subset = response_data['dataResponse'][1].get('dataSubset', [{}])[0]
|
||||
map_dataset = map_subset.get('dataset', {}).get('tableDataset')
|
||||
if map_dataset: map_data_list = process_table_dataset(map_dataset)
|
||||
except IndexError: pass
|
||||
else: print(f"Error: Expected 2 'dataResponse' items."); return []
|
||||
|
||||
if not table_data_list or not map_data_list: print("Error: Failed to process datasets."); return []
|
||||
|
||||
print(f"\nMerging {len(table_data_list)} table events and {len(map_data_list)} map events...")
|
||||
merged_events_dict = {}
|
||||
for event1 in table_data_list:
|
||||
event_type = event1.get('qt_sfkc163arc')
|
||||
route_label = event1.get('qt_ufkc163arc')
|
||||
bmp_value = event1.get(BMP_KEY_ORIGINAL)
|
||||
if event_type is not None and route_label is not None and bmp_value is not None:
|
||||
bmp_str = str(int(bmp_value)) if isinstance(bmp_value, float) and bmp_value.is_integer() else str(bmp_value)
|
||||
join_key = f"{event_type} - {route_label} - {bmp_str}"
|
||||
merged_events_dict[join_key] = event1.copy()
|
||||
|
||||
merge_success_count = 0
|
||||
for event2 in map_data_list:
|
||||
join_key = event2.get('qt_7grvqe4arc')
|
||||
if join_key in merged_events_dict:
|
||||
existing_event = merged_events_dict[join_key]
|
||||
for key, value in event2.items():
|
||||
if key == COORDINATE_KEY_OLD:
|
||||
if isinstance(value, str) and value.strip().startswith('{'):
|
||||
try:
|
||||
geo_data = json.loads(value)
|
||||
if geo_data.get('type') == 'Point' and 'coordinates' in geo_data: existing_event[key] = tuple(geo_data['coordinates'])
|
||||
else: existing_event[key] = value
|
||||
except json.JSONDecodeError: existing_event[key] = value
|
||||
else: existing_event[key] = value
|
||||
elif key not in existing_event: existing_event[key] = value
|
||||
merge_success_count += 1
|
||||
#print(f"-> Merged data for {merge_success_count} events.")
|
||||
|
||||
#print("\nApplying final transformations (including timezone handling)...") # <--- Updated print message
|
||||
final_transformed_list = []
|
||||
parse_error_count = 0
|
||||
# Define potential datetime formats from Looker Studio (add more if needed)
|
||||
# Common formats:
|
||||
# %m/%d/%Y %I:%M:%S %p (e.g., 05/20/2024 02:30:00 PM)
|
||||
# %Y-%m-%d %H:%M:%S (e.g., 2024-05-20 14:30:00)
|
||||
# %Y%m%d%H%M%S (e.g., 20240520143000) - Less common for display but possible
|
||||
possible_dt_formats = [
|
||||
'%m/%d/%Y %I:%M:%S %p', # e.g., 05/20/2024 02:30:00 PM
|
||||
'%Y-%m-%d %H:%M:%S', # e.g., 2024-05-20 14:30:00
|
||||
'%Y-%m-%dT%H:%M:%S', # ISO 8601 without timezone (e.g., 2025-04-12T06:16:06)
|
||||
# Add other formats if you observe them in the data
|
||||
]
|
||||
|
||||
|
||||
for event in merged_events_dict.values():
|
||||
transformed_event = {}
|
||||
aware_dtg = None # Initialize dtg as None
|
||||
try:
|
||||
# --- Timezone Handling for 'dtg' ---
|
||||
raw_dtg_value = event.get('qt_8yjok4izsc')
|
||||
if raw_dtg_value:
|
||||
parsed_naive = None
|
||||
for fmt in possible_dt_formats:
|
||||
try:
|
||||
# Attempt to parse using the current format
|
||||
parsed_naive = datetime.strptime(str(raw_dtg_value), fmt)
|
||||
break # Stop trying formats if one succeeds
|
||||
except ValueError:
|
||||
continue # Try the next format
|
||||
|
||||
if parsed_naive:
|
||||
# Successfully parsed, now make it timezone-aware (ET)
|
||||
aware_dtg = eastern_tz.localize(parsed_naive, is_dst=None) # is_dst=None raises for ambiguous/nonexistent local times instead of guessing
|
||||
# print(f"DEBUG: Raw: {raw_dtg_value}, Parsed Naive: {parsed_naive}, Aware ET: {aware_dtg}, UTC: {aware_dtg.astimezone(pytz.utc)}") # Optional debug print
|
||||
else:
|
||||
# Could not parse with any known format
|
||||
print(f"Warning: Could not parse dtg value '{raw_dtg_value}' for event ID {event.get('qt_3nwfu9yq1c')} using known formats.")
|
||||
parse_error_count += 1
|
||||
else:
|
||||
print(f"Warning: Missing dtg value for event ID {event.get('qt_3nwfu9yq1c')}.")
|
||||
parse_error_count += 1
|
||||
|
||||
# --- Standard Key Renaming and Data Extraction ---
|
||||
for old_key, new_key in KEY_RENAME_MAP.items():
|
||||
if old_key == 'qt_8yjok4izsc':
|
||||
transformed_event[new_key] = aware_dtg # Assign the aware datetime object or None
|
||||
elif old_key in event:
|
||||
transformed_event[new_key] = event[old_key]
|
||||
|
||||
if COORDINATE_KEY_OLD in event:
|
||||
coordinates = event[COORDINATE_KEY_OLD]
|
||||
if isinstance(coordinates, (list, tuple)) and len(coordinates) >= 2:
|
||||
try:
|
||||
transformed_event[LONGITUDE_KEY_NEW] = float(coordinates[0])
|
||||
transformed_event[LATITUDE_KEY_NEW] = float(coordinates[1])
|
||||
except (TypeError, ValueError): pass # Keep lon/lat None if conversion fails
|
||||
keys_to_keep = [BMP_KEY_ORIGINAL, COUNTY_KEY_ORIGINAL]
|
||||
for key in keys_to_keep:
|
||||
if key in event: transformed_event[key] = event[key]
|
||||
# Ensure source is added if not already mapped
|
||||
if 'source' not in transformed_event and 'qt_4e66idhbrc' in event:
|
||||
transformed_event['source'] = event['qt_4e66idhbrc']
|
||||
|
||||
# Only add event if it has an ID (required for DB)
|
||||
if transformed_event.get('id'):
|
||||
final_transformed_list.append(transformed_event)
|
||||
else:
|
||||
print(f"Warning: Skipping event due to missing ID. Original data snippet: {str(event)[:100]}")
|
||||
|
||||
except Exception as e: print(f"Error during final transformation for event: {e}"); traceback.print_exc()
|
||||
|
||||
if parse_error_count > 0:
|
||||
print(f"-> Encountered {parse_error_count} datetime parsing issues.")
|
||||
#print(f"-> Finished transformation. Resulting valid events: {len(final_transformed_list)}.")
|
||||
return final_transformed_list
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(f"API Request failed: {e}")
|
||||
except json.JSONDecodeError as e:
|
||||
print(f"Failed to decode API JSON response: {e}")
|
||||
if 'cleaned_text' in locals():
|
||||
print("Raw response snippet:", cleaned_text[:500])
|
||||
except Exception as e:
|
||||
print(f"An unexpected error occurred during data fetching/processing: {e}")
|
||||
traceback.print_exc()
|
||||
return []
|
||||
|
||||
# --- Database Upsert and Post-Update Function ---
|
||||
def upsert_and_update_db(events):
|
||||
"""Upserts events and runs post-update spatial queries."""
|
||||
if not events:
|
||||
print("No events to process.")
|
||||
return
|
||||
|
||||
conn = None
|
||||
cursor = None
|
||||
upserted_count = 0
|
||||
upsert_error_count = 0
|
||||
|
||||
# SQL definitions (NO COUNTY in upsert)
|
||||
# Ensure first_seen and last_seen_in_feed columns are TIMESTAMPTZ
|
||||
# psycopg2 handles timezone-aware datetime objects correctly for TIMESTAMPTZ
|
||||
sql_upsert = f"""
|
||||
INSERT INTO {TABLE_NAME_QUALIFIED}
|
||||
(id, first_seen, initial_description, latest_description, last_updated, last_seen_in_feed, geom, source, route, remark)
|
||||
VALUES
|
||||
(%(id)s, %(first_seen)s, %(initial_desc)s, %(latest_desc)s, NOW(), %(last_seen)s, ST_SetSRID(ST_MakePoint(%(lon)s, %(lat)s), 4326), %(source)s, %(route)s, %(remark)s)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
latest_description = excluded.latest_description,
|
||||
last_updated = NOW(),
|
||||
last_seen_in_feed = excluded.last_seen_in_feed,
|
||||
geom = excluded.geom,
|
||||
source = excluded.source,
|
||||
route = excluded.route,
|
||||
remark = excluded.remark,
|
||||
-- Keep original first_seen and initial_description on update
|
||||
first_seen = {TABLE_NAME_QUALIFIED}.first_seen,
|
||||
initial_description = {TABLE_NAME_QUALIFIED}.initial_description;
|
||||
"""
|
||||
sql_upsert_no_geom = f"""
|
||||
INSERT INTO {TABLE_NAME_QUALIFIED}
|
||||
(id, first_seen, initial_description, latest_description, last_updated, last_seen_in_feed, geom, source, route, remark)
|
||||
VALUES
|
||||
(%(id)s, %(first_seen)s, %(initial_desc)s, %(latest_desc)s, NOW(), %(last_seen)s, NULL, %(source)s, %(route)s, %(remark)s)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
latest_description = excluded.latest_description,
|
||||
last_updated = NOW(),
|
||||
last_seen_in_feed = excluded.last_seen_in_feed,
|
||||
geom = NULL, -- this feed row has no coordinates, so geom is set to NULL (overwriting any previous value)
|
||||
source = excluded.source,
|
||||
route = excluded.route,
|
||||
remark = excluded.remark,
|
||||
-- Keep original first_seen and initial_description on update
|
||||
first_seen = {TABLE_NAME_QUALIFIED}.first_seen,
|
||||
initial_description = {TABLE_NAME_QUALIFIED}.initial_description;
|
||||
"""
|
||||
|
||||
# Post-update SQL definitions (remain the same)
|
||||
post_update_sqls = [
|
||||
f'UPDATE {TABLE_NAME_QUALIFIED} SET county = county.countyname from public.county WHERE ST_Contains(county.geom,{TABLE_NAME_QUALIFIED}.geom) AND {TABLE_NAME_QUALIFIED}.county IS NULL', # Optional: only update if null
|
||||
f'UPDATE {TABLE_NAME_QUALIFIED} SET cwa = fzone.cwa from public.fzone WHERE ST_Contains(fzone.geom,{TABLE_NAME_QUALIFIED}.geom) AND {TABLE_NAME_QUALIFIED}.cwa IS NULL', # Optional: only update if null
|
||||
f'UPDATE {TABLE_NAME_QUALIFIED} SET st = county.state from public.county WHERE ST_Contains(county.geom,{TABLE_NAME_QUALIFIED}.geom) AND {TABLE_NAME_QUALIFIED}.st IS NULL' # Optional: only update if null
|
||||
]
|
||||
|
||||
try:
|
||||
#print(f"\nConnecting to PostgreSQL database '{DB_NAME}' on {DB_HOST}...")
|
||||
conn = psycopg2.connect(dbname=DB_NAME, user=DB_USER, password=DB_PASSWORD, host=DB_HOST, port=DB_PORT)
|
||||
cursor = conn.cursor()
|
||||
#print("Connection successful.")
|
||||
|
||||
# --- Stage 1: Upsert Events ---
|
||||
#print("Starting upsert stage...")
|
||||
for event in events:
|
||||
try:
|
||||
event_id = event.get('id')
|
||||
if not event_id:
|
||||
upsert_error_count += 1
|
||||
#print("Skipping event due to missing ID in upsert stage.")
|
||||
continue
|
||||
|
||||
# Get the timezone-aware datetime object (or None)
|
||||
dtg_aware = event.get('dtg')
|
||||
|
||||
lon_val = event.get('lon'); lat_val = event.get('lat')
|
||||
route_val = event.get('route'); remark_val = event.get('remark')
|
||||
bmp_val = event.get(BMP_KEY_ORIGINAL)
|
||||
|
||||
desc_parts = []
|
||||
if route_val: desc_parts.append(route_val)
|
||||
if bmp_val is not None: desc_parts.append(f"at MM {str(int(bmp_val)) if isinstance(bmp_val, float) and bmp_val.is_integer() else str(bmp_val)}")
|
||||
if remark_val: desc_parts.append(remark_val)
|
||||
full_desc = " ".join(desc_parts) if desc_parts else None
|
||||
|
||||
data_dict = {
|
||||
'id': event_id,
|
||||
# Pass the aware datetime object directly. If it's None, psycopg2 handles it as NULL.
|
||||
'first_seen': dtg_aware,
|
||||
'initial_desc': full_desc,
|
||||
'latest_desc': full_desc,
|
||||
'last_seen': dtg_aware,
|
||||
'lon': lon_val,
|
||||
'lat': lat_val,
|
||||
'source': event.get('source'),
|
||||
'route': route_val,
|
||||
'remark': remark_val
|
||||
}
|
||||
|
||||
# Choose the correct SQL based on geometry presence
|
||||
if lon_val is not None and lat_val is not None:
|
||||
cursor.execute(sql_upsert, data_dict)
|
||||
else:
|
||||
cursor.execute(sql_upsert_no_geom, data_dict)
|
||||
upserted_count += 1
|
||||
|
||||
except psycopg2.Error as db_err:
|
||||
upsert_error_count += 1
|
||||
print(f"DB upsert error ID '{event_id}': {db_err}")
|
||||
conn.rollback() # Rollback this event
|
||||
except Exception as e:
|
||||
upsert_error_count += 1
|
||||
print(f"Prep/Exec error ID '{event_id}': {e}")
|
||||
traceback.print_exc()
|
||||
# No rollback here needed as the error is before execute or handled by psycopg2 block
|
||||
|
||||
# Commit upsert stage (committing successful ones)
|
||||
if upsert_error_count > 0:
|
||||
print(f"Upsert stage completed with {upsert_error_count} errors for individual events (rolled back individually).")
|
||||
#print(f"Committing {upserted_count} successful upserts...")
|
||||
conn.commit()
|
||||
#print(f"Upsert stage committed.")
|
||||
|
||||
# --- Stage 2: Post-Update Spatial Queries ---
|
||||
#print("\nRunning post-upsert spatial updates...")
|
||||
post_update_errors = 0
|
||||
for i, update_sql in enumerate(post_update_sqls, 1):
|
||||
#print(f"Executing post-update {i}/{len(post_update_sqls)}: {update_sql[:100]}...") # Print start of query
|
||||
try:
|
||||
cursor.execute(update_sql)
|
||||
#print(f" -> Update {i} successful ({cursor.rowcount} rows affected).")
|
||||
# Commit each successful post-update step individually
|
||||
conn.commit()
|
||||
#print(f" Committed post-update step {i}.")
|
||||
except psycopg2.Error as post_db_err:
|
||||
post_update_errors += 1
|
||||
print(f" -> Database error executing post-update {i}: {post_db_err}")
|
||||
print(f" Failed SQL: {update_sql}")
|
||||
conn.rollback() # Rollback this specific update attempt
|
||||
print(" Rolled back post-update transaction attempt.")
|
||||
# Decide if script should stop entirely on post-update failure
|
||||
# break # Optional: Stop processing further post-updates if one fails
|
||||
except Exception as post_e:
|
||||
post_update_errors += 1
|
||||
print(f" -> Unexpected error executing post-update {i}: {post_e}")
|
||||
print(f" Failed SQL: {update_sql}")
|
||||
conn.rollback()
|
||||
print(" Rolled back post-update transaction attempt.")
|
||||
# break # Optional: Stop processing further post-updates if one fails
|
||||
|
||||
if post_update_errors == 0:
|
||||
print("Post-upsert updates completed successfully.")
|
||||
else:
|
||||
print(f"Post-upsert updates encountered {post_update_errors} errors (each failed step was rolled back).")
|
||||
|
||||
except psycopg2.OperationalError as e:
|
||||
print(f"Database connection failed: {e}")
|
||||
except Exception as e:
|
||||
print(f"An unexpected error occurred during database operations: {e}")
|
||||
traceback.print_exc()
|
||||
if conn:
|
||||
conn.rollback() # Rollback any outstanding transaction on major error
|
||||
finally:
|
||||
if cursor: cursor.close()
|
||||
if conn: conn.close()
|
||||
print("Database connection closed.")
|
||||
|
||||
# --- Script Entry Point ---
|
||||
if __name__ == "__main__":
|
||||
processed_events = fetch_and_process_data()
|
||||
if processed_events: # Only run DB operations if we got data
|
||||
upsert_and_update_db(processed_events)
|
||||
else:
|
||||
print("No processed events found, skipping database operations.")
|
||||
print("\nScript finished.")
|
||||
873
lsr.php
Normal file
873
lsr.php
Normal file
@@ -0,0 +1,873 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['ohgo'])) {
|
||||
$query = "SELECT jsonb_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', jsonb_agg(
|
||||
jsonb_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(geom)::jsonb,
|
||||
'properties', jsonb_build_object(
|
||||
'lat', lat,
|
||||
'lon', lon,
|
||||
'description', description,
|
||||
'roadstatus', roadstatus,
|
||||
'start', start,
|
||||
'lastupdate', lastupdate
|
||||
)
|
||||
)
|
||||
)
|
||||
) as geojson
|
||||
FROM public.ohgo
|
||||
WHERE endtime IS NULL
|
||||
AND lastupdate > NOW() - INTERVAL '2 hours'";
|
||||
|
||||
// Prepare and execute the query
|
||||
$result = pg_query($dbconn, $query);
|
||||
if (!$result) {
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode(['error' => 'Query failed: ' . pg_last_error()]);
|
||||
exit;
|
||||
}
|
||||
|
||||
// Fetch the result
|
||||
$resultArray = pg_fetch_all($result);
|
||||
|
||||
// Check if we got results
|
||||
if ($resultArray && isset($resultArray[0]['geojson'])) {
|
||||
header('Content-Type: application/json');
|
||||
echo $resultArray[0]['geojson']; // Direct output since it's already JSON from jsonb_build_object
|
||||
} else {
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode(['error' => 'No results found']);
|
||||
}
|
||||
|
||||
// Free result and close connection
|
||||
pg_free_result($result);
|
||||
// pg_close($dbconn); // Uncomment if you want to close the connection
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['ohgotable'])) {
|
||||
// Performing SQL query
|
||||
$query = "SELECT CASE WHEN COALESCE(lsr, FALSE) THEN 'true' ELSE 'false' END AS lsr,
|
||||
CASE WHEN COALESCE(hide, FALSE) THEN 'true' ELSE 'false' END AS hide,
|
||||
ROUND(ST_Y(geom)::numeric, 3) AS lat,
|
||||
ROUND(ST_X(geom)::numeric, 3) AS lon,
|
||||
id, category, roadstatus, cwa, county, state, location, routename,
|
||||
description,
|
||||
TO_CHAR(start, 'YYYY-MM-DD HH24:MI') AS start,
|
||||
TO_CHAR(endtime, 'YYYY-MM-DD HH24:MI') AS endtime,
|
||||
TO_CHAR(lastupdate, 'YYYY-MM-DD HH24:MI') AS lastupdate
|
||||
FROM ohgo
|
||||
WHERE (endtime IS NULL OR endtime > NOW() - INTERVAL '48 hours') and start > now() - interval '144 hours'
|
||||
ORDER BY start ASC
|
||||
";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in JSON
|
||||
$array = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection (uncomment if needed)
|
||||
// pg_close($dbconn);
|
||||
}
|
||||
|
||||
|
||||
if (isset($_GET['lsrohgo'])) {
|
||||
if ($_GET['lsrohgo'] == 'true') {
|
||||
$lsrflag = 'true';
|
||||
} else {
|
||||
$lsrflag = 'false';
|
||||
}
|
||||
$id = (int) $_GET['id'];
|
||||
$query = "UPDATE ohgo SET lsr = $1 WHERE id = $2";
|
||||
$result = pg_query_params($dbconn, $query, array($lsrflag, $id)) or die('Query failed: ' . pg_last_error());
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
if (isset($_GET['ohgohide'])) {
|
||||
if ($_GET['ohgohide'] == 'true') {
|
||||
$lsrflag = 'true';
|
||||
} else {
|
||||
$lsrflag = 'false';
|
||||
}
|
||||
$id = (int) $_GET['id'];
|
||||
$query = "UPDATE ohgo SET hide = $1 WHERE id = $2";
|
||||
$result = pg_query_params($dbconn, $query, array($lsrflag, $id)) or die('Query failed: ' . pg_last_error());
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
//take vtec, return start, end, polygon, outages in polygon, outages in buffer, warntype, polygon pop
|
||||
if (isset($_GET['vtec'])) {
|
||||
|
||||
|
||||
$vtec = $_GET['vtec'];
|
||||
|
||||
$query = "
|
||||
SELECT json_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', json_agg(
|
||||
json_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(nwspoly)::json,
|
||||
'properties', json_build_object(
|
||||
'id', warnindex,
|
||||
'issue', issue,
|
||||
'endtime', endtime,
|
||||
'warntype', warntype,
|
||||
|
||||
'outagesvalid', outagesvalid,
|
||||
'outagesbuffer', outagesbuffer,
|
||||
'polygonpop', polygonpop,
|
||||
'lat', st_y(st_centroid(nwspoly)),
|
||||
'lon', st_x(st_centroid(nwspoly)),
|
||||
'vtec', vtec
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
FROM svr
|
||||
WHERE vtec = $1;
|
||||
";
|
||||
|
||||
// Prepare and execute the query using pg_query_params
|
||||
$result = pg_query_params($dbconn, $query, array($vtec))
|
||||
or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Fetch the result
|
||||
$resultArray = pg_fetch_all($result);
|
||||
|
||||
// Output the JSON object
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
// Free result
|
||||
pg_free_result($result);
|
||||
|
||||
}
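// Example usage (vtec value is a placeholder): lsr.php?vtec=<VTEC>
// returns a GeoJSON FeatureCollection with the matching warning polygon from the svr
// table; feature properties carry issue, endtime, warntype, outagesvalid, outagesbuffer,
// polygonpop, the polygon centroid lat/lon, and the vtec string.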
|
||||
|
||||
//Get reports pre-flagged with the vtec
|
||||
if (isset($_GET['preflagreports'])) {
|
||||
$vtec = $_GET['preflagreports'];
|
||||
$query = "SELECT * from reports WHERE severe = $1";
|
||||
$result = pg_query_params($dbconn, $query, array($vtec)) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
//Get reports within the polygon for an arbitrary number of hours after issuance (default 6)
|
||||
|
||||
if (isset($_GET['reports'])) {
|
||||
$vtec = $_GET['reports'];
|
||||
if (isset($_GET['hours'])) {
|
||||
$hours = $_GET['hours'];
|
||||
} else {
|
||||
$hours = 6;
|
||||
}
|
||||
|
||||
//echo $hours;
|
||||
|
||||
$query = "SELECT * from reports,svr where ST_Contains(svr.nwspoly, reports.geom) and vtec = $1 and reports.initialdtg AT TIME ZONE 'America/New_York' > svr.issue AND reports.initialdtg AT TIME ZONE 'America/New_York' < svr.issue + (INTERVAL '1 h' * $2)";
|
||||
$result = pg_query_params($dbconn, $query, array($vtec,$hours)) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
//Get point power outages within polygon + arb time, default 60 minutes
|
||||
|
||||
if (isset($_GET['outages'])) {
|
||||
$vtec = $_GET['outages'];
|
||||
if (isset($_GET['hours'])) {
|
||||
$hours = $_GET['hours'];
|
||||
} else {
|
||||
$hours = 1;
|
||||
}
|
||||
|
||||
//echo $hours;
|
||||
|
||||
$query = "SELECT power.lat,power.lon,power.peakoutage,power.cause,power.derivedstart,power.lastchange from power,svr where ST_Contains(svr.nwspoly, power.realgeom) and vtec = $1 and derivedstart > svr.issue AND derivedstart < svr.issue + (INTERVAL '1 h' * $2)";
|
||||
$result = pg_query_params($dbconn, $query, array($vtec,$hours)) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//No GET parameters: return recent reports (since 2024-06-07) as GeoJSON
|
||||
if(empty($_GET)) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(geom)::json,'properties',json_build_object('id',id,'time',initialdtg,'county',county,'state',state,'issue',issue,'rawemail',rawemail,'place',place,'comments',comments)) order by initialdtg desc)) FROM reports where initialdtg > $1 ",
|
||||
array('2024-06-07')) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//For real time mapping
|
||||
|
||||
if($_GET['verify'] ?? null) {
|
||||
$query = "select id, lat::Numeric(16,3), lon::Numeric(16,3),issue,to_char(initialdtg, 'yyyy/mm/dd hh24:mi') as initialdtg,rawemail,concat(county,' ',state,'\n',place) as place,comments,lsr::text,severe from reports where severe is not null and processed = true order by initialdtg desc";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
//rtcad: return reports from the last N hours (default 6) as GeoJSON
|
||||
if (isset($_GET['rtcad'])) {
|
||||
if (isset($_GET['hours'])) {
|
||||
|
||||
$hours = $_GET['hours'];
|
||||
}
|
||||
|
||||
else {
|
||||
|
||||
$hours = 6;
|
||||
}
|
||||
|
||||
$query = "
|
||||
SELECT json_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', json_agg(
|
||||
json_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(geom)::json,
|
||||
'properties', json_build_object(
|
||||
'id', id,
|
||||
'time', utcinitialdtg,
|
||||
'county', county,
|
||||
'state', state,
|
||||
'issue', issue,
|
||||
'rawemail', rawemail,
|
||||
'place', place,
|
||||
'comments', comments
|
||||
)
|
||||
) ORDER BY initialdtg DESC
|
||||
)
|
||||
)
|
||||
FROM reports
|
||||
WHERE lat is not null and utcinitialdtg >= NOW() - INTERVAL '1 hour' * $1;
|
||||
";
|
||||
|
||||
// Prepare and execute the query using pg_query_params
|
||||
$result = pg_query_params($dbconn, $query, array($hours))
|
||||
or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Fetch the result
|
||||
$resultArray = pg_fetch_all($result);
|
||||
|
||||
// Output the JSON object
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
// Free result
|
||||
pg_free_result($result);
|
||||
|
||||
}
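// Example usage: lsr.php?rtcad=1&hours=6 returns reports from the last 6 hours
// (those with lat/lon present) as a GeoJSON FeatureCollection for real-time mapping.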
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//Stats
|
||||
if (isset($_GET['stats'])) {
|
||||
$query = "SELECT county, state, MAX(emailtime), count(*) FROM reports where county is not null and (state = 'WV' or state = 'VA' or state = 'KY' or state = 'OH') GROUP BY county, state";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
//Get METAR Array for Jelly Bean
|
||||
|
||||
if (isset($_GET['metars'])) {
|
||||
if (isset($_GET['start'])) {
|
||||
$start = $_GET['start'];
|
||||
}
|
||||
if (isset($_GET['end'])) {
|
||||
$end = $_GET['end'];
|
||||
}
|
||||
|
||||
|
||||
|
||||
$query = "SELECT icao,temp,dewp,wx,precip1,precip3,precip6,raw,obtime,stationname,lat,lon from metars where obtime - interval '45 minutes' > $1 and obtime < $2 order by lon asc";
|
||||
$result = pg_query_params($dbconn, $query, array($start,$end)) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
} //echo($array);
|
||||
echo json_encode($array);
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['news'])) {
|
||||
//$query = "SELECT headline, summary, imageurl, source, storylink, updated from news where notrelevant is not true";
|
||||
$query = "
|
||||
SELECT
|
||||
*,
|
||||
CASE
|
||||
WHEN concat(summary, ' ', headline) ILIKE ANY (ARRAY[
|
||||
'%weather%', '%flood%', '%fire%', '%fog%', '%snow%', '%emergency%',
'%wind%', '%ice%', '%rain%', '%power%', '%explosion%',
'%drown%', '%stream%', '%river%', '%air%', '%wind%',
'%river%', '%ice%', '%creek%', '%crash%', '%thunder%',
'%fog%', '%spill%', '%pileup%', '%pile-up%', '%gust%',
'%fatal%', '%injury%', '%sleet%', '%injured%', '%frost%',
'%culvert%', '%slippery%', '%wildfire%', '%tornado%',
'%thunderstorm%', '%downburst%', '%microburst%', '%crash%', '%heatstroke%', '%derecho%',
'%lightning%', '%hypothermia%', '%slide%', '%flow%', '%ski%', '%water%', '%inundation%'
|
||||
]) THEN 2
|
||||
WHEN concat(summary, ' ', headline) ILIKE ANY (ARRAY[
|
||||
'%legislative%','%history%','%budget%','%birthday%','%banning%','%academic%','%tuna%','%Service Forecast%', '%DOGE%','%demonstrators%','%forum%','%health%','%fraud%','%birthday%', '%egg%', '%eggs%', '%collector%', '%church%', ' %crypto%'
|
||||
]) THEN 0
|
||||
ELSE 1
|
||||
END AS relevance_level
|
||||
FROM news WHERE timeutc > NOW() - INTERVAL '18 hours'
|
||||
ORDER BY relevance_level DESC, timeutc DESC
|
||||
";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
if (isset($_GET['news2'])) {
|
||||
$query = "
|
||||
SELECT
|
||||
*,
|
||||
(SELECT COUNT(*)
|
||||
FROM unnest(ARRAY[
|
||||
'weather', 'flood', 'fire', 'fog', 'snow', 'emergency',
|
||||
'wind', 'ice', 'rain', 'power', 'explosion', 'warmer', 'colder',
|
||||
'drown', 'stream', 'river', 'air', 'wind', 'destroyed', 'rime', 'glaze',
|
||||
'river', 'ice', 'creek', 'crash', 'thunder', 'spinup', 'black ice', 'aircraft',
|
||||
'fog', 'spill', 'pileup', 'pile-up', 'gust', 'frozen', 'funnel', 'rainfall',
|
||||
'fatal', 'injury', 'sleet', 'injured', 'frost', 'dead', 'death', 'landslide',
|
||||
'culvert', 'slippery', 'wildfire', 'tornado', 'blizzard', 'creek', 'hail',
|
||||
'thunderstorm', 'downburst', 'microburst', 'crash', 'heatstroke', 'derecho',
|
||||
'lightning', 'hypothermia', 'slide', 'flow', 'ski', 'water', 'inundation', 'victim',
|
||||
'victims', 'flooding','flooded','snowing','freezing rain','clouds','cloud','storm'
|
||||
]) AS pattern
|
||||
WHERE concat(summary, ' ', headline) ~* ('\y' || pattern || '\y')) AS match_count
|
||||
FROM news
|
||||
WHERE timeutc > NOW() - INTERVAL '18 hours'
|
||||
ORDER BY nlpscore DESC, timeutc DESC
|
||||
";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
if (isset($_GET['news3old'])) {
|
||||
$query = "
|
||||
SELECT * FROM news WHERE (timeutc > NOW() - INTERVAL '24 hours' and nlpscore > 0.1) or (timeutc > NOW() - INTERVAL '6 hours') ORDER BY nlpscore DESC, timeutc DESC";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
if (isset($_GET['news3'])) {
|
||||
// Corrected query with NULLS LAST
|
||||
$query = "
|
||||
SELECT * FROM news
|
||||
WHERE (timeutc > NOW() - INTERVAL '24 hours' AND impact_score > 25)
|
||||
OR (timeutc > NOW() - INTERVAL '6 hours' and impact_score > 0)
|
||||
ORDER BY impact_score DESC NULLS LAST, timeutc DESC limit 50";
|
||||
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$array = []; // It's good practice to initialize the array
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['newsarchive'])) {
|
||||
// Initialize variables
|
||||
$start = isset($_GET['start']) ? $_GET['start'] : null;
|
||||
$end = isset($_GET['end']) ? $_GET['end'] : null;
|
||||
$keys = isset($_GET['key']) ? $_GET['key'] : [];
|
||||
// Convert keys to an array if it's a string
|
||||
if (is_string($keys)) {
|
||||
$keys = explode(',', $keys);
|
||||
}
|
||||
|
||||
$patterns = array_map(function($term) {
|
||||
return trim($term);
|
||||
}, $keys);
|
||||
|
||||
// Handle case with no search terms
|
||||
if (empty($patterns)) {
|
||||
$query = "SELECT * FROM news";
|
||||
$params = [];
|
||||
} else {
|
||||
// Build parameter placeholders
|
||||
$placeholders = [];
|
||||
for ($i = 1; $i <= count($patterns); $i++) {
|
||||
$placeholders[] = "\${$i}::text";
|
||||
}
|
||||
$placeholder_string = implode(',', $placeholders);
|
||||
|
||||
$query = "
|
||||
SELECT
|
||||
n.*,
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM unnest(ARRAY[{$placeholder_string}]::text[]) AS pattern
|
||||
WHERE concat(n.summary, ' ', n.headline) ILIKE pattern
|
||||
) AS match_count
|
||||
FROM news n
|
||||
WHERE concat(summary, ' ', headline) ILIKE ANY (ARRAY[{$placeholder_string}]::text[])
|
||||
";
|
||||
$params = array_map(function($term) { return "%{$term}%"; }, $patterns);
|
||||
}
|
||||
|
||||
// Add date filters if provided
|
||||
$param_count = count($patterns);
|
||||
if ($start) {
|
||||
$param_count++;
|
||||
$query .= " AND timeutc >= $${param_count}";
|
||||
$params[] = $start;
|
||||
}
|
||||
if ($end) {
|
||||
$param_count++;
|
||||
$query .= " AND timeutc <= $${param_count}";
|
||||
$params[] = $end;
|
||||
}
|
||||
|
||||
$query .= " ORDER BY match_count DESC, timeutc desc";
|
||||
|
||||
// Execute query
|
||||
$result = pg_query_params($query, $params) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$array = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
|
||||
echo json_encode($array);
|
||||
}
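// Example usage: lsr.php?newsarchive=1&key=flood,snow&start=2025-01-01&end=2025-02-01
// returns archived news rows matching any of the keywords within the date range,
// ranked by how many keywords match (match_count) and then by time. With no key
// terms, all rows in the date range are returned with match_count = 0.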
|
||||
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['wv511'])) {
|
||||
$query = "SELECT jsonb_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', jsonb_agg(
|
||||
jsonb_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(geom)::jsonb,
|
||||
'properties', jsonb_build_object(
|
||||
'type', name,
|
||||
'reported', first_seen,
|
||||
'end', last_seen_in_feed,
|
||||
'county', county,
|
||||
'state', st,
|
||||
'remark', latest_description,
|
||||
'lat', st_y(st_centroid(geom)),
|
||||
'lon', st_x(st_centroid(geom))
|
||||
)
|
||||
)
|
||||
)
|
||||
) as geojson
|
||||
FROM public.wv511
|
||||
WHERE last_updated > NOW() - INTERVAL '2 hours'";
|
||||
|
||||
// Prepare and execute the query
|
||||
$result = pg_query($dbconn, $query);
|
||||
if (!$result) {
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode(['error' => 'Query failed: ' . pg_last_error()]);
|
||||
exit;
|
||||
}
|
||||
|
||||
// Fetch the result
|
||||
$resultArray = pg_fetch_all($result);
|
||||
|
||||
// Check if we got results
|
||||
if ($resultArray && isset($resultArray[0]['geojson'])) {
|
||||
header('Content-Type: application/json');
|
||||
echo $resultArray[0]['geojson']; // Direct output since it's already JSON from jsonb_build_object
|
||||
} else {
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode(['error' => 'No results found']);
|
||||
}
|
||||
|
||||
// Free result and close connection
|
||||
pg_free_result($result);
|
||||
// pg_close($dbconn); // Uncomment if you want to close the connection
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['ky511'])) {
|
||||
$query = "SELECT jsonb_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', COALESCE(jsonb_agg(
|
||||
jsonb_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(geom)::jsonb,
|
||||
'properties', jsonb_build_object(
|
||||
'reported', first_seen,
|
||||
'end', last_seen_in_feed,
|
||||
'county', county,
|
||||
'state', st,
|
||||
'remark', latest_description,
|
||||
'lat', st_y(st_centroid(geom)),
|
||||
'lon', st_x(st_centroid(geom))
|
||||
)
|
||||
)
|
||||
), '[]'::jsonb)
|
||||
) as geojson
|
||||
FROM ky511.ky511
|
||||
WHERE last_updated > NOW() - INTERVAL '2 hours'";
|
||||
|
||||
// Prepare and execute the query
|
||||
$result = pg_query($dbconn, $query);
|
||||
if (!$result) {
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode(['error' => 'Query failed: ' . pg_last_error()]);
|
||||
exit;
|
||||
}
|
||||
|
||||
// Fetch the result
|
||||
$resultArray = pg_fetch_all($result);
|
||||
|
||||
// Check if we got results
|
||||
if ($resultArray && isset($resultArray[0]['geojson'])) {
|
||||
header('Content-Type: application/json');
|
||||
echo $resultArray[0]['geojson']; // Direct output since it's already JSON from jsonb_build_object
|
||||
} else {
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode(['error' => 'No results found']);
|
||||
}
|
||||
|
||||
// Free result and close connection
|
||||
pg_free_result($result);
|
||||
// pg_close($dbconn); // Uncomment if you want to close the connection
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['getCombinedTable'])) {
|
||||
|
||||
// Combined SQL query using UNION ALL with CAST for 'id'
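// (UNION ALL requires every branch to return the same column list with compatible types,
//  which is why each branch below casts id to TEXT and aliases its columns to the ohgo names.)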
|
||||
$query = "
|
||||
SELECT * FROM (
|
||||
-- OHGO Query
|
||||
SELECT
|
||||
'ohgo' AS source,
|
||||
CASE WHEN COALESCE(lsr, FALSE) THEN 'true' ELSE 'false' END AS lsr,
|
||||
CASE WHEN COALESCE(hide, FALSE) THEN 'true' ELSE 'false' END AS hide,
|
||||
ROUND(ST_Y(geom)::numeric, 3) AS lat,
|
||||
ROUND(ST_X(geom)::numeric, 3) AS lon,
|
||||
CAST(id AS TEXT) AS id, -- Cast id to TEXT
|
||||
category,
|
||||
roadstatus,
|
||||
cwa,
|
||||
county,
|
||||
state,
|
||||
location,
|
||||
routename,
|
||||
description,
|
||||
TO_CHAR(start, 'YYYY-MM-DD HH24:MI') AS start,
|
||||
TO_CHAR(endtime, 'YYYY-MM-DD HH24:MI') AS endtime,
|
||||
TO_CHAR(lastupdate, 'YYYY-MM-DD HH24:MI') AS lastupdate
|
||||
FROM ohgo
|
||||
WHERE (endtime IS NULL OR endtime > NOW() - INTERVAL '24 hours') AND start > now() - interval '144 hours'
|
||||
|
||||
UNION ALL
|
||||
|
||||
-- WV511 Query
|
||||
SELECT
|
||||
'wv511' AS source,
|
||||
CASE WHEN COALESCE(lsr, FALSE) THEN 'true' ELSE 'false' END AS lsr,
|
||||
CASE WHEN COALESCE(hide, FALSE) THEN 'true' ELSE 'false' END AS hide,
|
||||
ROUND(ST_Y(geom)::numeric, 3) AS lat,
|
||||
ROUND(ST_X(geom)::numeric, 3) AS lon,
|
||||
CAST(id AS TEXT) AS id, -- Cast id to TEXT
|
||||
wv511.name as category,
|
||||
NULL AS roadstatus,
|
||||
cwa,
|
||||
county,
|
||||
st as state,
|
||||
'Map Link' AS location,
|
||||
NULL AS routename,
|
||||
latest_description as description,
|
||||
TO_CHAR(first_seen, 'YYYY-MM-DD HH24:MI') AS start,
|
||||
TO_CHAR(last_seen_in_feed, 'YYYY-MM-DD HH24:MI') AS endtime,
|
||||
TO_CHAR(last_updated, 'YYYY-MM-DD HH24:MI') AS lastupdate
|
||||
FROM wv511
|
||||
WHERE (last_seen_in_feed IS NULL OR last_seen_in_feed > NOW() - INTERVAL '24 hours') AND first_seen > now() - interval '144 hours'
|
||||
AND wv511.name !~ 'Crash|Vehicle|Dead Animal|Debris in Roadway|Congestion-Delay|Pot hole|Debris On Bridge|Attenuator|Pedestrian|Bridge Closed|Truck on escape|Bridge Incident|Escape Ramp|Signal'
|
||||
UNION ALL
|
||||
|
||||
-- KY511 Query
|
||||
SELECT
|
||||
'ky511.ky511' AS source,
|
||||
CASE WHEN COALESCE(lsr, FALSE) THEN 'true' ELSE 'false' END AS lsr,
|
||||
CASE WHEN COALESCE(hide, FALSE) THEN 'true' ELSE 'false' END AS hide,
|
||||
ROUND(ST_Y(geom)::numeric, 3) AS lat,
|
||||
ROUND(ST_X(geom)::numeric, 3) AS lon,
|
||||
CAST(id AS TEXT) AS id, -- Cast id to TEXT
|
||||
'Weather' as category,
|
||||
NULL AS roadstatus,
|
||||
cwa,
|
||||
county,
|
||||
st as state,
|
||||
'Map Link' AS location,
|
||||
NULL AS routename,
|
||||
latest_description as description,
|
||||
TO_CHAR(first_seen, 'YYYY-MM-DD HH24:MI') AS start,
|
||||
TO_CHAR(last_seen_in_feed, 'YYYY-MM-DD HH24:MI') AS endtime,
|
||||
TO_CHAR(last_updated, 'YYYY-MM-DD HH24:MI') AS lastupdate
|
||||
FROM ky511.ky511
|
||||
WHERE (last_seen_in_feed IS NULL OR last_seen_in_feed > NOW() - INTERVAL '24 hours') AND first_seen > now() - interval '144 hours'
|
||||
) AS combined_data
|
||||
ORDER BY start ASC;
|
||||
";
|
||||
|
||||
// Execute the query
|
||||
$result = pg_query($dbconn, $query);
|
||||
|
||||
// Set header before any output
|
||||
header('Content-Type: application/json');
|
||||
|
||||
if (!$result) {
|
||||
// Output error as JSON
|
||||
echo json_encode(['error' => 'Combined query failed: ' . pg_last_error($dbconn)]);
|
||||
// Close connection if needed
|
||||
// pg_close($dbconn);
|
||||
exit;
|
||||
}
|
||||
|
||||
// Fetch results into an array
|
||||
$dataArray = [];
|
||||
while ($row = pg_fetch_assoc($result)) {
|
||||
$dataArray[] = $row;
|
||||
}
|
||||
if ($dataArray === false) {
|
||||
echo json_encode(['error' => 'Failed to fetch results.']);
|
||||
pg_free_result($result);
|
||||
// pg_close($dbconn);
|
||||
exit;
|
||||
}
|
||||
|
||||
|
||||
// Output the combined results as JSON
|
||||
echo json_encode($dataArray);
|
||||
|
||||
// Free result memory
|
||||
pg_free_result($result);
|
||||
|
||||
// Optionally close the connection
|
||||
// pg_close($dbconn);
|
||||
|
||||
exit; // Stop script execution
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Add more table names here as needed in the future.
|
||||
|
||||
|
||||
if (isset($_GET['updater'])) {
|
||||
$allowedTables = ['wv511', 'ky511.ky511', 'ohgo'];
|
||||
if (isset($_GET['lsr']) && isset($_GET['id']) && isset($_GET['table'])) {
|
||||
|
||||
// --- Handle LSR Update ---
|
||||
$requestedTable = $_GET['table'];
|
||||
$lsrInput = $_GET['lsr'];
|
||||
$idInput = $_GET['id']; // ID validation missing here, see note below
|
||||
|
||||
if (!in_array($requestedTable, $allowedTables)) {
|
||||
http_response_code(400);
|
||||
die('Error (LSR): Invalid table specified.');
|
||||
}
|
||||
|
||||
$lsrflag = ($lsrInput === 'true') ? 'true' : 'false';
|
||||
$id = $idInput; // Bound as a query parameter below, so no SQL injection risk, but its type/format is not validated
|
||||
|
||||
|
||||
|
||||
$tableNameEscaped = $requestedTable;
|
||||
// **** IMPORTANT: Ensure your ID column is actually TEXT/VARCHAR if you're not casting to int ****
|
||||
// If 'id' is numeric, validate/cast it before binding (note the 'hide' block below currently has the same gap).
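// A minimal sketch of that numeric validation, assuming an integer id column (adjust to your schema):
//   if (!ctype_digit($idInput)) {
//       http_response_code(400);
//       die('Error (LSR): "id" must be a positive integer.');
//   }
//   $id = (int) $idInput;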
|
||||
// Assuming 'id' is text for now based on your original code for LSR:
|
||||
$query = "UPDATE {$tableNameEscaped} SET lsr = $1 WHERE id = $2";
|
||||
$result = pg_query_params($dbconn, $query, array($lsrflag, $id));
|
||||
|
||||
if ($result) {
|
||||
$affectedRows = pg_affected_rows($result);
|
||||
echo "LSR Update successful for table '{$requestedTable}'. {$affectedRows} row(s) affected for ID {$id}.";
|
||||
} else {
|
||||
http_response_code(500);
|
||||
error_log("LSR Query failed for table '{$requestedTable}', ID {$id}: " . pg_last_error($dbconn));
|
||||
die('Error: The LSR update query failed.');
|
||||
}
|
||||
|
||||
} else if (isset($_GET['hide']) && isset($_GET['id']) && isset($_GET['table'])) {
|
||||
// --- Handle Hide Update ---
|
||||
$requestedTable = $_GET['table'];
|
||||
$hideInput = $_GET['hide'];
|
||||
$idInput = $_GET['id'];
|
||||
|
||||
if (!in_array($requestedTable, $allowedTables)) {
|
||||
http_response_code(400);
|
||||
die('Error (Hide): Invalid table specified.');
|
||||
}
|
||||
|
||||
$hideflag = ($hideInput === 'true') ? 'true' : 'false';
|
||||
|
||||
// Note: no INT validation is actually applied here; the ID is bound as a parameter below, but its format should match your DB column type
|
||||
$id = $idInput;
|
||||
|
||||
|
||||
|
||||
$tableNameEscaped = $requestedTable;
|
||||
// Assuming 'id' is numeric based on your validation here
|
||||
$query = "UPDATE {$tableNameEscaped} SET hide = $1 WHERE id = $2";
|
||||
$result = pg_query_params($dbconn, $query, array($hideflag, $id));
|
||||
|
||||
if ($result) {
|
||||
$affectedRows = pg_affected_rows($result);
|
||||
echo "Hide Update successful for table '{$requestedTable}'. {$affectedRows} row(s) affected for ID {$id}.";
|
||||
} else {
|
||||
http_response_code(500);
|
||||
error_log("Hide Query failed for table '{$requestedTable}', ID {$id}: " . pg_last_error($dbconn));
|
||||
die('Error: The Hide update query failed.');
|
||||
}
|
||||
|
||||
} else {
|
||||
// --- Handle Missing Parameters Error ---
|
||||
// Neither 'lsr' nor 'hide' (along with id and table) were provided correctly.
|
||||
http_response_code(400); // Bad Request
|
||||
// Check which parameters *are* present to give a potentially more helpful error
|
||||
$missing = [];
|
||||
if (!isset($_GET['id'])) $missing[] = '"id"';
|
||||
if (!isset($_GET['table'])) $missing[] = '"table"';
|
||||
if (!isset($_GET['lsr']) && !isset($_GET['hide'])) {
|
||||
$missing[] = 'action ("lsr" or "hide")';
|
||||
} else if (isset($_GET['lsr']) && (!isset($_GET['id']) || !isset($_GET['table']))) {
|
||||
// LSR was specified, but others missing
|
||||
} else if (isset($_GET['hide']) && (!isset($_GET['id']) || !isset($_GET['table']))) {
|
||||
// Hide was specified, but others missing
|
||||
}
|
||||
|
||||
if (!empty($missing)) {
|
||||
die('Error: Missing required parameter(s): ' . implode(', ', $missing) . '. Please provide a valid action ("lsr" or "hide"), "id", and "table".');
|
||||
} else {
|
||||
// Should ideally not happen with the logic above, but as a fallback:
|
||||
die('Error: Invalid request parameters. Please provide action ("lsr" or "hide"), "id", and "table".');
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
pg_close($dbconn);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
?>
|
||||
343
lsrtool.html
Normal file
@@ -0,0 +1,343 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX Report Query</title>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
|
||||
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.7.1/dist/leaflet.css">
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/leaflet-timedimension/1.1.0/leaflet.timedimension.control.min.css" />
|
||||
<link rel="stylesheet" href="/js/leaflet-radar.css">
|
||||
<link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
|
||||
|
||||
<link rel="preload" href="https://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{z}/{y}/{x}" as="image">
|
||||
|
||||
<style>
|
||||
body, html {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
#mapid {
|
||||
height: 90%;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
z-index: 1;
|
||||
}
|
||||
#slider-2 {
|
||||
position: absolute;
|
||||
left: 20px;
|
||||
top: 200px;
|
||||
width: 300px;
|
||||
z-index: 1000;
|
||||
}
|
||||
#controls {
|
||||
position: absolute;
|
||||
bottom: 10px;
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
width: 80%;
|
||||
z-index: 1000;
|
||||
background: rgba(255, 255, 255, 0.8);
|
||||
padding: 10px;
|
||||
}
|
||||
#time-display, #local-time-display {
|
||||
margin: 10px 0;
|
||||
font-size: 14px;
|
||||
}
|
||||
.box, .box2 {
|
||||
position: absolute;
|
||||
z-index: 1000;
|
||||
text-align: center;
|
||||
width: 250px;
|
||||
left: 10%;
|
||||
margin-left: -125px;
|
||||
}
|
||||
.box { top: 225px; }
|
||||
.box2 { top: 500px; }
|
||||
.legend {
|
||||
line-height: 18px;
|
||||
color: #555;
|
||||
}
|
||||
.legend i {
|
||||
width: 15px;
|
||||
height: 15px;
|
||||
float: left;
|
||||
margin-right: 8px;
|
||||
opacity: 0.7;
|
||||
}
|
||||
.info {
|
||||
padding: 6px 8px;
|
||||
font: 14px/16px Arial, Helvetica, sans-serif;
|
||||
background: rgba(255,255,255,0.8);
|
||||
box-shadow: 0 0 15px rgba(0,0,0,0.2);
|
||||
border-radius: 5px;
|
||||
}
|
||||
.info h4 {
|
||||
margin: 0 0 5px;
|
||||
color: #777;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="mapid"></div>
|
||||
<div id="slider-2"></div>
|
||||
<div class="box"></div>
|
||||
<div class="box2"></div>
|
||||
<div id="controls">
|
||||
<input type="range" id="slider" min="0" max="12" step="1" value="0">
|
||||
<div id="time-display">Time: </div>
|
||||
<div id="local-time-display">Local Time (ET): </div>
|
||||
</div>
|
||||
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.7.1/jquery.min.js" defer></script>
|
||||
<script src="https://unpkg.com/leaflet@1.7.1/dist/leaflet.js" integrity="sha512-XQoYMqMTK8LvdxXYG3nZ448hOEQiglfqkJs1NOQV44cWnUrBc8PkAOcXy20w0vlaXaVUearIOBhiXZ5V3ynxwA==" crossorigin="" defer></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/chroma-js/2.4.2/chroma.min.js" integrity="sha512-zInFF17qBFVvvvFpIfeBzo7Tj7+rQxLeTJDmbxjBz5/zIr89YVbTNelNhdTT+/DCrxoVzBeUPVFJsczKbB7sew==" crossorigin="anonymous" referrerpolicy="no-referrer" defer></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.29.4/moment.min.js" defer></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment-timezone/0.5.43/moment-timezone-with-data.min.js" defer></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/leaflet-timedimension/1.1.0/leaflet.timedimension.min.js" defer></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/leaflet-timedimension/1.1.0/leaflet.timedimension.control.min.js" defer></script>
|
||||
<script src="/js/leaflet-radar.js" defer></script>
|
||||
<script src="https://unpkg.com/file-saver@2.0.5/dist/FileSaver.js" defer></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/dom-to-image/2.6.0/dom-to-image.js" integrity="sha512-wUa0ktp10dgVVhWdRVfcUO4vHS0ryT42WOEcXjVVF2+2rcYBKTY7Yx7JCEzjWgPV+rj2EDUr8TwsoWF6IoIOPg==" crossorigin="anonymous" referrerpolicy="no-referrer" defer></script>
|
||||
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
if (typeof L === 'undefined') {
|
||||
console.error('Leaflet is not loaded yet');
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof moment === 'undefined') {
|
||||
console.error('Moment.js is not loaded yet');
|
||||
return;
|
||||
}
|
||||
|
||||
const mymap = L.map('mapid', {
|
||||
zoomDelta: 0.25,
|
||||
zoomSnap: 0,
|
||||
fadeAnimation: false,
|
||||
zoomAnimation: true,
|
||||
zoomAnimationThreshold: 4,
|
||||
preferCanvas: true
|
||||
}).setView([38.508, -81.652480], 8.0);
|
||||
|
||||
const Esri_WorldStreetMap = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri',
|
||||
maxZoom: 19,
|
||||
tileSize: 256,
|
||||
updateWhenIdle: true,
|
||||
updateInterval: 200,
|
||||
reuseTiles: true
|
||||
}).addTo(mymap);
|
||||
|
||||
const baselayers = {
|
||||
"Esri Street Map": Esri_WorldStreetMap,
|
||||
"Esri Satellite": L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community',
|
||||
maxZoom: 19,
|
||||
updateWhenIdle: true,
|
||||
reuseTiles: true
|
||||
}),
|
||||
"Esri Topo": L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri — Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community',
|
||||
maxZoom: 19,
|
||||
updateWhenIdle: true,
|
||||
reuseTiles: true
|
||||
}),
|
||||
"USGS Sat/Topo": L.tileLayer('https://basemap.nationalmap.gov/arcgis/rest/services/USGSImageryTopo/MapServer/tile/{z}/{y}/{x}', {
|
||||
maxZoom: 20,
|
||||
attribution: 'Tiles courtesy of the <a href="https://usgs.gov/">U.S. Geological Survey</a>',
|
||||
updateWhenIdle: true,
|
||||
reuseTiles: true
|
||||
})
|
||||
};
|
||||
|
||||
L.control.layers(baselayers, null, {collapsed: false}).addTo(mymap);
|
||||
|
||||
mymap.createPane('polygonPane');
|
||||
mymap.getPane('polygonPane').style.zIndex = 300;
|
||||
mymap.createPane('radarPane');
|
||||
mymap.getPane('radarPane').style.zIndex = 350;
|
||||
mymap.getPane('markerPane').style.zIndex = 400;
|
||||
|
||||
const geoJSONsvr = L.geoJSON(null, {
|
||||
style: function(feature) {
|
||||
return {
|
||||
weight: 3,
|
||||
opacity: 1,
|
||||
color: getColorWarning(feature.properties.warntype),
|
||||
fillOpacity: 0,
|
||||
interactive: false
|
||||
};
|
||||
},
|
||||
onEachFeature: function(feature, layer) {
|
||||
const vtecurl = vtecget(feature.properties.vtec);
|
||||
layer.bindPopup(`<a href="${vtecurl}">${feature.properties.vtec}</a>`);
|
||||
},
|
||||
pane: 'polygonPane'
|
||||
}).addTo(mymap);
|
||||
|
||||
const geoJSONPoint = L.geoJSON(null, {
|
||||
pointToLayer: function(feature, latlng) {
|
||||
const isWeatherRelated = /tree|weather/i.test(feature.properties.cause);
|
||||
return L.circleMarker(latlng, {
|
||||
radius: isWeatherRelated ? 6 : 4,
|
||||
fillOpacity: 1,
|
||||
weight: isWeatherRelated ? 12 : 1,
|
||||
color: getColorpoint(feature.properties.outage),
|
||||
interactive: true,
|
||||
pane: 'markerPane'
|
||||
}).bindPopup(`
|
||||
${latlng.lat.toFixed(3)}, ${latlng.lng.toFixed(3)}<br>
|
||||
Outage Start: ${feature.properties.time}<br>
|
||||
Customers Affected: ${feature.properties.outage}<br>
|
||||
Cause: ${feature.properties.cause}<br>
|
||||
<a href="${googleMap(latlng.lat.toFixed(3), latlng.lng.toFixed(3))}" target="_blank">Google Map Link</a>
|
||||
`);
|
||||
}
|
||||
}).addTo(mymap);
|
||||
|
||||
function googleMap(lat, lon) {
|
||||
return `http://maps.google.com/maps?t=k&q=loc:${lat}+${lon}&basemap=satellite`;
|
||||
}
|
||||
|
||||
function vtecget(vtectext) {
|
||||
vtectext = vtectext.slice(1, -1);
|
||||
const parts = vtectext.split('.');
|
||||
let vtecstring = `#20${parts[6].substring(0, 2)}-`;
|
||||
for (let i = 0; i < 6; i++) {
|
||||
vtecstring += parts[i] + (i < 5 ? '-' : '');
|
||||
}
|
||||
return `https://mesonet.agron.iastate.edu/vtec/${vtecstring}`;
|
||||
}
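// Example with a hypothetical VTEC string: vtecget('/O.NEW.KRLX.SV.W.0042.250326T2100Z-250326T2200Z/')
// strips the slashes, reads the year "25" from the event-time field, and returns
// "https://mesonet.agron.iastate.edu/vtec/#2025-O-NEW-KRLX-SV-W-0042".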
|
||||
|
||||
function getColorWarning(d) {
|
||||
return d === 'SVRRLX' || d === 'SVR' ? 'orange' :
|
||||
d === 'TORRLX' || d === 'TOR' ? 'red' : 'gray';
|
||||
}
|
||||
|
||||
function getColorpoint(d) {
|
||||
return chroma.scale(['gray', '#0cff0c', '#ff9933', 'red', '#fe019a'])
|
||||
.domain([4, 50, 200, 500, 1000])(d).hex();
|
||||
}
|
||||
|
||||
fetch('counties.json')
|
||||
.then(res => res.json())
|
||||
.then(data => L.geoJSON(data, {
|
||||
style: {color: "#000000", weight: 1, fillOpacity: 0},
|
||||
pane: 'polygonPane'
|
||||
}).addTo(mymap))
|
||||
.catch(err => console.error('Failed to load counties:', err));
|
||||
|
||||
async function saddisplay() {
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
const vtec = urlParams.get('vtec');
|
||||
const ids = urlParams.get('id');
|
||||
|
||||
try {
|
||||
const svrResponse = await fetch(`lsr.php?vtec=${vtec}`);
|
||||
const geojsonsvr = await svrResponse.json();
|
||||
geoJSONsvr.clearLayers().addData(geojsonsvr);
|
||||
|
||||
if (geojsonsvr.features.length > 0) {
|
||||
const firstFeature = geojsonsvr.features[0].properties;
|
||||
mymap.setView([firstFeature.lat, firstFeature.lon], 10);
|
||||
createRadarMap(firstFeature.issue + "Z", firstFeature.endtime + "Z");
|
||||
}
|
||||
|
||||
if (ids) {
|
||||
const powerResponse = await fetch(`powerapi.php?poweridsgeojson=${ids}`);
|
||||
const geojsonPoint = await powerResponse.json();
|
||||
geoJSONPoint.clearLayers().addData(geojsonPoint);
|
||||
}
|
||||
|
||||
geoJSONPoint.bringToFront();
|
||||
} catch (error) {
|
||||
console.error('Error in saddisplay:', error);
|
||||
}
|
||||
}
|
||||
|
||||
function createRadarMap(startTime, endTime) {
|
||||
const coeff = 1000 * 60 * 5;
|
||||
const start = new Date(Math.round(new Date(startTime).getTime() / coeff) * coeff);
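// e.g. a start time of 18:07:30Z rounds to the nearest 5-minute step, 18:10:00Z,
// so the first frame lines up with the radar WMS time intervals requested below.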
|
||||
const end = new Date(endTime);
|
||||
const times = [];
|
||||
let current = new Date(start);
|
||||
|
||||
while (current <= end) {
|
||||
times.push(current.toISOString());
|
||||
current.setMinutes(current.getMinutes() + 5);
|
||||
}
|
||||
|
||||
const radarLayers = new Map();
|
||||
const slider = document.getElementById("slider");
|
||||
const timeDisplay = document.getElementById("time-display");
|
||||
const localTimeDisplay = document.getElementById("local-time-display");
|
||||
|
||||
function createRadarLayer(time) {
|
||||
return L.tileLayer.wms("https://mesonet.agron.iastate.edu/cgi-bin/wms/nexrad/n0q-t.cgi", {
|
||||
layers: 'nexrad-n0q-wmst',
|
||||
format: 'image/png',
|
||||
transparent: true,
|
||||
attribution: "Weather data © 2025 IEM Nexrad",
|
||||
time: time,
|
||||
opacity: 0,
|
||||
pane: 'radarPane',
|
||||
updateWhenIdle: true,
|
||||
reuseTiles: true
|
||||
});
|
||||
}
|
||||
|
||||
function updateLocalTimeDisplay(utcTime) {
|
||||
const easternTime = moment.utc(utcTime).tz('America/New_York').format('YYYY-MM-DD HH:mm:ss');
|
||||
localTimeDisplay.textContent = `Local Time (ET): ${easternTime}`;
|
||||
}
|
||||
|
||||
slider.max = times.length - 1;
|
||||
slider.value = 0;
|
||||
|
||||
slider.addEventListener("input", function() {
|
||||
const index = parseInt(this.value);
|
||||
const time = times[index];
|
||||
|
||||
if (!radarLayers.has(time)) {
|
||||
const layer = createRadarLayer(time);
|
||||
radarLayers.set(time, layer);
|
||||
layer.addTo(mymap);
|
||||
}
|
||||
|
||||
radarLayers.forEach((layer, layerTime) => {
|
||||
layer.setOpacity(layerTime === time ? 0.8 : 0);
|
||||
});
|
||||
|
||||
timeDisplay.textContent = `Time: ${time}`;
|
||||
updateLocalTimeDisplay(time);
|
||||
geoJSONPoint.bringToFront();
|
||||
});
|
||||
|
||||
const firstLayer = createRadarLayer(times[0]);
|
||||
radarLayers.set(times[0], firstLayer);
|
||||
firstLayer.setOpacity(0.8).addTo(mymap);
|
||||
timeDisplay.textContent = `Time: ${times[0]}`;
|
||||
updateLocalTimeDisplay(times[0]);
|
||||
geoJSONPoint.bringToFront();
|
||||
|
||||
setInterval(() => {
|
||||
const currentIndex = parseInt(slider.value);
|
||||
updateLocalTimeDisplay(times[currentIndex]);
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
saddisplay();
|
||||
|
||||
mymap.on('overlayadd overlayremove zoomend', () => {
|
||||
geoJSONPoint.bringToFront();
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
481
ltg.html
Normal file
@@ -0,0 +1,481 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX Lightning Archive</title>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
|
||||
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.7.1/dist/leaflet.css"
|
||||
integrity="sha512-xodZBNTC5n17Xt2atTPuE1HxjVMSvLVW9ocqUKLsCC5CXdbqCmblAshOMAS6/keqq/sMZMZ19scR4PsZChSR7A=="
|
||||
crossorigin=""/>
|
||||
<script src="https://unpkg.com/leaflet@1.7.1/dist/leaflet.js"
|
||||
|
||||
integrity="sha512-XQoYMqMTK8LvdxXYG3nZ448hOEQiglfqkJs1NOQV44cWnUrBc8PkAOcXy20w0vlaXaVUearIOBhiXZ5V3ynxwA=="
|
||||
crossorigin=""></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.js" integrity="sha512-ozq8xQKq6urvuU6jNgkfqAmT7jKN2XumbrX1JiB3TnF7tI48DPI4Gy1GXKD/V3EExgAs1V+pRO7vwtS1LHg0Gw==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw-src.css" integrity="sha512-vJfMKRRm4c4UupyPwGUZI8U651mSzbmmPgR3sdE3LcwBPsdGeARvUM5EcSTg34DK8YIRiIo+oJwNfZPMKEQyug==" crossorigin="anonymous" referrerpolicy="no-referrer" />
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw-src.js" integrity="sha512-czICF/Crp0B7QB13iQZG9bYUpd/P1Ona1NeZN52gYsoVFXIpakDmdOUepMCHCMBIBd9Ei5Mlg8Quy4e504IT5A==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
|
||||
<script type="text/javascript" src="https://cdn.jsdelivr.net/jquery/latest/jquery.min.js"></script>
|
||||
<script type="text/javascript" src="https://cdn.jsdelivr.net/momentjs/latest/moment.min.js"></script>
|
||||
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/daterangepicker/daterangepicker.min.js"></script>
|
||||
<link rel="stylesheet" type="text/css" href="https://cdn.jsdelivr.net/npm/daterangepicker/daterangepicker.css" />
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/leaflet.draw/1.0.4/leaflet.draw.css" integrity="sha512-gc3xjCmIy673V6MyOAZhIW93xhM9ei1I+gLbmFjUHIjocENRsLX/QUE1htk5q1XV2D/iie/VQ8DXI6Vu8bexvQ==" crossorigin="anonymous" referrerpolicy="no-referrer" />
|
||||
|
||||
|
||||
</head>
|
||||
<body>
|
||||
<style type="text/css">
|
||||
/* #mapid { height: 800px; } */
|
||||
body {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html, body {
|
||||
height: 100%;
|
||||
width: 100%
|
||||
|
||||
}
|
||||
#mapid {
|
||||
height: 97%;
|
||||
|
||||
}
|
||||
#bottombar {
|
||||
height: 3%;
|
||||
}
|
||||
#form {
|
||||
position: absolute;
|
||||
width: 250px;
|
||||
left: 15px;
|
||||
top: 350px;
|
||||
z-index: 255555;
|
||||
|
||||
}
|
||||
#inputForm {
|
||||
background-color: #fff;
|
||||
width: 250px;
|
||||
z-index: 255553;
|
||||
}
|
||||
.datetimes {
|
||||
width: 250px;
|
||||
z-index: 2556557;
|
||||
}
|
||||
#current {
|
||||
z-index: 10000000;
|
||||
}
|
||||
|
||||
#legend {
|
||||
position: absolute;
|
||||
right: 30px;
|
||||
bottom: 40px;
|
||||
z-index: 55555;
|
||||
}
|
||||
|
||||
</style>
|
||||
|
||||
|
||||
<div id="mapid">
|
||||
<div id="legend">
|
||||
<img src="legend.png" alt="Legend">
|
||||
|
||||
</div>
|
||||
|
||||
<div id="form">
|
||||
<form id="inputForm">
|
||||
|
||||
<label for="datetimes">Time Range (UTC)</label>
|
||||
<input type="text" name="datetimes" class="datetimes" id="datetimes">
|
||||
Use the rectangle draw tool in the top right to fill these in automatically<br>
|
||||
<label for="neLat">NE Corner Latitude:</label><br>
|
||||
<input type="text" id="neLat" name="neLat" required><br>
|
||||
|
||||
<label for="neLon">NE Corner Longitude:</label><br>
|
||||
<input type="text" id="neLon" name="neLon" required><br>
|
||||
|
||||
<label for="swLat">SW Corner Latitude:</label><br>
|
||||
<input type="text" id="swLat" name="swLat" required><br>
|
||||
|
||||
<label for="swLon">SW Corner Longitude:</label><br>
|
||||
<input type="text" id="swLon" name="swLon" required><br>
|
||||
|
||||
<label>Output Format:</label><br>
|
||||
<input type="radio" id="leafletMap" name="outputFormat" value="Leaflet Map" checked>
|
||||
<label for="leafletMap">Leaflet Map</label><br>
|
||||
|
||||
|
||||
<input type="radio" id="geoJSON" name="outputFormat" value="geoJSON">
|
||||
<label for="geoJSON">geoJSON</label><br>
|
||||
|
||||
<input type="radio" id="kml" name="outputFormat" value="KML">
|
||||
<label for="kml">KML</label><br>
|
||||
|
||||
<label>Database:</label><br>
|
||||
<label>Real-time ltg ingest has been disabled</label><br>
|
||||
<input type="radio" id="main" name="database" value="main" checked>
|
||||
<label for="main">Main (1986-Present)</label><br>
|
||||
<input type="radio" id="current" name="database" value="current">
|
||||
<label for="current">Current (Only new strikes)</label><br>
|
||||
|
||||
<!-- <input type="radio" id="csv" name="outputFormat" value="CSV">
|
||||
<label for="csv">CSV</label><br>-->
|
||||
|
||||
|
||||
<button type="button" onclick="generateOutput()">Generate Output</button>
|
||||
<button type="button" onclick="clearstrikes()">Clear Strikes</button><br>
|
||||
<label for="strikecount">Total / Pos / Neg Strikes Displayed:</label><br>
|
||||
<input type="text" id="strikecount" name="strikecount" value=0 readonly><br>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<div id="bottombar">
|
||||
<a href="cams.html" class="w3-button w3-black">Cam List</a>
|
||||
<a href="admin.html" class="w3-button w3-black">Add Camera</a>
|
||||
<a href="db.html" class="w3-button w3-black">WU obs</a>
|
||||
<a href="5min.html" class="w3-button w3-black">5m ASOS obs</a>
|
||||
<a href="outagemap.html" class="w3-button w3-black">Power Outages</a>
|
||||
<a href="today.txt" class="w3-button w3-black">CoCoRaHS Remarks</a>
|
||||
<a href="https://docs.google.com/forms/d/1-2rTBkNyyBVe08G1vN1hcSOEOvvLUcS1Vs2SmmaudlU" class="w3-button w3-black" target="_blank">Questions? Comments?</a>
|
||||
|
||||
<!-- <a href="http://stoat.org/work/db.html" class="w3-button w3-black">Wunderground Obs</a>-->
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
|
||||
|
||||
var mymap = L.map('mapid').setView([38.332372, -81.652480], 8);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
var Esri_WorldStreetMap = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri'
|
||||
});
|
||||
|
||||
var Esri_WorldImagery = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
|
||||
});
|
||||
|
||||
var Esri_WorldTopoMap = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri — Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community'
|
||||
});
|
||||
var Stadia_StamenToner = L.tileLayer('https://tiles.stadiamaps.com/tiles/stamen_toner/{z}/{x}/{y}{r}.{ext}', {
|
||||
minZoom: 0,
|
||||
maxZoom: 20,
|
||||
attribution: '© <a href="https://www.stadiamaps.com/" target="_blank">Stadia Maps</a> © <a href="https://www.stamen.com/" target="_blank">Stamen Design</a> © <a href="https://openmaptiles.org/" target="_blank">OpenMapTiles</a> © <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors',
|
||||
ext: 'png'
|
||||
});
|
||||
var USGS_USImageryTopo = L.tileLayer('https://basemap.nationalmap.gov/arcgis/rest/services/USGSImageryTopo/MapServer/tile/{z}/{y}/{x}', {
|
||||
maxZoom: 20,
|
||||
attribution: 'Tiles courtesy of the <a href="https://usgs.gov/">U.S. Geological Survey</a>'
|
||||
});
|
||||
|
||||
|
||||
var baselayers = {
|
||||
"Esri Street Map": Esri_WorldStreetMap,
|
||||
"Esri Satellite": Esri_WorldImagery,
|
||||
"Esri Topo": Esri_WorldTopoMap,
|
||||
"USGS Sat/Topo": USGS_USImageryTopo
|
||||
}
|
||||
L.control.layers(baselayers,null,{collapsed: false}).addTo(mymap);
|
||||
|
||||
Esri_WorldStreetMap.addTo(mymap);
|
||||
|
||||
|
||||
var counties = 'counties.json'
|
||||
|
||||
var exteriorStyle = {
|
||||
"color": "#000000",
|
||||
"weight": 1,
|
||||
"fillOpacity": 0
|
||||
};
|
||||
|
||||
fetch(
|
||||
counties
|
||||
).then(
|
||||
res => res.json()
|
||||
).then(
|
||||
data => L.geoJSON(data, {style: exteriorStyle}).addTo(mymap)
|
||||
)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
var LeafIcon = L.Icon.extend({
|
||||
options: {
|
||||
shadowUrl:
|
||||
'http://leafletjs.com/docs/images/leaf-shadow.png',
|
||||
iconSize: [38, 95],
|
||||
shadowSize: [50, 64],
|
||||
iconAnchor: [22, 94],
|
||||
shadowAnchor: [4, 62],
|
||||
popupAnchor: [-3, -76]
|
||||
}
|
||||
});
|
||||
|
||||
var greenIcon = new LeafIcon({
|
||||
iconUrl: 'http://leafletjs.com/docs/images/leaf-green.png'
|
||||
});
|
||||
|
||||
var drawnItems = new L.FeatureGroup();
|
||||
mymap.addLayer(drawnItems);
|
||||
|
||||
var drawControl = new L.Control.Draw({
|
||||
position: 'topright',
|
||||
draw: {
|
||||
rectangle: true,
|
||||
polygon: false,
|
||||
circle: false,
|
||||
marker: false,
|
||||
circlemarker: false,
|
||||
polyline: false
|
||||
},
|
||||
|
||||
edit: {
|
||||
featureGroup: drawnItems
|
||||
}
|
||||
});
|
||||
mymap.addControl(drawControl);
|
||||
|
||||
mymap.on('draw:created', function (e) {
|
||||
var type = e.layerType,
|
||||
layer = e.layer;
|
||||
document.getElementById('neLat').value = layer._bounds._northEast.lat.toFixed(4);
|
||||
document.getElementById('neLon').value = layer._bounds._northEast.lng.toFixed(4);
|
||||
document.getElementById('swLat').value = layer._bounds._southWest.lat.toFixed(4);
|
||||
document.getElementById('swLon').value = layer._bounds._southWest.lng.toFixed(4);
|
||||
|
||||
if (type === 'marker') {
|
||||
layer.bindPopup('A popup!');
|
||||
}
|
||||
|
||||
drawnItems.addLayer(layer);
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
$(function() {
|
||||
var date = new Date();
|
||||
$('input[name="datetimes"]').daterangepicker({
|
||||
timePicker: true,
|
||||
timePicker24Hour: true,
|
||||
showDropdowns: true,
|
||||
startDate: moment().utc().startOf('hour').add(-23, 'hour'),
|
||||
endDate: moment().utc().startOf('hour').add(1,'hour'),
|
||||
maxSpan: { "years": 100 },
|
||||
linkedCalendars: false,
|
||||
minDate: new Date(1986,0,1),
|
||||
maxDate: new Date(date.getFullYear(), date.getMonth() + 1, 0),
|
||||
opens: 'right',
|
||||
drops: 'up',
|
||||
locale: {
|
||||
format: 'M/DD/YYYY HH:mm'
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
function generateOutput() {
|
||||
// Retrieve user inputs
|
||||
var neLat = document.getElementById("neLat").value;
|
||||
var neLon = document.getElementById("neLon").value;
|
||||
var swLat = document.getElementById("swLat").value;
|
||||
var swLon = document.getElementById("swLon").value;
|
||||
var database = document.querySelector('input[name="database"]:checked').value;
|
||||
var outputFormat = document.querySelector('input[name="outputFormat"]:checked').value;
|
||||
var startDate = $('#datetimes').data('daterangepicker').startDate._d;
|
||||
var endDate = $('#datetimes').data('daterangepicker').endDate._d;
|
||||
if (neLat && neLon && swLat && swLon) {
|
||||
|
||||
|
||||
let data = {swLat: swLat, neLat: neLat, swLon: swLon, neLon: neLon, type: outputFormat, startDate: startDate, endDate: endDate, database: database};
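// Example payload (illustrative values only) POSTed to the CGI endpoint below:
// { swLat: "37.50", neLat: "39.20", swLon: "-82.60", neLon: "-80.30",
//   type: "Leaflet Map", startDate: <Date>, endDate: <Date>, database: "main" }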
|
||||
// Perform actions based on the selected output format
|
||||
switch (outputFormat) {
|
||||
case "geoJSON":
|
||||
fetch_geoJSON(data)
|
||||
// Generate GeoJSON output
|
||||
// Implement your GeoJSON generation logic here
|
||||
break;
|
||||
case "KML":
|
||||
generate_kml(data)
|
||||
// Generate KML output
|
||||
// Implement your KML generation logic here
|
||||
break;
|
||||
case "CSV":
|
||||
// Generate CSV output
|
||||
// Implement your CSV generation logic here
|
||||
break;
|
||||
case "Leaflet Map":
|
||||
add_leaflet(data);
|
||||
// Show a Leaflet map with the specified coordinates and date/time
|
||||
// Implement your Leaflet map rendering logic here
|
||||
break;
|
||||
default:
|
||||
// Handle unsupported format
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
alert('Please ensure you have selected a bounding box for the data you want')
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
function fetch_geoJSON(data) {
|
||||
fetch("https://wx.stoat.org/cgi-bin/ltg.py", {
|
||||
method: "POST",
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(data)
|
||||
}).then((res) => res.blob())
|
||||
.then((blob) => URL.createObjectURL(blob))
|
||||
.then((href) => {
|
||||
Object.assign(document.createElement('a'), {
|
||||
href,
|
||||
download: 'RLXltg.geojson',
|
||||
}).click();
|
||||
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
function add_leaflet(data) {
|
||||
var geojsonMarkerOptions = {
|
||||
radius: 8,
|
||||
fillColor: "#ff7800",
|
||||
color: "#000",
|
||||
weight: 1,
|
||||
opacity: 1,
|
||||
fillOpacity: 0.8
|
||||
};
|
||||
|
||||
fetch("https://wx.stoat.org/cgi-bin/ltg.py", {
|
||||
method: "POST",
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(data)
|
||||
}).then((res) => res.text())
|
||||
.then(text => (plot_geojson(text)));
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
function plot_geojson(data) {
|
||||
|
||||
|
||||
|
||||
data = jQuery.parseJSON(data);
|
||||
|
||||
|
||||
if (data.features == null) {
|
||||
alert("No strikes were returned from the database, try a different time range or bounding box!");
|
||||
}
|
||||
|
||||
|
||||
var geojsonMarkerOptions = {
|
||||
radius: 2,
|
||||
fillColor: "#ff7800",
|
||||
color: "#000",
|
||||
weight: 1,
|
||||
opacity: 1,
|
||||
fillOpacity: 0.8
|
||||
};
|
||||
L.geoJson(data, {
|
||||
pointToLayer: function (feature, latlng) {
|
||||
var mypopup = L.popup().setContent("Time (UTC): " + feature.properties.time + "<br> Magnitude (kA) " + feature.properties.mag);
|
||||
counter = counter + 1;
|
||||
if (feature.properties.mag > 0) {
|
||||
poscounter = poscounter + 1;
|
||||
}
|
||||
if (feature.properties.mag < 0) {
|
||||
negcounter = negcounter + 1;
|
||||
}
|
||||
return L.circleMarker(latlng, {
|
||||
|
||||
radius: 3,
|
||||
fillColor: styleMarker(feature),
|
||||
color: styleMarker(feature),
|
||||
weight: 1,
|
||||
opacity: 1,
|
||||
fillOpacity: 0.8
|
||||
|
||||
|
||||
}
|
||||
).bindPopup(mypopup);
|
||||
}
|
||||
}).addTo(layerGroup);
|
||||
document.getElementById('strikecount').value = counter + " / " + poscounter + " / " + negcounter;
|
||||
drawnItems.clearLayers();
|
||||
//document.getElementById("current").innerHTML = 'Total Displayed Strikes: ' + counter;
|
||||
}
|
||||
|
||||
var counter = 0;
var poscounter = 0;
var negcounter = 0;
|
||||
document.getElementById('strikecount').value = counter + " / " + poscounter + " / " + negcounter;
|
||||
function generate_kml(data) {
|
||||
fetch("https://wx.stoat.org/cgi-bin/ltg.py", {
|
||||
method: "POST",
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
body: JSON.stringify(data)
|
||||
}).then((res) => res.blob())
|
||||
.then((blob) => URL.createObjectURL(blob))
|
||||
.then((href) => {
|
||||
Object.assign(document.createElement('a'), {
|
||||
href,
|
||||
download: 'RLXltg.kml',
|
||||
}).click();
|
||||
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
function styleMarker(feature) {
|
||||
if (feature.properties.mag > 0) {
|
||||
return 'red';
|
||||
|
||||
}
|
||||
if (feature.properties.mag < 0) {
|
||||
return 'blue';
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function clearstrikes() {
|
||||
layerGroup.clearLayers();
|
||||
counter = 0;
|
||||
poscounter = 0;
|
||||
negcounter = 0;
|
||||
document.getElementById('strikecount').value = counter + " / " + poscounter + " / " + negcounter;
|
||||
}
|
||||
|
||||
|
||||
var layerGroup = L.layerGroup().addTo(mymap);
|
||||
|
||||
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
27
ltg.php
Normal file
@@ -0,0 +1,27 @@
|
||||
<?php
|
||||
ini_set("log_errors", 1);
|
||||
ini_set("error_log", "/var/www/html/work/php-error.log");
|
||||
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
|
||||
$name = file_get_contents('php://input');
|
||||
$arr = json_decode($name,true);
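// Expected body: a JSON array of strike objects whose fields match the loop below, e.g.
// [{"time":"2025-03-26 21:05:00","lat":38.35,"lon":-81.63,"mag":-12.4}, ...]
// (the example values are illustrative only)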
|
||||
file_put_contents('test1.txt', $arr);
|
||||
foreach($arr as $item) { //foreach element in $arr
|
||||
file_put_contents('test3.txt', $item, FILE_APPEND);
|
||||
$dtg = $item['time']; //etc
|
||||
$lat = $item['lat'];
|
||||
$lon = $item['lon'];
|
||||
$mag = $item['mag'];
|
||||
pg_query_params($dbconn,
|
||||
"INSERT INTO ltg (datetime,lat,lon,mag) values ($1, $2, $3, $4) on conflict do nothing",
|
||||
array($dtg,$lat,$lon,$mag)) or die('Query failed: ' . pg_last_error());
|
||||
pg_query("COMMIT");
|
||||
}
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
|
||||
|
||||
339
map.html
Normal file
@@ -0,0 +1,339 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX Weather Camera Map</title>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
|
||||
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.7.1/dist/leaflet.css"
|
||||
integrity="sha512-xodZBNTC5n17Xt2atTPuE1HxjVMSvLVW9ocqUKLsCC5CXdbqCmblAshOMAS6/keqq/sMZMZ19scR4PsZChSR7A=="
|
||||
crossorigin=""/>
|
||||
<script src="https://unpkg.com/leaflet@1.7.1/dist/leaflet.js"
|
||||
|
||||
integrity="sha512-XQoYMqMTK8LvdxXYG3nZ448hOEQiglfqkJs1NOQV44cWnUrBc8PkAOcXy20w0vlaXaVUearIOBhiXZ5V3ynxwA=="
|
||||
crossorigin=""></script>
|
||||
</head>
|
||||
<body>
|
||||
<style type="text/css">
|
||||
/* #mapid { height: 800px; } */
|
||||
body {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html, body {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
|
||||
}
|
||||
#mapid {
|
||||
height: calc(100% - 40px);
|
||||
}
|
||||
#bottombar {
|
||||
height: 40px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
background-color: #f8f8f8;
|
||||
border-top: 1px solid #ccc;
|
||||
padding: 0 10px;
|
||||
z-index: 1000;
|
||||
}
|
||||
|
||||
input[type=number] {
|
||||
|
||||
|
||||
width: 50px;
|
||||
}
|
||||
|
||||
input[type=text] {
|
||||
|
||||
|
||||
width: 150px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
</style>
|
||||
|
||||
|
||||
<div id="mapid">
|
||||
</div>
|
||||
<div id="bottombar">
|
||||
<a href="cams.html" class="w3-button w3-black">Cam List</a>
|
||||
<a href="admin.html" class="w3-button w3-black">Add Camera</a>
|
||||
<a href="db.html" class="w3-button w3-black">WU obs</a>
|
||||
<a href="5min.html" class="w3-button w3-black">5m ASOS obs</a>
|
||||
<a href="outage.html" class="w3-button w3-black">Power Outages</a>
|
||||
<a href="today.txt" class="w3-button w3-black">CoCoRaHS Remarks</a>
|
||||
<a href="https://docs.google.com/forms/d/1-2rTBkNyyBVe08G1vN1hcSOEOvvLUcS1Vs2SmmaudlU" class="w3-button w3-black" target="_blank">Questions? Comments?</a>
|
||||
<label for="numberofimages">Number of Frames for Loop</label>
|
||||
<input type="number" id="numberofimages" name="numberofimages" onchange="setcams()">
|
||||
</div>
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
|
||||
|
||||
var mymap = L.map('mapid').setView([38.332372, -81.652480], 8);
|
||||
// Get the saved value from cookie, default to 20 if not found
|
||||
var camimages = getCookie('camimages') || 20;
|
||||
|
||||
// Set the initial value of the input field
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
document.getElementById('numberofimages').value = camimages;
|
||||
});
|
||||
|
||||
var Esri_WorldStreetMap = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri'
|
||||
});
|
||||
|
||||
var Esri_WorldImagery = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
|
||||
});
|
||||
|
||||
var Esri_WorldTopoMap = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
attribution: 'Tiles © Esri — Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community'
|
||||
});
|
||||
var Stadia_StamenToner = L.tileLayer('https://tiles.stadiamaps.com/tiles/stamen_toner/{z}/{x}/{y}{r}.{ext}', {
|
||||
minZoom: 0,
|
||||
maxZoom: 20,
|
||||
attribution: '© <a href="https://www.stadiamaps.com/" target="_blank">Stadia Maps</a> © <a href="https://www.stamen.com/" target="_blank">Stamen Design</a> © <a href="https://openmaptiles.org/" target="_blank">OpenMapTiles</a> © <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors',
|
||||
ext: 'png'
|
||||
});
|
||||
var USGS_USImageryTopo = L.tileLayer('https://basemap.nationalmap.gov/arcgis/rest/services/USGSImageryTopo/MapServer/tile/{z}/{y}/{x}', {
|
||||
maxZoom: 20,
|
||||
attribution: 'Tiles courtesy of the <a href="https://usgs.gov/">U.S. Geological Survey</a>'
|
||||
});
|
||||
|
||||
|
||||
// Function to set baselayer cookie
|
||||
function setBaseLayerCookie(layerName) {
|
||||
setCookie('baselayer', layerName, 365);
|
||||
}
|
||||
|
||||
// Function to get baselayer cookie
|
||||
function getBaseLayerCookie() {
|
||||
return getCookie('baselayer');
|
||||
}
|
||||
|
||||
var baselayers = {
|
||||
"Esri Street Map": Esri_WorldStreetMap,
|
||||
"Esri Satellite": Esri_WorldImagery,
|
||||
"Esri Topo": Esri_WorldTopoMap,
|
||||
"USGS Sat/Topo": USGS_USImageryTopo
|
||||
};
|
||||
|
||||
var layerControl = L.control.layers(baselayers, null, {collapsed: false}).addTo(mymap);
|
||||
|
||||
// Get saved baselayer from cookie and set it
|
||||
var savedBaseLayer = getBaseLayerCookie();
|
||||
if (savedBaseLayer && baselayers[savedBaseLayer]) {
|
||||
baselayers[savedBaseLayer].addTo(mymap);
|
||||
} else {
|
||||
// Default to Esri Street Map if no cookie found
|
||||
Esri_WorldStreetMap.addTo(mymap);
|
||||
setBaseLayerCookie("Esri Street Map");
|
||||
}
|
||||
|
||||
// Listen for baselayer changes and save to cookie
|
||||
mymap.on('baselayerchange', function(e) {
|
||||
// Find the layer name from the baselayers object
|
||||
for (var layerName in baselayers) {
|
||||
if (baselayers[layerName] === e.layer) {
|
||||
setBaseLayerCookie(layerName);
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
function archive () {
|
||||
archive = document.getElementById("archive").value;
|
||||
archive=archive.trim().replace(/ /g, '%20');
|
||||
}
|
||||
|
||||
|
||||
// Function to set a cookie
|
||||
function setCookie(name, value, days) {
|
||||
var expires = "";
|
||||
if (days) {
|
||||
var date = new Date();
|
||||
date.setTime(date.getTime() + (days * 24 * 60 * 60 * 1000));
|
||||
expires = "; expires=" + date.toUTCString();
|
||||
}
|
||||
document.cookie = name + "=" + (value || "") + expires + "; path=/";
|
||||
}
|
||||
|
||||
// Function to get a cookie
|
||||
function getCookie(name) {
|
||||
var nameEQ = name + "=";
|
||||
var ca = document.cookie.split(';');
|
||||
for(var i = 0; i < ca.length; i++) {
|
||||
var c = ca[i];
|
||||
while (c.charAt(0) == ' ') c = c.substring(1, c.length);
|
||||
if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length, c.length);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function setcams() {
|
||||
camimages = document.getElementById("numberofimages").value;
|
||||
// Save the value to a cookie that expires in 365 days
|
||||
setCookie('camimages', camimages, 365);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
var counties = 'counties.json'
|
||||
|
||||
var exteriorStyle = {
|
||||
"color": "#000000",
|
||||
"weight": 1,
|
||||
"fillOpacity": 0
|
||||
};
|
||||
|
||||
fetch(
|
||||
counties
|
||||
).then(
|
||||
res => res.json()
|
||||
).then(
|
||||
data => L.geoJSON(data, {style: exteriorStyle}).addTo(mymap)
|
||||
)
|
||||
|
||||
var LeafIcon = L.Icon.extend({
|
||||
options: {
|
||||
iconSize: [40, 25],
|
||||
shadowSize: [50, 64],
|
||||
iconAnchor: [0, 0],
|
||||
shadowAnchor: [4, 62],
|
||||
popupAnchor: [-3, -76],
|
||||
className: 'cams'
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
camlayer = L.layerGroup();
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
$.getJSON('cam.php', function(data){
|
||||
var wild = Math.random();
|
||||
for(var i in data){
|
||||
//var greenIcon = new LeafIcon({iconUrl: 'camdata/' + data[i].lastimage})
|
||||
var greenIcon = new LeafIcon({iconUrl: 'camdata/' + data[i].camid + "/latest.jpg"}) // Use thumbnail instead of full image
|
||||
//window['camsite'+data[i].camid] = L.marker([data[i].lat, data[i].lon], {icon: greenIcon}).on('click',onMarkerClick(data[i].camid)).on('mouseover',onMarkerHover(data[i].camid)).on('mouseout',onMarkerNotHover(data[i].camid)).addTo(mymap);
|
||||
window['camsite'+data[i].camid] = L.marker([data[i].lat, data[i].lon], {icon: greenIcon}).on('click',onMarkerClick(data[i].camid)).addTo(mymap);
|
||||
}
|
||||
|
||||
|
||||
|
||||
});
|
||||
|
||||
function onMarkerClick(itemsID){
|
||||
return function(ev) {
|
||||
window.open('one.php?camid=' + itemsID + '&camimages='+camimages,"name=_blank")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function onMarkerHover(itemsID){
|
||||
return function(ev) {
|
||||
console.log(itemsID);
|
||||
}
|
||||
}
|
||||
|
||||
function onMarkerNotHover(itemsID){
|
||||
return function(ev) {
|
||||
console.log(itemsID);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
function updateIcon() {
|
||||
var wild = Math.random();
|
||||
|
||||
$.getJSON('cam.php', function(data){
|
||||
for(var i in data){
|
||||
//var greenIcon = new LeafIcon({iconUrl: 'camdata/' + data[i].lastimage});
|
||||
var greenIcon = new LeafIcon({iconUrl: 'camdata/' + data[i].camid + "/latest.jpg"})
|
||||
//window['camsite'+data[i].camid] = L.marker([data[i].lat, data[i].lon], {icon: greenIcon}).on('click',onMarkerClick(data[i].camid)).addTo(mymap);
|
||||
window['camsite'+data[i].camid].setIcon(greenIcon);
|
||||
}
|
||||
var currentZoom = mymap.getZoom();
|
||||
if (currentZoom < 15) {
|
||||
var zoomfactor = currentZoom*currentZoom/2;
|
||||
var hnewzoom = '' + (zoomfactor) +'px';
|
||||
var vnewzoom = '' + (zoomfactor*.70) +'px';
|
||||
} else {
|
||||
hnewzoom = '300px';
|
||||
vnewzoom = '200px';
|
||||
}
|
||||
$('#mapid .cams').css({'width':hnewzoom,'height':vnewzoom});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
window.setInterval(function(){
|
||||
updateIcon()
|
||||
}, 360000);
|
||||
|
||||
|
||||
|
||||
mymap.on('zoomend', function() {
|
||||
var currentZoom = mymap.getZoom();
|
||||
if (currentZoom < 15) {
|
||||
var zoomfactor = currentZoom*currentZoom/2;
|
||||
var hnewzoom = '' + (zoomfactor) +'px';
|
||||
var vnewzoom = '' + (zoomfactor*.70) +'px';
|
||||
} else {
|
||||
hnewzoom = '300px';
|
||||
vnewzoom = '200px';
|
||||
}
|
||||
$('#mapid .cams').css({'width':hnewzoom,'height':vnewzoom});
|
||||
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
578
metar.html
Normal file
@@ -0,0 +1,578 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX METAR Cove</title>
|
||||
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<script src="https://code.jquery.com/ui/1.13.1/jquery-ui.js" integrity="sha256-6XMVI0zB8cRzfZjqKcD01PBsAy3FlDASrlC8SxCpInY=" crossorigin="anonymous"></script>
|
||||
<link rel="stylesheet" href="https://code.jquery.com/ui/1.13.1/themes/smoothness/jquery-ui.css">
|
||||
<script src="https://d3js.org/d3.v7.min.js"></script>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<style type="text/css">
|
||||
#weatherGraph {
|
||||
width: 100%;
|
||||
height: 600px;
|
||||
border: 1px solid black;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.icao-label {
|
||||
text-orientation: horizontal;
|
||||
font-size: 12px;
|
||||
font-weight: bold;
|
||||
fill: #333;
|
||||
text-anchor: start;
|
||||
/* Dominant-baseline helps center vertically */
|
||||
dominant-baseline: middle;
|
||||
}
|
||||
|
||||
.time-label { /* General class if needed */
|
||||
font-size: 10px;
|
||||
font-weight: bold;
|
||||
fill: #333;
|
||||
}
|
||||
|
||||
.time-day {
|
||||
font-size: 10px;
|
||||
fill: #333;
|
||||
text-anchor: middle;
|
||||
}
|
||||
|
||||
.time-hour {
|
||||
font-size: 10px;
|
||||
fill: #333;
|
||||
text-anchor: middle;
|
||||
}
|
||||
|
||||
.tooltip {
|
||||
position: absolute;
|
||||
display: none;
|
||||
background: rgba(0, 0, 0, 0.8);
|
||||
color: white;
|
||||
padding: 5px;
|
||||
border-radius: 3px;
|
||||
pointer-events: none;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
#timeLabels {
|
||||
/* Increased height for 4 rows of text (Zulu Date, Zulu Hour, EST Date, EST Hour) */
|
||||
height: 75px; /* ADJUSTED */
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.legend {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 10px;
|
||||
margin: 10px 0 0 0;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.legend-item {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
.legend-color {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
border: 1px solid #000;
|
||||
}
|
||||
</style>
|
||||
|
||||
<div id="metar">
|
||||
<label for="start">Start Date/Time (Zulu):</label>
|
||||
<input type="datetime-local" id="start" name="start" step="1">
|
||||
<br><br>
|
||||
<label for="end">End Date/Time (Zulu):</label>
|
||||
<input type="datetime-local" id="end" name="end" step="1">
|
||||
<button id="submitButton">Submit</button>
|
||||
</div>
|
||||
<div id="weatherGraph"></div>
|
||||
<!-- Ensure the div height matches the CSS -->
|
||||
<div id="timeLabels" style="width: 100%; height: 75px;"></div>
|
||||
<div class="legend"></div>
|
||||
|
||||
<script>
|
||||
function getUrlParams() {
|
||||
const params = new URLSearchParams(window.location.search);
|
||||
return {
|
||||
start: params.get('start'),
|
||||
end: params.get('end')
|
||||
};
|
||||
}
|
||||
|
||||
const startPicker = document.getElementById('start');
|
||||
const endPicker = document.getElementById('end');
|
||||
|
||||
document.addEventListener('DOMContentLoaded', (event) => {
|
||||
const urlParams = getUrlParams();
|
||||
|
||||
function adjustEndDate() {
|
||||
if (startPicker.value) {
|
||||
let startDate = new Date(startPicker.value + 'Z'); // Treat as Zulu
|
||||
let endDate = endPicker.value ? new Date(endPicker.value + 'Z') : new Date(startDate); // Treat as Zulu
|
||||
|
||||
if (!endPicker.value || endDate < startDate) {
|
||||
endDate = new Date(startDate);
|
||||
}
|
||||
|
||||
// Keep same day/month/year relative to start if end is invalid or before start
|
||||
if (endDate < startDate) {
|
||||
endDate.setUTCDate(startDate.getUTCDate()); // Use UTC functions
|
||||
endDate.setUTCMonth(startDate.getUTCMonth());
|
||||
endDate.setUTCFullYear(startDate.getUTCFullYear());
|
||||
}
|
||||
// Format back to ISO string suitable for datetime-local, removing the 'Z'
|
||||
let formattedEndDate = endDate.toISOString().slice(0, 19);
|
||||
endPicker.value = formattedEndDate;
|
||||
}
|
||||
}
|
||||
|
||||
if (urlParams.start) {
|
||||
const startDate = new Date(urlParams.start); // Assuming URL param is Zulu
|
||||
// Check if date is valid before formatting
|
||||
if (!isNaN(startDate)) {
|
||||
startPicker.value = startDate.toISOString().slice(0, 19);
|
||||
}
|
||||
}
|
||||
|
||||
if (urlParams.end) {
|
||||
const endDate = new Date(urlParams.end); // Assuming URL param is Zulu
|
||||
if (!isNaN(endDate)) {
|
||||
endPicker.value = endDate.toISOString().slice(0, 19);
|
||||
}
|
||||
}
|
||||
|
||||
if (startPicker.value && !endPicker.value) {
|
||||
adjustEndDate(); // Adjust end date if start is set but end isn't
|
||||
}
|
||||
|
||||
|
||||
if (urlParams.start && urlParams.end) {
|
||||
getValues();
|
||||
}
|
||||
|
||||
startPicker.addEventListener('change', adjustEndDate);
|
||||
});
|
||||
|
||||
|
||||
function getmetars(startDateStr, endDateStr, startZulu, endZulu) {
|
||||
const graphContainer = document.getElementById('weatherGraph');
|
||||
const timeLabelsContainer = document.getElementById('timeLabels');
|
||||
const legendContainer = document.querySelector('.legend');
|
||||
const startTime = startZulu;
|
||||
const endTime = endZulu;
|
||||
|
||||
// --- Clear previous state ---
|
||||
graphContainer.innerHTML = "";
|
||||
timeLabelsContainer.innerHTML = "";
|
||||
// legendContainer.innerHTML = ""; // Clear legend at start of function or before calling generateLegend
|
||||
|
||||
// --- Date Validation ---
|
||||
if (!startTime || !endTime || isNaN(startTime) || isNaN(endTime) || startTime >= endTime) {
|
||||
graphContainer.innerHTML = "<p>Error: Invalid date range selected.</p>";
|
||||
generateLegend(); // Show legend even on error
|
||||
return;
|
||||
}
|
||||
|
||||
// --- Fetch Data ---
|
||||
$.getJSON(`lsr.php?metars=true&start=${startDateStr}&end=${endDateStr}`, function(weatherdata) {
|
||||
// Ensure weatherdata is an array
|
||||
weatherdata = weatherdata || [];
|
||||
|
||||
const icaos = [...new Set(weatherdata.map(data => data.icao))].sort();
|
||||
const stationNames = {};
|
||||
// Pre-process data: Convert times and group by ICAO
|
||||
const dataByIcao = {};
|
||||
icaos.forEach(icao => { dataByIcao[icao] = []; }); // Initialize empty array for each ICAO
|
||||
|
||||
weatherdata.forEach(data => {
|
||||
if (data.icao && data.obtime) { // Basic check for valid data
|
||||
stationNames[data.icao] = data.stationname;
|
||||
data.obtimeDate = new Date(data.obtime); // Convert string time to Date object
|
||||
if (dataByIcao[data.icao]) { // Add to the correct ICAO group
|
||||
dataByIcao[data.icao].push(data);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Sort data within each ICAO group by time (important for efficient search)
|
||||
icaos.forEach(icao => {
|
||||
dataByIcao[icao].sort((a, b) => a.obtimeDate - b.obtimeDate);
|
||||
});
|
||||
|
||||
// --- Calculate graph dimensions ---
|
||||
const totalMillis = endTime - startTime;
|
||||
const hours = totalMillis / (1000 * 60 * 60);
|
||||
if (hours <= 0) {
|
||||
graphContainer.innerHTML = "<p>Error: End time must be after start time.</p>";
|
||||
generateLegend();
|
||||
return;
|
||||
}
|
||||
const containerWidth = graphContainer.clientWidth;
|
||||
const containerHeight = graphContainer.clientHeight;
|
||||
const hourWidth = containerWidth / hours;
|
||||
const icaoHeight = icaos.length > 0 ? containerHeight / icaos.length : containerHeight;
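// Layout: one horizontal band per station (icaoHeight) and one column per hour of the selected range (hourWidth).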
|
||||
const hourMillis = 1000 * 60 * 60;
|
||||
|
||||
// --- Create SVG & Tooltip ---
|
||||
const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg");
|
||||
svg.setAttribute('width', containerWidth);
|
||||
svg.setAttribute('height', containerHeight);
|
||||
graphContainer.appendChild(svg);
|
||||
|
||||
const tooltip = document.createElement('div');
|
||||
tooltip.className = 'tooltip';
|
||||
document.body.appendChild(tooltip); // Append to body
|
||||
|
||||
// --- Draw Rectangles: Iterate through ICAOs and Time Slots ---
|
||||
icaos.forEach((icao, index) => {
|
||||
const y = index * icaoHeight;
|
||||
const icaoSpecificData = dataByIcao[icao]; // Get pre-processed data for this ICAO
|
||||
let dataPointer = 0; // Index to track position in sorted icaoSpecificData
|
||||
|
||||
// Draw horizontal line separator
|
||||
const line = document.createElementNS("http://www.w3.org/2000/svg", "line");
|
||||
line.setAttribute('x1', 0); line.setAttribute('y1', y);
|
||||
line.setAttribute('x2', '100%'); line.setAttribute('y2', y);
|
||||
line.setAttribute('stroke', '#e0e0e0'); // Lighter gray for less visual noise
|
||||
svg.appendChild(line);
|
||||
|
||||
// Loop through each *hour slot* on the graph
|
||||
for (let i = 0; i < Math.ceil(hours); i++) { // Use Math.ceil to cover partial last hour
|
||||
const slotStartTimeMillis = startTime.getTime() + i * hourMillis;
|
||||
const slotEndTimeMillis = slotStartTimeMillis + hourMillis;
|
||||
const x = i * hourWidth;
|
||||
|
||||
// Find the *most relevant* observation for this time slot.
|
||||
// Strategy: Use the *latest* observation whose time is *less than or equal to* the slot's END time.
|
||||
// This represents the conditions reported that would influence this hour.
|
||||
let relevantObservation = null;
|
||||
// Advance pointer past observations before the current slot potentially starts
|
||||
while (dataPointer < icaoSpecificData.length && icaoSpecificData[dataPointer].obtimeDate.getTime() < slotStartTimeMillis) {
|
||||
dataPointer++;
|
||||
}
|
||||
// Check observations within or just before the slot
|
||||
let searchIndex = dataPointer;
|
||||
while (searchIndex < icaoSpecificData.length && icaoSpecificData[searchIndex].obtimeDate.getTime() < slotEndTimeMillis) {
|
||||
relevantObservation = icaoSpecificData[searchIndex]; // Update with the latest found within range
|
||||
searchIndex++;
|
||||
}
|
||||
// If no observation *within* the slot, check the one immediately preceding it (if pointer > 0)
|
||||
if (relevantObservation === null && dataPointer > 0 && icaoSpecificData.length > 0) {
|
||||
// Check the observation pointed to by dataPointer-1 (the last one before this slot started)
|
||||
if (icaoSpecificData[dataPointer - 1].obtimeDate.getTime() < slotStartTimeMillis) {
|
||||
// This observation occurred *before* the slot began, use it if nothing else found
|
||||
// relevantObservation = icaoSpecificData[dataPointer - 1]; // Uncomment this line if you want the previous ob to fill the gap
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
let color;
|
||||
let rawData = null;
|
||||
let obTimeStr = null;
|
||||
|
||||
// Determine color based on whether an observation was found for this slot
|
||||
if (relevantObservation) {
|
||||
// An observation exists, use wxToColor to get Precip Color or Light Gray
|
||||
color = wxToColor(relevantObservation.wx);
|
||||
rawData = relevantObservation.raw;
|
||||
obTimeStr = relevantObservation.obtime; // Use original time string for display
|
||||
} else {
|
||||
// NO observation record found relevant to this time slot
|
||||
color = 'white'; // Missing observation
|
||||
}
|
||||
|
||||
// Draw the rectangle for this slot
|
||||
const rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
|
||||
rect.setAttribute('x', x);
|
||||
rect.setAttribute('y', y);
|
||||
// Ensure width doesn't exceed SVG boundary
|
||||
rect.setAttribute('width', Math.min(hourWidth, containerWidth - x));
|
||||
rect.setAttribute('height', icaoHeight);
|
||||
rect.setAttribute('fill', color);
|
||||
|
||||
// Add border to white/light gray for visibility
|
||||
if (color === 'white' || color === '#f0f0f0') {
|
||||
rect.setAttribute('stroke', '#cccccc');
|
||||
rect.setAttribute('stroke-width', '1');
|
||||
}
|
||||
|
||||
// --- Tooltip Logic ---
|
||||
rect.setAttribute('data-icao', icao);
|
||||
if (relevantObservation) {
|
||||
// Tooltip for existing observation
|
||||
rect.setAttribute('data-raw', rawData);
|
||||
rect.setAttribute('data-time', obTimeStr);
|
||||
rect.addEventListener('mouseover', function(e) {
|
||||
tooltip.innerHTML = `<b>${this.getAttribute('data-icao')}</b> (${this.getAttribute('data-time')})<br>${this.getAttribute('data-raw')}`;
|
||||
tooltip.style.display = 'block';
|
||||
});
|
||||
} else {
|
||||
// Tooltip for missing observation slot
|
||||
const slotStartTime = new Date(slotStartTimeMillis);
|
||||
const approxTimeStr = slotStartTime.toISOString().slice(11, 16) + "Z"; // HH:MMZ
|
||||
const approxDateStr = `${slotStartTime.getUTCMonth() + 1}/${slotStartTime.getUTCDate()}`;
|
||||
rect.setAttribute('data-time-slot', `${approxDateStr} ${approxTimeStr}`);
|
||||
rect.addEventListener('mouseover', function(e) {
|
||||
tooltip.innerHTML = `<b>${this.getAttribute('data-icao')}</b><br>No observation found for<br>${this.getAttribute('data-time-slot')} hour`;
|
||||
tooltip.style.display = 'block';
|
||||
});
|
||||
}
|
||||
// Common tooltip positioning and mouseout
|
||||
rect.addEventListener('mousemove', function(e) {
|
||||
// Position relative to page, offset from cursor
|
||||
tooltip.style.left = (e.pageX + 15) + 'px';
|
||||
tooltip.style.top = (e.pageY + 15) + 'px';
|
||||
});
|
||||
rect.addEventListener('mouseout', function() {
|
||||
tooltip.style.display = 'none';
|
||||
});
|
||||
|
||||
svg.appendChild(rect);
|
||||
|
||||
} // End of hour slot loop (i)
|
||||
}); // End of ICAO loop (index)
|
||||
|
||||
// --- Draw ICAO Labels (Draw AFTER rectangles) ---
|
||||
icaos.forEach((icao, index) => {
|
||||
const y = index * icaoHeight;
|
||||
const text = document.createElementNS("http://www.w3.org/2000/svg", "text");
|
||||
text.textContent = icao; // Or stationNames[icao]
|
||||
text.setAttribute('x', 5);
|
||||
text.setAttribute('y', y + icaoHeight / 2); // Vertically center
|
||||
text.setAttribute('class', 'icao-label'); // Ensure class with dominant-baseline is applied
|
||||
svg.appendChild(text);
|
||||
});
|
||||
|
||||
// --- Add final horizontal line at the bottom ---
|
||||
if (icaos.length > 0) {
|
||||
const finalY = containerHeight; // Bottom of the SVG
|
||||
const finalLine = document.createElementNS("http://www.w3.org/2000/svg", "line");
|
||||
finalLine.setAttribute('x1', 0); finalLine.setAttribute('y1', finalY);
|
||||
finalLine.setAttribute('x2', '100%'); finalLine.setAttribute('y2', finalY);
|
||||
finalLine.setAttribute('stroke', '#e0e0e0');
|
||||
svg.appendChild(finalLine);
|
||||
}
|
||||
|
||||
|
||||
// --- Add Time Scale ---
|
||||
const timeLabelsSVG = document.createElementNS("http://www.w3.org/2000/svg", "svg");
|
||||
|
||||
timeLabelsSVG.setAttribute('width', '100%');
|
||||
timeLabelsSVG.setAttribute('height', '75'); // Match the container height
|
||||
timeLabelsContainer.appendChild(timeLabelsSVG);
|
||||
const yZuluDate = 15;
|
||||
const yZuluHour = 30;
|
||||
const yEstDate = 45;
|
||||
const yEstHour = 60;
|
||||
const estOffsetMillis = -5 * 60 * 60 * 1000; // Fixed UTC-5 offset in milliseconds (standard time only; no daylight-saving adjustment)
|
||||
for (let i = 0; i <= hours; i++) { // Use <= hours to get label at the end time too
|
||||
const x = i * hourWidth;
|
||||
const tickTimeZulu = new Date(startTime.getTime() + i * hourMillis);
|
||||
// --- Zulu Labels ---
|
||||
const zuluDayText = document.createElementNS("http://www.w3.org/2000/svg", "text");
|
||||
zuluDayText.textContent = `${tickTimeZulu.getUTCMonth() + 1}/${tickTimeZulu.getUTCDate()}`;
|
||||
zuluDayText.setAttribute('x', x); zuluDayText.setAttribute('y', yZuluDate);
|
||||
zuluDayText.setAttribute('class', 'time-day'); timeLabelsSVG.appendChild(zuluDayText);
|
||||
const zuluHourText = document.createElementNS("http://www.w3.org/2000/svg", "text");
|
||||
zuluHourText.textContent = `${String(tickTimeZulu.getUTCHours()).padStart(2, '0')}Z`;
|
||||
zuluHourText.setAttribute('x', x); zuluHourText.setAttribute('y', yZuluHour);
|
||||
zuluHourText.setAttribute('class', 'time-hour'); timeLabelsSVG.appendChild(zuluHourText);
|
||||
// --- EST Labels (UTC-5) ---
|
||||
const tickTimeEst = new Date(tickTimeZulu.getTime() + estOffsetMillis);
|
||||
const estDayText = document.createElementNS("http://www.w3.org/2000/svg", "text");
|
||||
estDayText.textContent = `${tickTimeEst.getUTCMonth() + 1}/${tickTimeEst.getUTCDate()}`;
|
||||
estDayText.setAttribute('x', x); estDayText.setAttribute('y', yEstDate);
|
||||
estDayText.setAttribute('class', 'time-day'); timeLabelsSVG.appendChild(estDayText);
|
||||
const estHourText = document.createElementNS("http://www.w3.org/2000/svg", "text");
|
||||
estHourText.textContent = `${String(tickTimeEst.getUTCHours()).padStart(2, '0')}E`;
|
||||
estHourText.setAttribute('x', x); estHourText.setAttribute('y', yEstHour);
|
||||
estHourText.setAttribute('class', 'time-hour'); timeLabelsSVG.appendChild(estHourText);
|
||||
}
|
||||
|
||||
|
||||
}).fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error("Failed to fetch METAR data:", textStatus, errorThrown);
|
||||
graphContainer.innerHTML = `<p>Error fetching METAR data. Status: ${textStatus}.</p>`;
|
||||
generateLegend(); // Show legend even on failure
|
||||
}).always(function() {
|
||||
// Final check to generate legend if it hasn't been done
|
||||
if (!legendContainer.hasChildNodes()) {
|
||||
generateLegend();
|
||||
}
|
||||
});
|
||||
|
||||
// Call generateLegend immediately so it shows while data loads/if errors occur before AJAX completes
|
||||
generateLegend();
|
||||
}
|
||||
|
||||
function wxToColor(wx) {
|
||||
// --- This function is called ONLY when an observation record exists ---
|
||||
// It determines the color based on the content of the 'wx' field.
|
||||
|
||||
// 1. Normalize the input string (handle null/undefined from DB field)
|
||||
const normalizedWx = String(wx || "").toLowerCase().trim(); // Default to "" if wx is null/undefined
|
||||
|
||||
// 2. Handle cases indicating no precipitation or only other phenomena
|
||||
if (normalizedWx === "") {
|
||||
// Catches empty strings, null, undefined from the DB field for an existing record
|
||||
return '#f0f0f0'; // Very light gray for No Reported WX / Clear
|
||||
}
|
||||
|
||||
// 3. Check for specific precipitation types with intensity
|
||||
// Helper regexes: match a whole whitespace-delimited wx token, optionally prefixed with + or -; the (?:...) group keeps alternations like 'ra|dz' from escaping the anchors
|
||||
const checkWx = (pattern) => {
|
||||
const regex = new RegExp(`(^|\\s)[\\+\\-]?(?:${pattern})(\\s|$)`);
|
||||
return regex.test(normalizedWx);
|
||||
};
|
||||
const getIntensity = (pattern) => {
|
||||
if (new RegExp(`(^|\\s)\\+(?:${pattern})(\\s|$)`).test(normalizedWx)) return '+';
|
||||
if (new RegExp(`(^|\\s)\\-(?:${pattern})(\\s|$)`).test(normalizedWx)) return '-';
|
||||
if (new RegExp(`(^|\\s)(?:${pattern})(\\s|$)`).test(normalizedWx)) return '';
|
||||
return null;
|
||||
};
|
||||
|
||||
// Precipitation Checks (order matters)
|
||||
let intensity = getIntensity('fzra|fzdz'); // Freezing Precip
|
||||
if (intensity !== null) {
|
||||
if (intensity === '+') return '#4b0082'; // Heavy FZRA/FZDZ
|
||||
if (intensity === '-') return '#dda0dd'; // Light FZRA/FZDZ
|
||||
return '#800080'; // Moderate FZRA/FZDZ
|
||||
}
|
||||
if (checkWx('blsn')) return 'red'; // Blowing Snow
|
||||
intensity = getIntensity('sn'); // Snow
|
||||
if (intensity !== null) {
|
||||
if (intensity === '+') return '#00008b'; // Heavy SN
|
||||
if (intensity === '-') return '#b0e0e6'; // Light SN
|
||||
return '#4682b4'; // Moderate SN
|
||||
}
|
||||
intensity = getIntensity('pl|pe'); // Ice Pellets
|
||||
if (intensity !== null) {
|
||||
return 'pink'; // All PL intensity as pink
|
||||
}
|
||||
intensity = getIntensity('ra|dz'); // Rain/Drizzle
|
||||
if (intensity !== null) {
|
||||
if (intensity === '+') return '#006400'; // Heavy RA/DZ
|
||||
if (intensity === '-') return '#90ee90'; // Light RA/DZ
|
||||
return '#228b22'; // Moderate RA/DZ
|
||||
}
|
||||
if (checkWx('up')) return '#dda0dd'; // Unknown Precip
|
||||
|
||||
// 4. If the 'wx' field had content, but it didn't match any known precipitation,
|
||||
// it represents other reported phenomena (FG, HZ, BR, clouds, etc.).
|
||||
return '#f0f0f0'; // Very light gray for Other Reported WX (non-precip)
|
||||
}
|
||||
function getValues() {
|
||||
let startDateStr = startPicker.value;
|
||||
let endDateStr = endPicker.value;
|
||||
|
||||
if (!startDateStr || !endDateStr) {
|
||||
alert("Please select both a start and end date/time.");
|
||||
return;
|
||||
}
|
||||
|
||||
// Convert input strings (assumed local but representing Zulu) to Zulu Date objects
|
||||
// Appending 'Z' tells the Date constructor to parse it as UTC/Zulu
|
||||
let startZulu = new Date(startDateStr + 'Z');
|
||||
let endZulu = new Date(endDateStr + 'Z');
|
||||
|
||||
// Basic validation
|
||||
if (isNaN(startZulu) || isNaN(endZulu)) {
|
||||
alert("Invalid date format selected. Please check your input.");
|
||||
console.error("Invalid Date object created:", startDateStr, endDateStr, startZulu, endZulu);
|
||||
return;
|
||||
}
|
||||
if (startZulu >= endZulu) {
|
||||
alert("Start date must be before the end date.");
|
||||
return;
|
||||
}
|
||||
|
||||
console.log("Raw Inputs:", startDateStr, endDateStr);
|
||||
console.log("Parsed Zulu Dates:", startZulu, endZulu);
|
||||
|
||||
// Pass both the original strings (for PHP) and the Date objects (for JS)
|
||||
getmetars(startDateStr, endDateStr, startZulu, endZulu);
|
||||
}
|
||||
|
||||
document.getElementById('submitButton').addEventListener('click', getValues);
|
||||
|
||||
function generateLegend() {
|
||||
const legendContainer = document.querySelector('.legend');
|
||||
legendContainer.innerHTML = ''; // Clear previous legend items
|
||||
|
||||
// Define the very light gray color
|
||||
const noPrecipColor = '#f0f0f0';
|
||||
|
||||
const legendData = [
|
||||
// Grouped by Precipitation Type
|
||||
{ group: 'Freezing', items: [
|
||||
{ label: '-FZRA/DZ', color: '#dda0dd'},
|
||||
{ label: 'FZRA/DZ', color: '#800080'},
|
||||
{ label: '+FZRA/DZ', color: '#4b0082'}
|
||||
]},
|
||||
{ group: 'Snow', items: [
|
||||
{ label: '-SN', color: '#b0e0e6'},
|
||||
{ label: 'SN', color: '#4682b4'},
|
||||
{ label: '+SN', color: '#00008b'}
|
||||
]},
|
||||
{ group: 'Ice Pellets', items: [
|
||||
{ label: 'PL/PE', color: 'pink'}
|
||||
]},
|
||||
{ group: 'Rain/Drizzle', items: [
|
||||
{ label: '-RA/DZ', color: '#90ee90'},
|
||||
{ label: 'RA/DZ', color: '#228b22'},
|
||||
{ label: '+RA/DZ', color: '#006400'}
|
||||
]},
|
||||
// Other Phenomena
|
||||
{ group: 'Other WX', items: [
|
||||
{ label: 'BLSN', color: 'red'},
|
||||
{ label: 'UP', color: '#dda0dd'}
|
||||
]},
|
||||
// Status/Misc
|
||||
{ group: 'Status', items: [
|
||||
// Use the specific very light gray color here
|
||||
{ label: 'No Precip / Other WX', color: noPrecipColor },
|
||||
{ label: 'Missing Ob', color: 'white' }
|
||||
]}
|
||||
];
|
||||
|
||||
legendData.forEach(groupData => {
|
||||
groupData.items.forEach(item => {
|
||||
const legendItem = document.createElement('div');
|
||||
legendItem.className = 'legend-item';
|
||||
|
||||
const colorBox = document.createElement('div');
|
||||
colorBox.className = 'legend-color';
|
||||
colorBox.style.backgroundColor = item.color;
|
||||
// Add border to white and very light gray boxes so they are visible
|
||||
if (item.color === 'white' || item.color === noPrecipColor) {
|
||||
colorBox.style.borderColor = '#ccc'; // Use a light border for contrast
|
||||
} else {
|
||||
colorBox.style.borderColor = '#000'; // Keep black border for colored boxes
|
||||
}
|
||||
legendItem.appendChild(colorBox);
|
||||
|
||||
const label = document.createElement('span');
|
||||
label.textContent = item.label;
|
||||
legendItem.appendChild(label);
|
||||
|
||||
legendContainer.appendChild(legendItem);
|
||||
});
|
||||
// Optional spacer can still be added here if desired
|
||||
});
|
||||
}
|
||||
|
||||
// Call generateLegend once on load to show it initially
|
||||
generateLegend();
|
||||
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
90
metararchive.py
Normal file
90
metararchive.py
Normal file
@@ -0,0 +1,90 @@
|
||||
import requests
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
import re
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
import time
|
||||
|
||||
|
||||
metarlist = ['KCRW','KHTS','KBKW','KEKN','KPKB','KCKB','KBLF','K6L4','K48I','KHLG','KI16','KLWB','KMGW','KMRB','KW22','KW99','KUNI','KJRO','KPMH','KDWU','KZZV','KLNP','KJKL','KPBX','KSYM','KFGX','KLHQ','KRZT','KCMH']
|
||||
|
||||
|
||||
S = requests.Session()
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
|
||||
|
||||
|
||||
#https://aviationweather.gov/api/data/metar?ids=KCRW%2CKHTS%2CKPKB&format=json&taf=false&hours=5
|
||||
|
||||
fetcharray = []
|
||||
|
||||
|
||||
def createurl(metarlist):
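# Build one request URL covering every station, joining the IDs with '%2C' (a URL-encoded comma) and asking for the last 2 hours of METARs as JSON.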
|
||||
baseurl = 'https://aviationweather.gov/api/data/metar?ids='
|
||||
for i, metar in enumerate(metarlist):
|
||||
if i == len(metarlist) - 1:
|
||||
baseurl = baseurl + metar
|
||||
else:
|
||||
baseurl = baseurl + metar + "%2C"
|
||||
baseurl = baseurl + "&format=json&taf=false&hours=2"
|
||||
fetcharray.append(baseurl)
|
||||
|
||||
|
||||
def backfillmetars(metarlist):
|
||||
for i in metarlist:
|
||||
baseurl = 'https://aviationweather.gov/api/data/metar?ids='
|
||||
baseurl = baseurl + i + "&format=json&taf=false&hours=120"
|
||||
fetcharray.append(baseurl)
|
||||
#print(baseurl)
|
||||
|
||||
#backfillmetars(['KCRW'])
|
||||
createurl(metarlist)
|
||||
|
||||
|
||||
for k in fetcharray:
|
||||
|
||||
|
||||
content = S.get(k)
|
||||
|
||||
|
||||
metardict = content.json()
|
||||
print(metardict)
|
||||
|
||||
|
||||
for i in metardict:
|
||||
obid = i.get('metar_id')
|
||||
# Generate a unique obid if it's None
|
||||
if obid is None:
|
||||
# Use a combination of icaoId and reportTime to generate a unique identifier
|
||||
icao = i.get('icaoId')
|
||||
obtime = i.get('reportTime')
|
||||
if icao is not None and obtime is not None:
|
||||
obid = f"{icao}_{obtime}"
|
||||
else:
|
||||
# If both are None, we can't generate a reliable ID, so skip this record
|
||||
print(f"Skipping record due to missing icaoId or reportTime: {i}")
|
||||
continue
|
||||
else:
|
||||
icao = i.get('icaoId')
|
||||
obtime = i.get('reportTime')
|
||||
|
||||
temp = i.get('temp')
|
||||
dewp = i.get('dewp')
|
||||
visib = i.get('visib')
|
||||
wx = i.get('wxString')
|
||||
precip1 = i.get('precip')
|
||||
precip3 = i.get('precip3hr')
|
||||
precip6 = i.get('precip6hr')
|
||||
lat = i.get('lat')
|
||||
lon = i.get('lon')
|
||||
raw = i.get('rawOb')
|
||||
stationname = i.get('name')
|
||||
|
||||
params = (obid, icao, obtime, temp, dewp, visib, wx, precip1, precip3, precip6, lat, lon, raw, stationname)
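# 'on conflict do nothing' makes the insert idempotent: rows that violate an existing unique constraint (presumably on obid) are silently skipped, so re-running the script does not duplicate observations.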
|
||||
sql = "INSERT INTO metars (obid, icao, obtime, temp, dewp, visib, wx, precip1, precip3, precip6, lat, lon, raw, stationname) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) on conflict do nothing"
|
||||
cursor.execute(sql,params)
|
||||
conn.commit()
|
||||
conn.close()
|
||||
73
mp4.php
Normal file
73
mp4.php
Normal file
@@ -0,0 +1,73 @@
|
||||
<?php
|
||||
//error_reporting(E_ALL);
|
||||
ini_set('display_errors', 'off');
|
||||
// Create an array containing file paths, resource var (initialized with imagecreatefromXXX),
|
||||
// image URLs or even binary code from image files.
|
||||
// All sorted in order to appear.
|
||||
|
||||
//$array=json_decode($_POST['camarray']);
|
||||
|
||||
|
||||
|
||||
$elements = $_POST['data'];
|
||||
$numimages = $_POST['images'];
|
||||
$delay = $_POST['delay'];
|
||||
$lastdelay = $_POST['lastdelay'];
|
||||
$maxh = $_POST['maxh'];
|
||||
$maxv = $_POST['maxv'];
|
||||
|
||||
if (! is_numeric($maxh)) {
|
||||
$maxh = 500;
|
||||
|
||||
}
|
||||
|
||||
if (! is_numeric($maxv)) {
|
||||
$maxv = 400;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
$numimages = $numimages - 1;
|
||||
//$elements = json_decode($elements);
|
||||
|
||||
|
||||
$inputfiles = "";
|
||||
|
||||
//foreach ($elements as &$value) {
|
||||
// $newin = $value. ":";
|
||||
// $inputfiles = $inputfiles.$newin;
|
||||
//}
|
||||
//$inputfiles = rtrim($inputfiles, ":");
|
||||
//echo $inputfiles;
|
||||
//echo "ffmpeg -i concat:{$inputfiles} output.avi 2>&1";
|
||||
//echo shell_exec("ffmpeg -i concat:{$inputfiles} output.avi 2>&1");
|
||||
foreach ($elements as $value) {
|
||||
if ($value != $elements[array_key_last($elements)]) {
|
||||
$inputfiles = $inputfiles . " -delay {$delay} {$value}";
|
||||
}
|
||||
if ($value == $elements[array_key_last($elements)]) {
|
||||
$inputfiles = $inputfiles . " -delay {$lastdelay} {$value}";
|
||||
}
|
||||
}
|
||||
|
||||
//echo "convert -delay 20 {$inputfiles} output.gif 2>&1";
|
||||
$gif = shell_exec("convert {$inputfiles} -resize {$maxh}x{$maxv}\> -layers Optimize gif:-");
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
echo base64_encode($gif);
|
||||
exit;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
?>
|
||||
901
news.html
Normal file
901
news.html
Normal file
@@ -0,0 +1,901 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<script src="https://code.jquery.com/ui/1.13.1/jquery-ui.js" integrity="sha256-6XMVI0zB8cRzfZjqKcD01PBsAy3FlDASrlC8SxCpInY=" crossorigin="anonymous"></script>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>RLX News</title>
|
||||
<style>
|
||||
/* --- Flexbox Sticky Footer --- */
|
||||
html {
|
||||
height: 100%;
|
||||
}
|
||||
body {
|
||||
min-height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html, body {
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: #f4f4f4;
|
||||
padding: 10px;
|
||||
box-sizing: border-box;
|
||||
padding-bottom: 0;
|
||||
}
|
||||
|
||||
#full-display-container, #sad-display-area {
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
.form-container {
|
||||
max-width: 800px;
|
||||
position: relative;
|
||||
padding: 15px;
|
||||
background: white;
|
||||
margin-bottom: 10px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.toggle-btn, button { font-size: 20px; padding: 8px 15px; cursor: pointer; }
|
||||
button { background-color: #4CAF50; color: white; border: none; border-radius: 4px; }
|
||||
button:hover { background-color: #45a049; }
|
||||
.form-group label { font-size: 20px; margin-bottom: 5px; display: block; }
|
||||
input, textarea { width: 100%; padding: 10px; font-size: 18px; box-sizing: border-box; }
|
||||
|
||||
#full-display-container {
|
||||
max-width: 100%;
|
||||
margin: 0 auto;
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
#sad-display-area {
|
||||
flex-grow: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
#top-stories-container, #bottom-stories-container {
|
||||
flex-grow: 1;
|
||||
overflow: hidden;
|
||||
min-height: 0;
|
||||
}
|
||||
#top-stories-container { flex-basis: 50%; flex-shrink: 0; }
|
||||
#bottom-stories-container { background: #e9e9e9; padding: 5px; border-radius: 8px; }
|
||||
|
||||
.scroller-inner {
|
||||
animation-name: continuous-scroll;
|
||||
animation-timing-function: linear;
|
||||
animation-iteration-count: infinite;
|
||||
}
|
||||
|
||||
.content-block {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
@keyframes continuous-scroll {
|
||||
0% {
|
||||
transform: translateY(0);
|
||||
}
|
||||
100% {
|
||||
transform: translateY(-50%);
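/* -50% equals exactly one content block, because each scroller holds two identical blocks (see createScroller), which makes the loop seamless */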
|
||||
}
|
||||
}
|
||||
|
||||
.news-item { background: white; padding: 5px; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); display: flex; align-items: center; }
|
||||
.news-item img { width: 150px; height: 100px; object-fit: cover; border-radius: 6px; margin-right: 10px; }
|
||||
.news-content { flex: 2; }
|
||||
.headline { color: #333; font-size: 36px; margin: 0 0 5px 0; line-height: 1.0; font-weight: bold; }
|
||||
.summary { color: #666; font-size: 28px; margin: 0 0 5px 0; line-height: 1.0; display: -webkit-box; -webkit-line-clamp: 4; -webkit-box-orient: vertical; }
|
||||
.storylink { color: #007BFF; text-decoration: none; }
|
||||
.storylink:hover { text-decoration: underline; }
|
||||
|
||||
.relevance-high { background-color: lightblue; }
|
||||
.relevance-really-high { background-color: cyan; }
|
||||
.relevance-super-high { background-color: yellow; }
|
||||
.relevance-mazza-high { background-color: orange; }
|
||||
.relevance-cheech-high { background-color: #FF8790; }
|
||||
|
||||
@keyframes flashRedOutline {
|
||||
0% { outline: 7px solid red; }
|
||||
50% { outline: 7px solid transparent; }
|
||||
100% { outline: 7px solid red; }
|
||||
}
|
||||
.new-story-flash {
|
||||
animation: flashRedOutline 2s linear infinite;
|
||||
border-radius: 8px; /* Match the news-item border-radius */
|
||||
}
|
||||
|
||||
#ticker-container {
|
||||
width: 100%;
|
||||
background-color: black;
|
||||
overflow: hidden;
|
||||
padding: 5px 0;
|
||||
box-sizing: border-box;
|
||||
z-index: 1000;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
#ticker-content {
|
||||
display: inline-block;
|
||||
white-space: nowrap;
|
||||
/* animation removed, now handled by JS */
|
||||
}
|
||||
|
||||
#ticker-content > span {
|
||||
margin: 0 20px;
|
||||
font-size: 30px;
|
||||
}
|
||||
|
||||
.ticker-year { color: lightgray; }
|
||||
.ticker-event { color: white; }
|
||||
.ticker-report { color: lightblue; }
|
||||
.ticker-wikimedia { color: lightpink; } /* Style for Wikimedia events */
|
||||
.ticker-holiday { color: lightgreen; } /* Style for holiday events */
|
||||
.ticker-upcoming { color: cyan; } /* Style for upcoming events */
|
||||
|
||||
@media (max-width: 768px) {
|
||||
#full-display-container {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
.content-block {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="form-container" id="form-container">
|
||||
<button class="toggle-btn" onclick="toggleForm();">Expand News Search Dialog</button>
|
||||
<div id="searchForm" style="display: none;">
|
||||
<div class="form-group"><label for="startTime">Start Time (Zulu/UTC):</label><input type="datetime-local" id="startTime" name="startTime" required></div>
|
||||
<div class="form-group"><label for="endTime">End Time (Zulu/UTC):</label><input type="datetime-local" id="endTime" name="endTime" required></div>
|
||||
<div class="form-group"><label for="keyTerms">Key Terms (comma-separated):</label><textarea id="keyTerms" name="keyTerms" rows="3" required>weather,flood,fire,fog,snow,emergency,wind,ice,rain,power,explosion,warmer,colder,drown,stream,river,air,wind,destroyed,rime,glaze,river,ice,creek,crash,thunder,spinup,black ice,fog,spill,pileup,pile-up,gust,frozen,funnel,rainfall,fatal,injury,sleet,injured,frost,dead,death,landslide,culvert,slippery,wildfire,tornado,blizzard,creek,hail,thunderstorm,downburst,microburst,crash,heatstroke,derecho,lightning,hypothermia,slide,flow,ski,water,innundation,victim,victims,flooding,flooded,snowing,freezing rain,clouds,cloud,storm,aircraft</textarea></div>
|
||||
<button onclick="updatenews();">Submit</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="full-display-container"></div>
|
||||
|
||||
<div class="display-area" id="sad-display-area" style="display: none;">
|
||||
<div id="top-stories-container"></div>
|
||||
<div id="bottom-stories-container"></div>
|
||||
</div>
|
||||
|
||||
<div id="ticker-container" style="display: none;">
|
||||
<div id="ticker-content"></div>
|
||||
</div>
|
||||
<script>
|
||||
let lastTickerData = null;
|
||||
let isSadMode = false;
|
||||
var refreshTimer;
|
||||
var tickerWatchdogTimer;
|
||||
const NUM_TOP_STORIES = 8;
|
||||
const BOTTOM_SCROLLER_SPEED_MULTIPLIER = 2;
|
||||
window.currentWikimediaEvents = []; // Initialize the global variable
|
||||
let tickerCycleCount = 0;
|
||||
let upcomingHolidays = [];
|
||||
let triviaQuestions = [];
|
||||
let lastTriviaFetchTime = 0;
|
||||
|
||||
// Always fetch fresh Wikimedia events
|
||||
// No need to cache them between cycles
|
||||
|
||||
const TARGET_BROADCAST_SECONDS = 150;
|
||||
const PIXELS_PER_SECOND_SPEED = 150; // Adjust this value to control scroll speed. Higher is faster.
|
||||
let lastTickerUpdateTime = Date.now();
|
||||
let lastNewsData = null;
|
||||
let currentNewsUrl = 'https://wx.stoat.org/lsr.php?news3=potato';
|
||||
|
||||
let nextTickerHtml = null;
|
||||
let animationId = null;
|
||||
let tickerPosition = 0;
|
||||
let lastTime = 0;
|
||||
let tickerContent = document.getElementById('ticker-content');
|
||||
let injectionHtml = null;
|
||||
|
||||
function animateTicker(currentTime) {
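// Frame-rate-independent scroll: move left by PIXELS_PER_SECOND_SPEED * elapsed seconds each frame; once the content has scrolled fully out of view, swap in the next prepared ticker HTML and restart from the right.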
|
||||
if (!lastTime) lastTime = currentTime;
|
||||
const deltaTime = (currentTime - lastTime) / 1000;
|
||||
lastTime = currentTime;
|
||||
tickerPosition -= PIXELS_PER_SECOND_SPEED * deltaTime;
|
||||
const scrollWidth = tickerContent.scrollWidth;
|
||||
if (tickerPosition <= -scrollWidth) {
|
||||
updateTickerContent();
|
||||
tickerPosition = 0;
|
||||
}
|
||||
tickerContent.style.transform = `translateX(${tickerPosition}px)`;
|
||||
animationId = requestAnimationFrame(animateTicker);
|
||||
}
|
||||
|
||||
function updateTickerContent() {
|
||||
if (nextTickerHtml) {
|
||||
tickerContent.innerHTML = nextTickerHtml;
|
||||
const containerWidth = document.getElementById('ticker-container').clientWidth;
|
||||
tickerPosition = -containerWidth;
|
||||
nextTickerHtml = null;
|
||||
// Fetch new
|
||||
fetchAndDisplayTickerData(false);
|
||||
}
|
||||
}
|
||||
|
||||
function fetchAndDisplayNews(url = 'https://wx.stoat.org/lsr.php?news3=potato') {
|
||||
// Update the current URL if provided
|
||||
if (url) {
|
||||
currentNewsUrl = url;
|
||||
}
|
||||
|
||||
$.getJSON(currentNewsUrl, function(newsData) {
|
||||
// Sort the data
|
||||
newsData.sort((a, b) => {
|
||||
if (b.impact_score !== a.impact_score) return b.impact_score - a.impact_score;
|
||||
return new Date(b.timeutc) - new Date(a.timeutc);
|
||||
});
|
||||
|
||||
// Check if data has changed
|
||||
if (lastNewsData && JSON.stringify(newsData) === JSON.stringify(lastNewsData)) {
|
||||
console.log('News data unchanged, skipping update');
|
||||
return;
|
||||
}
|
||||
|
||||
// Update cache
|
||||
lastNewsData = newsData;
|
||||
|
||||
if (isSadMode) {
|
||||
const topContainer = document.getElementById('top-stories-container');
|
||||
const bottomContainer = document.getElementById('bottom-stories-container');
|
||||
topContainer.innerHTML = '';
|
||||
bottomContainer.innerHTML = '';
|
||||
|
||||
const topStories = newsData.slice(0, NUM_TOP_STORIES);
|
||||
const scrollingStories = newsData.slice(NUM_TOP_STORIES);
|
||||
|
||||
function createScroller(stories, durationMultiplier, isBottomScroller = false) {
|
||||
if (stories.length === 0) return null;
|
||||
const scrollerInner = document.createElement('div');
|
||||
scrollerInner.className = 'scroller-inner';
|
||||
const contentBlock1 = document.createElement('div');
|
||||
contentBlock1.className = 'content-block';
|
||||
stories.forEach(news => contentBlock1.appendChild(createNewsItem(news)));
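// Render the stories once, then clone the whole block below so the -50% translateY keyframe can loop without a visible seam.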
|
||||
const contentBlock2 = contentBlock1.cloneNode(true);
|
||||
contentBlock2.setAttribute('aria-hidden', 'true');
|
||||
scrollerInner.appendChild(contentBlock1);
|
||||
scrollerInner.appendChild(contentBlock2);
|
||||
const duration = stories.length * durationMultiplier;
|
||||
scrollerInner.style.animationName = 'continuous-scroll';
|
||||
scrollerInner.style.animationDuration = `${duration}s`;
|
||||
// Ensure no delay is applied to any scroller
|
||||
scrollerInner.style.animationDelay = '0s';
|
||||
return scrollerInner;
|
||||
}
|
||||
|
||||
const topScroller = createScroller(topStories, 7, false);
|
||||
if (topScroller) topContainer.appendChild(topScroller);
|
||||
|
||||
const bottomScroller = createScroller(scrollingStories, BOTTOM_SCROLLER_SPEED_MULTIPLIER, true);
|
||||
if (bottomScroller) bottomContainer.appendChild(bottomScroller);
|
||||
|
||||
} else {
|
||||
const fullContainer = document.getElementById('full-display-container');
|
||||
fullContainer.innerHTML = '';
|
||||
newsData.forEach(news => fullContainer.appendChild(createNewsItem(news)));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function createNewsItem(news) {
|
||||
const newsItem = document.createElement('div');
|
||||
newsItem.className = 'news-item';
|
||||
let score = news.impact_score;
|
||||
const storyTime = new Date(news.timeutc);
|
||||
const currentTime = new Date();
|
||||
const oneHourInMs = 3600000;
|
||||
|
||||
// Add flashRedOutline class if the story is less than 1 hour old
|
||||
if (currentTime - storyTime < oneHourInMs) {
|
||||
newsItem.classList.add('new-story-flash');
|
||||
}
|
||||
|
||||
const relevanceClasses = {
|
||||
'relevance-high': score > 15,
|
||||
'relevance-really-high': score > 25,
|
||||
'relevance-super-high': score > 50,
|
||||
'relevance-mazza-high': score > 90,
|
||||
'relevance-cheech-high': score > 150
|
||||
};
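// Every threshold the score clears adds its class; later rules in the stylesheet override earlier ones, so the highest qualifying tier determines the background color.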
|
||||
Object.entries(relevanceClasses).filter(([, c]) => c).forEach(([cN]) => newsItem.classList.add(cN));
|
||||
|
||||
newsItem.innerHTML = `<a href="${news.storylink}" target="_blank"><img src="${news.imageurl}"></a><div class="news-content"><h2 class="headline"><a href="${news.storylink}" target="_blank" class="storylink">(${extractTextBetweenHttpAndCom(news.storylink)}) ${news.headline}</a></h2><p class="summary">${news.summary} ${convertPostgresTimestamp(news.timeutc)}L</p></div>`;
|
||||
return newsItem;
|
||||
}
|
||||
|
||||
function extractTextBetweenHttpAndCom(url) {
|
||||
url = url.replace(/www\./, '');
|
||||
const match = url.match(/https?:\/\/(.*?)\.com/);
|
||||
return match && match[1] ? match[1].toUpperCase() : 'FAUXNEWS';
|
||||
}
|
||||
|
||||
function convertPostgresTimestamp(timestamp) {
|
||||
const d = new Date(timestamp.replace('Z', ''));
|
||||
return `${d.getFullYear()}-${String(d.getMonth()+1).padStart(2,'0')}-${String(d.getDate()).padStart(2,'0')} ${String(d.getHours()).padStart(2,'0')}:${String(d.getMinutes()).padStart(2,'0')}`;
|
||||
}
|
||||
|
||||
function toggleForm() {
|
||||
const formContent = document.getElementById("searchForm");
|
||||
const toggleBtn = document.querySelector(".toggle-btn");
|
||||
if (formContent.style.display === "none") {
|
||||
formContent.style.display = "block";
|
||||
toggleBtn.textContent = "Collapse News Search Dialog";
|
||||
if (refreshTimer) clearInterval(refreshTimer);
|
||||
// Stop ticker when form is expanded (not in SAD mode)
|
||||
if (animationId) {
|
||||
cancelAnimationFrame(animationId);
|
||||
animationId = null;
|
||||
}
|
||||
if (tickerWatchdogTimer) {
|
||||
clearInterval(tickerWatchdogTimer);
|
||||
tickerWatchdogTimer = null;
|
||||
}
|
||||
// Hide ticker when form is expanded
|
||||
updateTickerVisibility();
|
||||
} else {
|
||||
formContent.style.display = "none";
|
||||
toggleBtn.textContent = "Expand News Search Dialog";
|
||||
// Always use the current URL
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
refreshTimer = setInterval(() => {
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
}, 300000);
|
||||
// Update ticker visibility based on mode
|
||||
updateTickerVisibility();
|
||||
}
|
||||
}
|
||||
|
||||
function completelyHide() { document.getElementById("form-container").style.display = "none"; }
|
||||
|
||||
function updatenews() {
|
||||
const start = document.getElementById("startTime").value;
|
||||
const end = document.getElementById("endTime").value;
|
||||
const keyTerms = document.getElementById("keyTerms").value;
|
||||
const terms = keyTerms.split(',');
|
||||
let arrayterms = terms.map(term => `key[]=${encodeURIComponent(term)}`).join('&');
|
||||
const url = `lsr.php?newsarchive=true&start=${start}&end=${end}&${arrayterms}`;
|
||||
// Clear the cache to force an update
|
||||
lastNewsData = null;
|
||||
fetchAndDisplayNews(url);
|
||||
}
|
||||
|
||||
function sadCheck() {
|
||||
const params = new URLSearchParams(document.location.search);
|
||||
if (params.has("sad") || params.has("SAD")) {
|
||||
isSadMode = true;
|
||||
completelyHide();
|
||||
Object.assign(document.documentElement.style, {height: '100%'});
|
||||
Object.assign(document.body.style, {height: '100%', overflow: 'hidden', display: 'flex', flexDirection: 'column'});
|
||||
document.getElementById('sad-display-area').style.display = 'flex';
|
||||
document.getElementById('full-display-container').style.display = 'none';
|
||||
// Update ticker visibility and start when entering SAD mode
|
||||
updateTickerVisibility();
|
||||
fetchAndDisplayTickerData(true);
|
||||
startTickerWatchdog();
|
||||
}
|
||||
}
|
||||
|
||||
function format_date_with_ordinal(date) {
|
||||
const day = date.getDate();
|
||||
const month = date.toLocaleString('default', { month: 'long' });
|
||||
const get_ordinal_suffix = (day) => {
|
||||
if (day > 3 && day < 21) return 'th';
|
||||
switch (day % 10) {
|
||||
case 1: return "st";
|
||||
case 2: return "nd";
|
||||
case 3: return "rd";
|
||||
default: return "th";
|
||||
}
|
||||
};
|
||||
const suffix = get_ordinal_suffix(day);
|
||||
return `${month} ${day}${suffix}`;
|
||||
}
|
||||
|
||||
function fetchWikimediaEvents() {
|
||||
const now = new Date();
|
||||
|
||||
// Get the individual components
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0'); // Months are 0-indexed, so add 1
|
||||
const day = String(now.getDate()).padStart(2, '0');
|
||||
const hours = String(now.getHours()).padStart(2, '0');
|
||||
|
||||
// Concatenate them into the final string
|
||||
const formattedDate = `${year}${month}${day}${hours}`;
|
||||
|
||||
|
||||
const url = 'https://wx.stoat.org/calendar/wikimedia_onthisday.json';
|
||||
const cacheBustingUrl = `${url}?v=${formattedDate}`;
|
||||
|
||||
return $.getJSON(cacheBustingUrl)
|
||||
.done(function(data) {
|
||||
if (data && data.events && Array.isArray(data.events) && data.events.length > 0) {
|
||||
// Always shuffle and use all events
|
||||
window.currentWikimediaEvents = [...data.events].sort(() => 0.5 - Math.random());
|
||||
} else {
|
||||
console.warn("Wikimedia JSON is empty, invalid, or does not contain an 'events' array.");
|
||||
window.currentWikimediaEvents = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to load from ${cacheBustingUrl}. Status: ${textStatus}, Error: ${errorThrown}`);
|
||||
window.currentWikimediaEvents = [];
|
||||
});
|
||||
}
|
||||
|
||||
function fetchHolidays() {
|
||||
const now = new Date();
|
||||
|
||||
// Format today's date as YYYY-MM-DD to match the API response keys
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(now.getDate()).padStart(2, '0');
|
||||
const todayFormatted = `${year}-${month}-${day}`;
|
||||
|
||||
// Get the individual components for cache busting
|
||||
const hours = String(now.getHours()).padStart(2, '0');
|
||||
const formattedDate = `${year}${month}${day}${hours}`;
|
||||
|
||||
const url = 'https://calendar.wx4rlx.org/get_holidays.py';
|
||||
const cacheBustingUrl = `${url}?time=${formattedDate}`;
|
||||
|
||||
return $.getJSON(cacheBustingUrl)
|
||||
.done(function(data) {
|
||||
if (data && data.holidays) {
|
||||
// Get holidays for today using the formatted date as the key
|
||||
window.currentHolidays = data.holidays[todayFormatted] || [];
|
||||
|
||||
// Store upcoming holidays for the next 9 days
|
||||
upcomingHolidays = [];
|
||||
for (let i = 1; i <= 9; i++) {
|
||||
const nextDate = new Date(now);
|
||||
nextDate.setDate(now.getDate() + i);
|
||||
const nextYear = nextDate.getFullYear();
|
||||
const nextMonth = String(nextDate.getMonth() + 1).padStart(2, '0');
|
||||
const nextDay = String(nextDate.getDate()).padStart(2, '0');
|
||||
const nextFormatted = `${nextYear}-${nextMonth}-${nextDay}`;
|
||||
|
||||
const nextHolidays = data.holidays[nextFormatted] || [];
|
||||
if (nextHolidays.length > 0) {
|
||||
upcomingHolidays.push({
|
||||
date: nextDate,
|
||||
holidays: nextHolidays
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
console.warn("Holidays JSON is empty, invalid, or does not contain 'holidays'.");
|
||||
window.currentHolidays = [];
|
||||
upcomingHolidays = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to load holidays from ${cacheBustingUrl}. Status: ${textStatus}, Error: ${errorThrown}`);
|
||||
window.currentHolidays = [];
|
||||
upcomingHolidays = [];
|
||||
});
|
||||
}
|
||||
|
||||
function fetchTriviaQuestions() {
|
||||
const now = Date.now();
|
||||
// Check if we need to fetch new questions and respect the 5-second rate limit
|
||||
if (triviaQuestions.length > 10 || now - lastTriviaFetchTime < 5000) {
|
||||
return $.Deferred().resolve().promise();
|
||||
}
|
||||
|
||||
lastTriviaFetchTime = now;
|
||||
const url = 'https://opentdb.com/api.php?amount=50&type=multiple';
|
||||
return $.getJSON(url)
|
||||
.done(function(data) {
|
||||
if (data.response_code === 0 && data.results) {
|
||||
triviaQuestions = data.results;
|
||||
console.log(`Fetched ${triviaQuestions.length} trivia questions`);
|
||||
} else {
|
||||
console.warn('Trivia API returned non-zero response code or no results');
|
||||
triviaQuestions = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to fetch trivia questions: ${textStatus}, ${errorThrown}`);
|
||||
triviaQuestions = [];
|
||||
});
|
||||
}
|
||||
|
||||
function fetchInjection() {
|
||||
const injectionApiUrl = 'https://calendar.wx4rlx.org/onetime.py?action=api';
|
||||
return $.ajax({
|
||||
url: injectionApiUrl,
|
||||
dataType: 'text' // Treat the response as plain text
|
||||
})
|
||||
.done(function(data) {
|
||||
if (data && data.trim().length > 0) {
|
||||
injectionHtml = data;
|
||||
console.log('Injection content fetched:', injectionHtml);
|
||||
} else {
|
||||
injectionHtml = null;
|
||||
}
|
||||
})
|
||||
.fail(function() {
|
||||
console.log('No injection content available or error fetching');
|
||||
injectionHtml = null;
|
||||
});
|
||||
}
|
||||
|
||||
function fetchAndDisplayTickerData(startImmediately = true) {
|
||||
// First, fetch the injection content
|
||||
fetchInjection().always(function() {
|
||||
// Then fetch other data
|
||||
$.when(fetchWikimediaEvents(), fetchHolidays(), fetchTriviaQuestions()).always(function() {
|
||||
const tickerApiUrl = 'https://calendar.wx4rlx.org/?action=api';
|
||||
$.getJSON(tickerApiUrl, function(data) {
|
||||
if (data.status !== 'success') return;
|
||||
updateTickerLastUpdateTime();
|
||||
|
||||
// Always update the ticker with fresh data
|
||||
const today = new Date();
|
||||
const currentYear = today.getFullYear();
|
||||
const formatted_date = format_date_with_ordinal(today);
|
||||
const tickerContent = $('#ticker-content');
|
||||
|
||||
let localItems = [];
|
||||
data.events.forEach(item => localItems.push({ date: item.date, text: item.event, type: 'event' }));
|
||||
data.weather_reports.forEach(item => localItems.push({ date: item.date, text: item.report, type: 'report' }));
|
||||
// Add xmacis records
|
||||
if (data.xmacis_records) {
|
||||
data.xmacis_records.forEach(item => {
|
||||
// Extract year from the date field
|
||||
const year = parseInt(item.date.split('-')[0]);
|
||||
localItems.push({
|
||||
date: item.date,
|
||||
text: item.description,
|
||||
type: 'report', // Use 'report' type to match weather_reports color scheme
|
||||
year: year
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Increment cycle count
|
||||
tickerCycleCount++;
|
||||
|
||||
// Add upcoming events based on cycle count
|
||||
let upcomingEventItem = null;
|
||||
if (upcomingHolidays.length > 0) {
|
||||
// Every 10 cycles takes priority over every other cycle
|
||||
if (tickerCycleCount % 10 === 0) {
|
||||
const nextFiveDays = upcomingHolidays.slice(0, 5);
|
||||
let upcomingTexts = [];
|
||||
nextFiveDays.forEach(day => {
|
||||
const formattedDay = format_date_with_ordinal(day.date);
|
||||
upcomingTexts.push(`${formattedDay} - ${day.holidays.join(', ')}`);
|
||||
});
|
||||
if (upcomingTexts.length > 0) {
|
||||
upcomingEventItem = {
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: 'Upcoming Special Days: ' + upcomingTexts.join('; '),
|
||||
type: 'upcoming',
|
||||
year: 'Upcoming'
|
||||
};
|
||||
}
|
||||
}
|
||||
// Only show tomorrow's events if it's an even cycle AND not a multiple of 10
|
||||
else if (tickerCycleCount % 2 === 0) {
|
||||
const tomorrow = upcomingHolidays[0];
|
||||
if (tomorrow) {
|
||||
upcomingEventItem = {
|
||||
date: tomorrow.date.toISOString().split('T')[0],
|
||||
text: 'Upcoming Special Days: Tomorrow - ' + tomorrow.holidays.join(', '),
|
||||
type: 'upcoming',
|
||||
year: 'Tomorrow'
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add today's holidays to local items
|
||||
if (window.currentHolidays && window.currentHolidays.length > 0) {
|
||||
localItems.push({
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: 'Special Days: ' + window.currentHolidays.join(', '),
|
||||
type: 'holiday',
|
||||
year: 'Today'
|
||||
});
|
||||
}
|
||||
|
||||
// Add injection HTML at the beginning if available
|
||||
// Use the injectionHtml that was fetched at the start of this function
|
||||
if (injectionHtml) {
|
||||
localItems.unshift({
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: injectionHtml,
|
||||
type: 'injection',
|
||||
year: 'INJECTION'
|
||||
});
|
||||
}
|
||||
|
||||
// Add upcoming event at the very end if it exists
|
||||
if (upcomingEventItem) {
|
||||
localItems.push(upcomingEventItem);
|
||||
}
|
||||
|
||||
// Sort items by year
|
||||
localItems.sort((a, b) => {
|
||||
const getYear = (item) => {
|
||||
if (item.year !== undefined) {
|
||||
// Handle 'INJECTION', 'Today', 'Tomorrow', 'Upcoming' etc.
|
||||
if (isNaN(Number(item.year))) return Infinity;
|
||||
return Number(item.year);
|
||||
}
|
||||
return Number(item.date.split('-')[0]);
|
||||
};
|
||||
|
||||
const yearA = getYear(a);
|
||||
const yearB = getYear(b);
|
||||
|
||||
if (isNaN(yearA) && isNaN(yearB)) return 0;
|
||||
if (isNaN(yearA)) return 1;
|
||||
if (isNaN(yearB)) return -1;
|
||||
|
||||
return yearA - yearB;
|
||||
});
|
||||
|
||||
// Calculate duration of local items
|
||||
const tempLocalHtml = buildTickerHtml(localItems, currentYear);
|
||||
tickerContent.html(tempLocalHtml);
|
||||
const localWidth = tickerContent[0].scrollWidth;
|
||||
const localDuration = localWidth / PIXELS_PER_SECOND_SPEED;
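// Measure how long the local items take to scroll at the configured speed, then pad the pass with Wikimedia "on this day" items until the total run time approaches TARGET_BROADCAST_SECONDS.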
|
||||
|
||||
// Determine number of Wikimedia items to add
|
||||
let numToSprinkle = 0;
|
||||
if (window.currentWikimediaEvents && window.currentWikimediaEvents.length > 0) {
|
||||
// Always include at least one item
|
||||
numToSprinkle = 1;
|
||||
|
||||
// Calculate average width per Wikimedia item using the first few events
|
||||
const sampleEvents = window.currentWikimediaEvents.slice(0, Math.min(5, window.currentWikimediaEvents.length));
|
||||
let totalWidth = 0;
|
||||
sampleEvents.forEach(event => {
|
||||
const tempWikiItem = { date: `${event.year}-01-01`, text: event.text, type: 'wikimedia', year: event.year };
|
||||
tickerContent.html(buildTickerHtml([tempWikiItem], currentYear));
|
||||
totalWidth += tickerContent[0].scrollWidth;
|
||||
});
|
||||
|
||||
const avgWikiWidth = totalWidth / sampleEvents.length;
|
||||
const timePerWikiItem = avgWikiWidth / PIXELS_PER_SECOND_SPEED;
|
||||
|
||||
// Add more items if there's time
|
||||
const durationGap = TARGET_BROADCAST_SECONDS - localDuration;
|
||||
if (timePerWikiItem > 0 && durationGap > timePerWikiItem) {
|
||||
const additionalItems = Math.floor((durationGap - timePerWikiItem) / timePerWikiItem);
|
||||
numToSprinkle += Math.max(0, additionalItems);
|
||||
}
|
||||
numToSprinkle = Math.min(numToSprinkle, window.currentWikimediaEvents.length);
|
||||
}
|
||||
|
||||
// Add Wikimedia items to local items
|
||||
if (numToSprinkle > 0 && window.currentWikimediaEvents && window.currentWikimediaEvents.length > 0) {
|
||||
const eventsToAdd = window.currentWikimediaEvents.slice(0, numToSprinkle);
|
||||
eventsToAdd.forEach(event => {
|
||||
localItems.push({ date: `${event.year}-01-01`, text: event.text, type: 'wikimedia', year: event.year });
|
||||
});
|
||||
|
||||
// Re-sort with the new items
|
||||
localItems.sort((a, b) => {
|
||||
const getYear = (item) => {
|
||||
if (item.year !== undefined) {
|
||||
if (isNaN(Number(item.year))) return Infinity;
|
||||
return Number(item.year);
|
||||
}
|
||||
return Number(item.date.split('-')[0]);
|
||||
};
|
||||
|
||||
const yearA = getYear(a);
|
||||
const yearB = getYear(b);
|
||||
|
||||
if (isNaN(yearA) && isNaN(yearB)) return 0;
|
||||
if (isNaN(yearA)) return 1;
|
||||
if (isNaN(yearB)) return -1;
|
||||
|
||||
return yearA - yearB;
|
||||
});
|
||||
}
|
||||
|
||||
const finalContentHtml = buildTickerHtml(localItems, currentYear, formatted_date);
|
||||
|
||||
// Set the content
|
||||
tickerContent.html(finalContentHtml);
|
||||
nextTickerHtml = finalContentHtml; // For next update
|
||||
|
||||
if (startImmediately) {
|
||||
if (!animationId) {
|
||||
animationId = requestAnimationFrame(animateTicker);
|
||||
}
|
||||
// Clear injectionHtml after using it to prevent reuse in next cycle
|
||||
injectionHtml = null;
|
||||
// Fetch new for next cycle, which will fetch a new injection
|
||||
fetchAndDisplayTickerData(false);
|
||||
}
|
||||
|
||||
}).fail(function() {
|
||||
console.error("Failed to fetch data for the horizontal ticker.");
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function buildTickerHtml(items, currentYear, formatted_date) {
|
||||
let contentHtml = `<span style="display: inline-block; width: 100vw;"></span>`;
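// Start with a full-viewport-width spacer so each pass begins with the ticker scrolling in from the right edge.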
|
||||
|
||||
// First, add injection items at the very beginning
|
||||
const injectionItems = items.filter(item => item.type === 'injection');
|
||||
injectionItems.forEach(item => {
|
||||
contentHtml += `<span>${item.text}</span>`;
|
||||
});
|
||||
|
||||
// Add the "On This Day" header
|
||||
if (formatted_date) {
|
||||
contentHtml += `<span><span class="ticker-event">On This Day, ${formatted_date}:</span></span>`;
|
||||
}
|
||||
|
||||
// Add all other items (excluding injection items which we've already added)
|
||||
items.filter(item => item.type !== 'injection').forEach(item => {
|
||||
const year = item.year || parseInt(item.date.split('-')[0]);
|
||||
let textClass = `ticker-${item.type}`;
|
||||
// Add a specific class for holidays and upcoming events
|
||||
if (item.type === 'holiday') {
|
||||
textClass = 'ticker-holiday';
|
||||
} else if (item.type === 'upcoming') {
|
||||
textClass = 'ticker-upcoming';
|
||||
}
|
||||
const yearDiff = currentYear - year;
|
||||
let anniversaryPrefix = '';
|
||||
// Only show anniversary for positive year differences (past events)
|
||||
// Skip for holiday type
|
||||
if (item.type !== 'holiday' && yearDiff > 0 && yearDiff % 5 === 0) {
|
||||
anniversaryPrefix = `<span style="color: yellow; font-weight: bold;">${yearDiff} Years Ago: </span>`;
|
||||
}
|
||||
let itemText = item.text;
|
||||
let yearText = year;
|
||||
const arbitraryLength = 500;
|
||||
if (item.text.length > arbitraryLength) {
|
||||
const mazzaImgTag = '<img src="mazza.png" alt="Mazza" style="height: 1.2em; vertical-align: middle; margin: 0 0.3em;">';
|
||||
const imageCount = Math.floor((item.text.length - arbitraryLength) / 200);
|
||||
const imageTags = mazzaImgTag.repeat(imageCount);
|
||||
yearText = imageTags ? `${imageTags} ${year}` : year;
|
||||
}
|
||||
// For holidays and upcoming events, don't show the year prefix
|
||||
if (item.type === 'holiday' || item.type === 'upcoming') {
|
||||
contentHtml += `<span><span class="${textClass}">${itemText}</span></span>`;
|
||||
} else {
|
||||
contentHtml += `<span>${anniversaryPrefix}<span class="ticker-year">${yearText}:</span> <span class="${textClass}">${itemText}</span></span>`;
|
||||
}
|
||||
});
|
||||
|
||||
if (formatted_date) {
|
||||
contentHtml += `<span><span class="ticker-event">Office/Local Event</span></span>`;
|
||||
contentHtml += `<span><span class="ticker-wikimedia">World Event</span></span>`;
|
||||
contentHtml += `<span><span class="ticker-report">Local Weather Event</span></span>`;
|
||||
|
||||
// Add trivia question if available
|
||||
if (triviaQuestions.length > 0) {
|
||||
const trivia = triviaQuestions.shift();
|
||||
// Decode HTML entities in question and answers
|
||||
const question = $('<div>').html(trivia.question).text();
|
||||
const correctAnswer = $('<div>').html(trivia.correct_answer).text();
|
||||
const allAnswers = [correctAnswer, ...trivia.incorrect_answers.map(ans => $('<div>').html(ans).text())];
|
||||
|
||||
// Shuffle answers
|
||||
for (let i = allAnswers.length - 1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random() * (i + 1));
|
||||
[allAnswers[i], allAnswers[j]] = [allAnswers[j], allAnswers[i]];
|
||||
}
|
||||
|
||||
// Build question with choices
|
||||
let questionHtml = `<span style="color: gold;">TRIVIA: ${question} `;
|
||||
const choices = ['A', 'B', 'C', 'D'];
|
||||
allAnswers.forEach((answer, index) => {
|
||||
questionHtml += `${choices[index]}) ${answer} `;
|
||||
});
|
||||
questionHtml += `</span>`;
|
||||
contentHtml += `<span>${questionHtml}</span>`;
|
||||
|
||||
// Store the correct answer for later display
|
||||
// Find which choice corresponds to the correct answer
|
||||
const correctIndex = allAnswers.indexOf(correctAnswer);
|
||||
const correctChoice = choices[correctIndex];
|
||||
window.lastTriviaAnswer = { correctChoice, correctAnswer };
|
||||
}
|
||||
|
||||
contentHtml += `<span><span class="ticker-event">Visit <b>calendar.wx4rlx.org</b> to make updates or see info for upcoming days!</span></span>`;
|
||||
|
||||
// Add trivia answer if available
|
||||
if (window.lastTriviaAnswer) {
|
||||
const { correctChoice, correctAnswer } = window.lastTriviaAnswer;
|
||||
contentHtml += `<span><span style="color: gold;">ANSWER: ${correctChoice}) ${correctAnswer}</span></span>`;
|
||||
// Clear the answer after displaying it
|
||||
window.lastTriviaAnswer = null;
|
||||
}
|
||||
}
|
||||
return contentHtml;
|
||||
}
|
||||
|
||||
function startTickerWatchdog() {
|
||||
// Clear any existing watchdog
|
||||
if (tickerWatchdogTimer) {
|
||||
clearInterval(tickerWatchdogTimer);
|
||||
}
|
||||
// Check every 30 seconds if the ticker hasn't updated in 3x the expected duration
|
||||
tickerWatchdogTimer = setInterval(() => {
|
||||
const timeSinceLastUpdate = Date.now() - lastTickerUpdateTime;
|
||||
const maxAllowedTime = (TARGET_BROADCAST_SECONDS + 5) * 3 * 1000; // 3x expected duration in ms
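// e.g. if TARGET_BROADCAST_SECONDS is 150, this allows (150 + 5) * 3 = 465 s (~7.75 min) of silence before forcing a refresh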
|
||||
if (timeSinceLastUpdate > maxAllowedTime) {
|
||||
console.warn('Ticker watchdog triggered - forcing refresh');
|
||||
fetchAndDisplayTickerData(true);
|
||||
}
|
||||
}, 30000);
|
||||
}
|
||||
|
||||
function updateTickerLastUpdateTime() {
|
||||
lastTickerUpdateTime = Date.now();
|
||||
}
|
||||
|
||||
function updateTickerVisibility() {
|
||||
const tickerContainer = document.getElementById('ticker-container');
|
||||
if (isSadMode) {
|
||||
tickerContainer.style.display = 'block';
|
||||
} else {
|
||||
tickerContainer.style.display = 'none';
|
||||
// Stop animation and watchdog when hiding
|
||||
if (animationId) {
|
||||
cancelAnimationFrame(animationId);
|
||||
animationId = null;
|
||||
}
|
||||
if (tickerWatchdogTimer) {
|
||||
clearInterval(tickerWatchdogTimer);
|
||||
tickerWatchdogTimer = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Debug function to print ticker info and force next cycle
|
||||
window.debugTicker = function() {
|
||||
console.log('=== TICKER DEBUG INFO ===');
|
||||
console.log('Current cycle count:', tickerCycleCount);
|
||||
console.log('Upcoming holidays:', upcomingHolidays);
|
||||
console.log('Current Wikimedia events count:', window.currentWikimediaEvents ? window.currentWikimediaEvents.length : 0);
|
||||
console.log('Current holidays:', window.currentHolidays);
|
||||
console.log('Next ticker HTML length:', nextTickerHtml ? nextTickerHtml.length : 'null');
|
||||
console.log('Current ticker content:', document.getElementById('ticker-content').innerHTML);
|
||||
|
||||
// Force next cycle
|
||||
console.log('Forcing next ticker cycle...');
|
||||
tickerCycleCount++;
|
||||
fetchAndDisplayTickerData(true);
|
||||
};
|
||||
|
||||
sadCheck();
|
||||
toggleForm();
|
||||
refreshTimer = setInterval(() => {
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
}, 300000);
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
// Show/hide ticker based on initial mode
|
||||
updateTickerVisibility();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
426
news.py
Normal file
@@ -0,0 +1,426 @@
|
||||
import requests
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
import re
|
||||
from bs4 import BeautifulSoup
|
||||
import psycopg2
|
||||
from nltk.tokenize import word_tokenize
|
||||
from nltk.corpus import stopwords
|
||||
import nltk
|
||||
|
||||
# --- Configuration & Setup ---
|
||||
|
||||
# --- Database Connection ---
|
||||
try:
|
||||
conn = psycopg2.connect(
|
||||
host='localhost',
|
||||
database='nws',
|
||||
user='nws',
|
||||
password='nws'
|
||||
)
|
||||
cursor = conn.cursor()
|
||||
except psycopg2.OperationalError as e:
|
||||
print(f"FATAL: Could not connect to the database. Error: {e}")
|
||||
exit()
|
||||
|
||||
S = requests.Session()
|
||||
S.headers.update({'User-Agent': 'NWS Charleston News Aggregator Bot/1.0'})
|
||||
|
||||
# --- NLTK Downloader ---
|
||||
try:
|
||||
stopwords.words('english')
|
||||
nltk.data.find('tokenizers/punkt')
|
||||
except LookupError:  # raised when the stopwords/punkt data is not yet downloaded
|
||||
print("Downloading NLTK data (stopwords, punkt)...")
|
||||
nltk.download('stopwords', quiet=True)
|
||||
nltk.download('punkt', quiet=True)
|
||||
print("Download complete.")
|
||||
|
||||
# --- START: Original Scoring Components ---
|
||||
WEATHER_KEYWORDS = {
|
||||
'weather', 'flood', 'fire', 'fog', 'snow', 'emergency', 'wind', 'ice', 'rain',
|
||||
'power', 'explosion', 'warmer', 'colder', 'drown', 'stream', 'river', 'air',
|
||||
'destroyed', 'rime', 'glaze', 'creek', 'crash', 'thunder', 'spinup', 'black ice',
|
||||
'spill', 'pileup', 'pile-up', 'gust', 'frozen', 'funnel', 'rainfall', 'fatal',
|
||||
'injury', 'sleet', 'injured', 'frost', 'dead', 'death', 'landslide', 'culvert',
|
||||
'slippery', 'wildfire', 'tornado', 'blizzard', 'hail', 'thunderstorm',
|
||||
'downburst', 'microburst', 'heatstroke', 'derecho', 'lightning', 'hypothermia',
|
||||
'slide', 'flow', 'ski', 'water', 'inundation', 'victim', 'victims', 'flooding',
|
||||
'flooded','snowing','freezing rain','clouds','cloud','storm'
|
||||
}
|
||||
NEGATIVE_KEYWORDS = {
|
||||
'tennessee', 'frankfurt', 'louisville', 'pennsylvania', 'johnson co',
|
||||
'taylor swift', 'pittsburgh'
|
||||
}
|
||||
|
||||
def calculate_relevance(headline, summary):
|
||||
headline_str = (headline or "").lower()
|
||||
summary_str = (summary or "").lower()
|
||||
text = headline_str + " " + summary_str
|
||||
if not text.strip(): return 0.0
|
||||
tokens = word_tokenize(text)
|
||||
    if any(kw in text for kw in NEGATIVE_KEYWORDS): return 0.0  # substring check so multi-word entries like 'taylor swift' also match
|
||||
stop_words = set(stopwords.words('english'))
|
||||
filtered_tokens = [word for word in tokens if word not in stop_words and word.isalnum()]
|
||||
if not filtered_tokens: return 0.0
|
||||
matches = sum(1 for word in filtered_tokens if word in WEATHER_KEYWORDS)
|
||||
score = (matches / len(filtered_tokens)) * 2.0
|
||||
return round(min(1.0, score), 2)
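# Illustrative example (hypothetical headline, not from any feed):
#   calculate_relevance("Flood closes creek road", "High water reported")
#   filtered tokens: flood, closes, creek, road, high, water, reported
#   3 weather keywords out of 7 tokens -> (3/7) * 2.0, capped at 1.0 -> ~0.86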
|
||||
|
||||
# --- END: Original Scoring Components ---
|
||||
|
||||
|
||||
# --- Advanced Scorer Keywords ---
|
||||
IMPACT_KEYWORDS = {
|
||||
# High Impact (Catastrophic, Life-Threatening)
|
||||
'fatal': 25, 'fatality': 25, 'drown': 25, 'death': 25, 'killed': 25,
|
||||
'tornado': 120, 'derecho': 20, 'landslide': 50, 'mudslide': 50, 'evacuation': 100,
|
||||
'explosion': 100, 'spill': 15, 'derailment': 55, 'emergency': 15, 'injured': 10,
|
||||
'injury': 10, 'crash': 10, 'pileup': 10, 'pile-up': 100, 'flood': 40,
|
||||
'flooding': 10, 'inundation': 10, 'rescue': 10, 'wildfire': 40, 'water rescue': 100,
|
||||
'dam failure': 100,
|
||||
# New High Impact Suggestions
|
||||
'deceased': 25, 'victim': 25, 'tragedy': 25, # Synonyms for fatality
|
||||
'avalanche': 20, 'mudflow': 80, 'debris flow': 80, 'rockslide': 80, # Earth movement
|
||||
'levee break': 80, 'dam break': 120, 'levee breach': 100, # Water infrastructure failure
|
||||
'state of emergency': 20, 'disaster': 20, 'shelter': 15, # Official declarations & response
|
||||
'hazmat': 95, 'hazardous material': 55, # Secondary emergencies
|
||||
'hypothermia': 15, 'heat stroke': 55, 'heat injury': 55, # Direct human health impacts
|
||||
'structure collapse': 100, 'building collapse': 100, 'roof collapse': 35, # Damage
|
||||
|
||||
# Medium Impact (Significant Disruption & Damage)
|
||||
'thunderstorm': 40, 't-storm': 8, 'lightning': 80, 'hail': 80, 'power outage': 8,
|
||||
'outage': 8, 'downed trees': 50, 'tree down': 50, 'road closure': 7, 'closed': 7,
|
||||
'stuck': 7, 'stranded': 7, 'high water': 7, 'wind': 7, 'gust': 7, 'blizzard': 7,
|
||||
# New Medium Impact Suggestions
|
||||
'microburst': 50, 'downburst': 50, 'wind damage': 50, # Damaging winds
|
||||
'power lines down': 80, 'utility pole': 50, 'transformer fire': 8, # Power infrastructure
|
||||
'washout': 8, 'washed out': 8, 'submerged': 8, # Flooding impacts
|
||||
'accident': 7, 'wreck': 7, 'rollover': 7, # Transportation
|
||||
'roadblock': 7, 'detour': 7, 'traffic': 7, # Transportation flow
|
||||
'damage': 7, 'damaged': 7, # General damage term
|
||||
'funnel cloud': 99, 'waterspout': 99, # Tornadic precursors
|
||||
'storm surge': 9, 'coastal flood': 9, 'high surf': 8, 'rip current': 8, # Coastal impacts
|
||||
'dust storm': 8, 'haboob': 8, # Dust/visibility (regional)
|
||||
|
||||
# Situational Awareness (Large outdoor events sensitive to weather)
|
||||
'festival': 5, 'marathon': 5, 'state fair': 5, 'parade': 5,
|
||||
# New Situational Awareness Suggestions
|
||||
'concert': 5, 'game': 5, 'stadium': 5, 'county fair': 5, # Events
|
||||
'regatta': 5, 'triathlon': 5, 'outdoor event': 5, 'fireworks': 5, # Events
|
||||
|
||||
# Low Impact / Hazardous Conditions (Note: some scores are high due to potential)
|
||||
'rain': 5, 'snow': 15, 'sleet': 45, 'ice': 35, 'frost': 10,
|
||||
'storm': 5, 'slippery': 35, 'slide': 5, 'flow': 2,
|
||||
# New Low Impact / Hazardous Conditions Suggestions
|
||||
'freezing rain': 55, 'black ice': 55, 'icy roads': 55, # High-impact ice
|
||||
'fog': 25, 'low visibility': 15, 'whiteout': 55, 'blowing snow': 12, # Visibility hazards
|
||||
'heat wave': 32, 'excessive heat': 32, 'heat index': 10, 'wind chill': 10, # Temp hazards
|
||||
'heavy rain': 35, 'downpour': 35, 'heavy snow': 35, # Intense precip
|
||||
'slick': 35, 'treacherous': 55, # Road conditions
|
||||
'advisory': 6, 'warning': 8, 'watch': 7, # NWS products
|
||||
'i-64': 10, 'i-77': 10, 'i-79': 10, 'route 60': 10, 'corridor g': 10,
|
||||
'turnpike': 10, 'kanawha river': 15, 'ohio river': 15, 'elk river': 15,
|
||||
'mud river': 15, 'guyandotte': 15, 'gauley': 15, 'coal river': 15,
|
||||
}
|
||||
|
||||
GEOGRAPHIC_KEYWORDS = {
|
||||
# WV Counties
|
||||
'barbour', 'boone', 'braxton', 'cabell', 'calhoun', 'clay', 'doddridge', 'gilmer', 'harrison',
|
||||
'jackson', 'kanawha', 'lewis', 'lincoln', 'logan', 'mason', 'mingo', 'pleasants', 'putnam',
|
||||
'ritchie', 'roane', 'taylor', 'tyler', 'upshur', 'wayne', 'webster', 'wirt', 'wood', 'wyoming',
|
||||
# KY Counties
|
||||
'boyd', 'carter', 'greenup', 'lawrence',
|
||||
# OH Counties
|
||||
'athens', 'gallia', 'jackson', 'lawrence', 'meigs', 'morgan', 'perry', 'vinton',
|
||||
'washington',
|
||||
# VA Counties
|
||||
'buchanan', 'dickenson',
|
||||
|
||||
# Major Cities (Original)
|
||||
'charleston', 'huntington', 'parkersburg', 'clarksburg', 'ashland', 'marietta', 'portsmouth',
|
||||
|
||||
# --- NEW ADDITIONS ---
|
||||
# West Virginia Cities & Towns
|
||||
'philippi', 'madison', 'sutton', 'gassaway', 'barboursville', 'milton', 'grantsville', 'beckley',
|
||||
'west union', 'glenville', 'bridgeport', 'shinnston', 'salem', 'ripley', 'ravenswood',
|
||||
'south charleston', 'st. albans', 'dunbar', 'nitro', 'teays valley', 'hurricane',
|
||||
'weston', 'hamlin', 'logan', 'chapmanville', 'point pleasant', 'williamson', 'st. marys',
|
||||
'winfield', 'harrisville', 'pennsboro', 'spencer', 'grafton', 'middlebourne', 'sistersville',
|
||||
'buckhannon', 'wayne', 'kenova', 'ceredo', 'webster springs', 'elizabeth', 'vienna',
|
||||
'williamstown', 'pineville', 'mullens',
|
||||
|
||||
# Ohio Cities & Towns
|
||||
'belpre', 'nelsonville', 'gallipolis', 'wellston', 'oak hill', 'ironton', 'south point',
|
||||
'pomeroy', 'middleport', 'mcconnelsville', 'new lexington', 'somerset', 'mcarthur', 'vienna',
|
||||
|
||||
# Kentucky Cities & Towns
|
||||
'catlettsburg', 'grayson', 'olive hill', 'russell', 'louisa',
|
||||
|
||||
# Virginia Towns
|
||||
'grundy', 'clintwood'
|
||||
}
|
||||
|
||||
# --- MODIFICATION: Renamed and added your requested keywords ---
|
||||
PENALTY_KEYWORDS = {
|
||||
# Out-of-area locations
|
||||
'pittsburgh': -50, 'columbus': -25, 'cincinnati': -50, 'lexington': -25, 'texas': -50,
|
||||
'tennessee': -100, 'pennsylvania': -50,
|
||||
'morgantown': -40, 'bluefield': -40, 'wheeling': -50, # In WV, but outside RLX CWA
|
||||
'roanoke': -50, 'blacksburg': -50, # In VA, but outside RLX CWA
|
||||
'cleveland': -75, 'dayton': -50, 'akron': -60, # Other OH cities
|
||||
'louisville': -50, # Other KY cities
|
||||
'knoxville': -75, 'bristol': -50, # TN cities
|
||||
'north carolina': -100, 'maryland': -100, 'gatlinburg': -50,  # lowercase so it matches the lowercased text
|
||||
# Low-impact events
|
||||
'homecoming': -90, 'back to school': -40, 'dance': -20, 'book fair': -10, 'classes': -10, 'taylor swift': -100, 'trump': -50,
|
||||
}
|
||||
|
||||
def calculate_impact_score(headline, summary):
|
||||
headline_str = (headline or "").lower()
|
||||
summary_str = (summary or "").lower()
|
||||
# Combine headline and summary; headline_str is kept separate for the headline bonus below
|
||||
text = f"{headline_str} {summary_str}"
|
||||
|
||||
if not text.strip(): return 0
|
||||
|
||||
score = 0
|
||||
geo_relevance_found = False
|
||||
|
||||
# 0. Negation Set
|
||||
# If these words appear immediately before a keyword, ignore the keyword
|
||||
negators = {'no', 'not', 'unlikely', 'false', 'zero', 'minimal', 'avoided'}
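# Illustrative (hypothetical) example: in "officials say no flooding expected",
# 'flooding' is preceded by 'no' within two tokens, so its points are skipped below.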
|
||||
|
||||
# 1. Geographic Bonus (Unchanged but efficient)
|
||||
if any(area in text for area in GEOGRAPHIC_KEYWORDS):
|
||||
score += 50
|
||||
geo_relevance_found = True
|
||||
|
||||
# 2. Impact Keyword Scores with Negation Check
|
||||
tokens = word_tokenize(text) # Use NLTK tokens for positional accuracy
|
||||
|
||||
for keyword, value in IMPACT_KEYWORDS.items():
|
||||
# Handle multi-word keywords (e.g., "dam break") separately
|
||||
if " " in keyword:
|
||||
if keyword in text:
|
||||
score += value
|
||||
# Handle single-word keywords with negation check
|
||||
elif keyword in tokens:
|
||||
# Find index of keyword
|
||||
indices = [i for i, x in enumerate(tokens) if x == keyword]
|
||||
for i in indices:
|
||||
# Check previous 2 words for negators
|
||||
prev_words = set(tokens[max(0, i-2):i])
|
||||
if not prev_words.intersection(negators):
|
||||
score += value
|
||||
# Headline Bonus
|
||||
if keyword in headline_str:
|
||||
score += (value * 0.5)
|
||||
break # Only count the keyword once per article to prevent inflation
|
||||
|
||||
# 3. Compound Impact Bonus (New Feature)
|
||||
# Weather impacts rarely happen in isolation.
|
||||
# If we see multiple categories, boost the score.
|
||||
found_keywords = [k for k in IMPACT_KEYWORDS if k in text]
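# Note: the checks below are substring matches against the stringified list,
# so e.g. 'wind chill' in found_keywords also satisfies the 'wind' test.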
|
||||
if 'wind' in str(found_keywords) and 'outage' in str(found_keywords):
|
||||
score += 30 # Confidence boost: Wind causing outages
|
||||
if 'rain' in str(found_keywords) and 'slide' in str(found_keywords):
|
||||
score += 50 # Confidence boost: Rain causing mudslides
|
||||
|
||||
# 4. Penalty Logic (Refined)
|
||||
for keyword, penalty in PENALTY_KEYWORDS.items():
|
||||
if keyword in text:
|
||||
if not geo_relevance_found and keyword in ['pittsburgh', 'columbus', 'tennessee']:
|
||||
score += (penalty * 2.0) # Double penalty for out-of-area specifics
|
||||
else:
|
||||
score += penalty
|
||||
|
||||
return max(0, int(score))
|
||||
|
||||
|
||||
def standardize_time(time_string):
|
||||
if not time_string: return None
|
||||
time_string_cleaned = re.sub(r'(\.\d+)', '', str(time_string).strip())
|
||||
try:
|
||||
dt = datetime.fromisoformat(time_string_cleaned.replace('Z', '+00:00'))
|
||||
return dt.astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M")
|
||||
except (ValueError, TypeError):
|
||||
return None
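# Illustrative example: standardize_time("2025-11-27T19:54:00.123Z")
#   -> fractional seconds stripped, parsed as UTC -> "2025-11-27 19:54"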
|
||||
|
||||
# --- News Source Scraping Functions (Unchanged) ---
|
||||
def get_json(url):
|
||||
try:
|
||||
response = S.get(url, timeout=10)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
except (requests.RequestException, json.JSONDecodeError) as e:
|
||||
print(f"Error fetching or parsing JSON from {url}: {e}")
|
||||
return None
|
||||
|
||||
def trendingstories(url, extras):
|
||||
data = get_json(url + extras)
|
||||
if not data: return
|
||||
for item in data.get('teasers', []):
|
||||
story_path = item.get('url')
|
||||
if not story_path: continue
|
||||
stories.append([
|
||||
item.get('title'), item.get('summary'), url + story_path,
|
||||
item.get('updatedDateISO8601'),
|
||||
url + item.get('heroImageUrl') if item.get('heroImageUrl') else None, url
|
||||
])
|
||||
|
||||
def lakana(url, extras):
|
||||
data = get_json(url + extras)
|
||||
if not data: return
|
||||
|
||||
# Helper to handle the API returning either a List [] or a Dict {}
|
||||
def extract_articles(section_data):
|
||||
if isinstance(section_data, dict):
|
||||
# Normal case: it's a dict containing an "articles" key
|
||||
return section_data.get('articles', [])
|
||||
elif isinstance(section_data, list):
|
||||
# Edge case: it's already a list of articles (or empty list)
|
||||
return section_data
|
||||
return []
|
||||
|
||||
# Safely extract from both sections
|
||||
# one of these is likely a list, which was breaking the old code
|
||||
articles = extract_articles(data.get('additional_top_stories')) + \
|
||||
extract_articles(data.get('top_stories'))
|
||||
|
||||
for item in articles:
|
||||
# Ensure we are looking at a dictionary item
|
||||
if not isinstance(item, dict): continue
|
||||
|
||||
# Safely extract nested fields
|
||||
date_val = item.get('date')
|
||||
date_str = date_val.get('datetime') if isinstance(date_val, dict) else str(date_val)
|
||||
|
||||
thumb_val = item.get('thumbnail')
|
||||
thumb_src = thumb_val.get('src') if isinstance(thumb_val, dict) else None
|
||||
|
||||
stories.append([
|
||||
item.get('home_page_title') or item.get('title'),
|
||||
item.get('title'),
|
||||
item.get('link'),
|
||||
date_str,
|
||||
thumb_src,
|
||||
url
|
||||
])
|
||||
|
||||
def lakanatitle(url, extras):
|
||||
data = get_json(url + extras)
|
||||
if not data: return
|
||||
articles = data.get('additional_top_stories', {}).get('articles', []) + \
|
||||
data.get('top_stories', {}).get('articles', [])
|
||||
for item in articles:
|
||||
stories.append([
|
||||
item.get('title'), item.get('title'), item.get('link'),
|
||||
item.get('date', {}).get('datetime'), item.get('thumbnail', {}).get('src'), url
|
||||
])
|
||||
|
||||
def fusion(url, extras):
|
||||
try:
|
||||
html_content = S.get(url, timeout=10).text
|
||||
except requests.RequestException as e:
|
||||
print(f"Error fetching HTML from {url}: {e}")
|
||||
return
|
||||
soup = BeautifulSoup(html_content, 'html.parser')
|
||||
script_tag = soup.find('script', id='fusion-metadata')
|
||||
if script_tag and script_tag.string:
|
||||
match = re.search(r'Fusion\.globalContent\s*=\s*(\{.*?\});', script_tag.string, re.DOTALL)
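# Non-greedy capture of the object literal assigned to Fusion.globalContent, up to
# the first '});' -- assumes the assignment on these pages ends that way.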
|
||||
if match:
|
||||
try:
|
||||
data = json.loads(match.group(1))
|
||||
for item in data.get('content_elements', []):
|
||||
try:
|
||||
website_url_path = item.get('website_url')
|
||||
if not website_url_path: continue
|
||||
stories.append([
|
||||
item.get('headlines', {}).get('basic'),
|
||||
item.get('description', {}).get('basic'),
|
||||
url + website_url_path,
|
||||
item.get('display_date'),
|
||||
item.get('promo_items', {}).get('basic', {}).get('url'), url
|
||||
])
|
||||
except (KeyError, TypeError):
|
||||
continue
|
||||
except json.JSONDecodeError:
|
||||
print(f"Failed to parse Fusion JSON from {url}")
|
||||
# --- End of Scraping Functions ---
|
||||
|
||||
stories = []
|
||||
newsarray = [
|
||||
["https://wsaz.com", "", "fusion"], ["https://wtap.com", "", "fusion"],
|
||||
["https://wdtv.com", "", "fusion"], ["https://wymt.com", "", "fusion"],
|
||||
["https://wboy.com", "/wp-json/lakana/v1/template-variables/", "lakana"],
|
||||
["https://wowktv.com", "/wp-json/lakana/v1/template-variables/", "lakanatitle"],
|
||||
["https://wchstv.com", "/api/rest/audience/trending-stories?count=25", "trendingstories"],
|
||||
["https://wvva.com", "", "fusion"],
|
||||
["https://wjhl.com", "/wp-json/lakana/v1/template-variables/", "lakana"],
|
||||
["https://wcyb.com", "/api/rest/audience/trending-stories?count=25", "trendingstories"],
|
||||
["https://wvnstv.com", "/wp-json/lakana/v1/template-variables/", "lakanatitle"],
|
||||
]
|
||||
|
||||
def runmedia():
|
||||
for source_info in newsarray:
|
||||
url, extra, func_name = source_info
|
||||
try:
|
||||
print(f"Fetching from: {url}")
|
||||
func = globals()[func_name]
|
||||
func(url, extra)
|
||||
except Exception as e:
|
||||
print(f"Failed to process {url}. Error: {e}")
|
||||
|
||||
def process_and_insert_stories():
|
||||
print(f"\nCollected {len(stories)} stories. Processing and inserting...")
|
||||
inserted_count = 0
|
||||
for story_data in stories:
|
||||
try:
|
||||
if len(story_data) != 6: continue
|
||||
headline, summary, link, dt_str, img_url, source = story_data
|
||||
timeutc = standardize_time(dt_str)
|
||||
if not timeutc or not link or not headline: continue
|
||||
|
||||
impact_score = calculate_impact_score(headline, summary)
|
||||
nlpscore = calculate_relevance(headline, summary)
|
||||
|
||||
print(f" - Impact[{impact_score}] NLP[{nlpscore}]: {headline}")
|
||||
|
||||
sql = """
|
||||
INSERT INTO news (headline, summary, storylink, updated, source, imageurl, timeutc, impact_score, nlpscore)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
|
||||
ON CONFLICT (storylink) DO UPDATE SET
|
||||
(headline, summary, updated, imageurl, timeutc, impact_score, nlpscore) =
|
||||
(EXCLUDED.headline, EXCLUDED.summary, EXCLUDED.updated, EXCLUDED.imageurl,
|
||||
EXCLUDED.timeutc, EXCLUDED.impact_score, EXCLUDED.nlpscore);
|
||||
"""
|
||||
|
||||
params = (headline, summary, link, timeutc, source, img_url, timeutc, impact_score, nlpscore)
|
||||
|
||||
cursor.execute(sql, params)
|
||||
inserted_count += 1
|
||||
except Exception as e:
|
||||
print(f"!!! DATABASE ERROR processing story row: {story_data}. Error: {e}")
|
||||
continue
|
||||
|
||||
conn.commit()
|
||||
print(f"\nDatabase insertion/update complete. {inserted_count} stories were processed and inserted/updated.")
|
||||
|
||||
def cleandb():
|
||||
print("Cleaning duplicate summaries from the database...")
|
||||
sql = """
|
||||
DELETE FROM news a USING news b
|
||||
WHERE a.id < b.id AND a.summary = b.summary;
|
||||
"""
|
||||
cursor.execute(sql)
|
||||
conn.commit()
|
||||
print(f"Cleaned {cursor.rowcount} duplicate entries.")
|
||||
|
||||
if __name__ == "__main__":
|
||||
runmedia()
|
||||
process_and_insert_stories()
|
||||
cleandb()
|
||||
cursor.close()
|
||||
conn.close()
|
||||
print("Process finished and connection closed.")
|
||||
311
news.txt
Normal file
@@ -0,0 +1,311 @@
|
||||
Fetching from: https://wsaz.com
|
||||
Fetching from: https://wtap.com
|
||||
Fetching from: https://wdtv.com
|
||||
Fetching from: https://wymt.com
|
||||
Fetching from: https://wboy.com
|
||||
Fetching from: https://wowktv.com
|
||||
Fetching from: https://wchstv.com
|
||||
Fetching from: https://wvva.com
|
||||
Fetching from: https://wjhl.com
|
||||
Fetching from: https://wcyb.com
|
||||
Fetching from: https://wvnstv.com
|
||||
|
||||
Collected 293 stories. Processing and inserting...
|
||||
- Impact[0] NLP[0.12]: Two WV National Guard members critical after D.C. shooting; victims identified
|
||||
- Impact[75] NLP[0.29]: Woodward Drive closed due to ATV crash
|
||||
- Impact[0] NLP[0.0]: WSAZ Gives Thanks
|
||||
- Impact[0] NLP[0.0]: Afghan national charged in Guard ambush shooting drove across US to carry out attack, officials say
|
||||
- Impact[0] NLP[0.0]: Trump administration orders 500 more National Guard troops to DC after shooting of soldiers
|
||||
- Impact[0] NLP[0.0]: First Alert Weather+ | Thanksgiving weekend
|
||||
- Impact[0] NLP[0.0]: 2 National Guard members shot in an ambush attack just blocks from the White House
|
||||
- Impact[0] NLP[0.0]: Man arrested after shots-fired incident
|
||||
- Impact[0] NLP[0.0]: WSAZ Sports+ | The Game + Herd & Wildcats play for Bowl Eligibility
|
||||
- Impact[0] NLP[0.0]: Investigation underway after shooting involving two juveniles
|
||||
- Impact[0] NLP[0.32]: First Alert Weather Day: Gusty winds usher in much colder temperatures for Thanksgiving
|
||||
- Impact[65] NLP[0.1]: Chase ends with WVSP cruiser crash
|
||||
- Impact[0] NLP[0.0]: Preparations underway for annual Frank Veltri dinner
|
||||
- Impact[10] NLP[0.18]: Facility for children with psychiatric issues to close
|
||||
- Impact[15] NLP[0.0]: Thanksgiving turnpike travel
|
||||
- Impact[50] NLP[0.0]: Man charged with attacking city workers on sidewalk
|
||||
- Impact[0] NLP[0.0]: US 23 Country Music Highway Museum
|
||||
- Impact[7] NLP[0.0]: Cooking safety tips from the American Red Cross
|
||||
- Impact[50] NLP[0.0]: Cabell-Huntington Health Department to end needle exchange program
|
||||
- Impact[0] NLP[0.0]: WSAZ Sports+ | State Semi-Finals in WV, KY & OH
|
||||
- Impact[0] NLP[0.0]: Christmas gift ideas at Twelvepole Trading Post
|
||||
- Impact[50] NLP[0.0]: Small Business Saturday Vendor Craft Showcase
|
||||
- Impact[50] NLP[0.0]: Elks Lodge annual Thanksgiving dinner
|
||||
- Impact[55] NLP[0.0]: Christmas lights at Gallipolis City Park slated Wednesday night
|
||||
- Impact[0] NLP[0.0]: This Day in History - 11/26/25
|
||||
- Impact[18] NLP[0.47]: First Alert Weather Day: How will rain and wind impact your holiday travel?
|
||||
- Impact[57] NLP[0.0]: Ashland Christmas parade tonight
|
||||
- Impact[0] NLP[0.0]: Mayor: Man arrested after ‘unprovoked attack’ on city workers
|
||||
- Impact[62] NLP[0.33]: New details emerge about ATV crash that killed one teen
|
||||
- Impact[50] NLP[0.0]: AD Lewis Community Center gives out Thanksgiving food boxes
|
||||
- Impact[50] NLP[0.0]: Holly Jolly Brawley Trolly ready for service
|
||||
- Impact[0] NLP[0.0]: Holiday shopping tips to avoid online scams
|
||||
- Impact[0] NLP[0.11]: Rate hike settlement proposed in Kentucky
|
||||
- Impact[50] NLP[0.0]: Holiday safety: Avoiding porch pirates
|
||||
- Impact[0] NLP[0.0]: Gov. Morrisey kicks off holiday toy drive
|
||||
- Impact[0] NLP[0.0]: Afghan national charged in Guard ambush shooting drove across US to carry out attack, officials say
|
||||
- Impact[0] NLP[0.0]: Arts and entertainment events happening November 27th-30th across the Mid-Ohio Valley
|
||||
- Impact[0] NLP[0.0]: Cold and breezy Thanksgiving
|
||||
- Impact[50] NLP[0.0]: Parkersburg Police investigating hit and run
|
||||
- Impact[0] NLP[0.0]: List of Thanksgiving meals offered around the Mid-Ohio Valley
|
||||
- Impact[0] NLP[0.0]: 2 National Guard members shot in an ambush attack just blocks from the White House
|
||||
- Impact[50] NLP[0.0]: Truck crashed into apartment building on 13th and Lynn Street
|
||||
- Impact[0] NLP[0.0]: One person in the hospital after head-on collision on U.S. 50; All lanes re-opened on U.S. 50
|
||||
- Impact[50] NLP[0.0]: The Cook’s Shop: 15 years of serving Marietta with quality kitchen essentials
|
||||
- Impact[50] NLP[0.0]: Tradition continues at WCCC with annual Thanksgiving Luncheon
|
||||
- Impact[0] NLP[0.0]: Black Friday preview from Grand Central Mall
|
||||
- Impact[0] NLP[0.0]: MOVHD on Thanksgiving food safety
|
||||
- Impact[50] NLP[0.0]: 2nd annual Meals for Veterans by Marietta Noon Rotary
|
||||
- Impact[50] NLP[0.0]: Wood County Board of Education receives forensic audit report
|
||||
- Impact[50] NLP[0.0]: This Day in History: Nov. 26, 2025
|
||||
- Impact[50] NLP[0.22]: St. Marys water rates increase for residents and businesses
|
||||
- Impact[0] NLP[0.0]: Whooping cough in the Mid-Ohio Valley
|
||||
- Impact[40] NLP[0.05]: Disaster relief available for drought-affected businesses in Ohio, West Virginia
|
||||
- Impact[0] NLP[0.0]: Agents seize more than $775,000 worth of counterfeit toys shipped from China
|
||||
- Impact[0] NLP[0.73]: Colder weather moving into the Mid-Ohio Valley.
|
||||
- Impact[50] NLP[0.0]: Clutch Collective offers handmade gifts from over 80 artists for holiday season
|
||||
- Impact[15] NLP[0.0]: Travel ban for all high-profile vehicles on the Ohio Turnpike begins Wednesday
|
||||
- Impact[50] NLP[0.0]: MCF completes Stock the Shelves Challenge
|
||||
- Impact[0] NLP[0.0]: Thunderbolt Running and Timing promotes community fitness through group runs
|
||||
- Impact[50] NLP[0.0]: Groundbreaking ceremony for New Lincoln Elementary
|
||||
- Impact[0] NLP[0.0]: This Day in History: Nov. 25, 2025
|
||||
- Impact[0] NLP[0.13]: MOV Regional Airport ready for holiday travel
|
||||
- Impact[50] NLP[0.0]: K9 Kuky from Belpre got a donation for protective vest
|
||||
- Impact[5] NLP[0.08]: Chilly Thanksgiving for the MOV
|
||||
- Impact[5] NLP[0.2]: Messy Thanksgiving Travel
|
||||
- Impact[0] NLP[0.0]: Ohio man facing multiple charges of rape involving a minor
|
||||
- Impact[0] NLP[0.0]: West Virginia’s largest Christmas shop prepares for Small Business Saturday
|
||||
- Impact[50] NLP[0.0]: Salvation Army of Parkersburg kettle bell campaign raises $13,000 in two weeks
|
||||
- Impact[0] NLP[0.0]: White Picket Farm makes top 10 for Best of West Virginia 2025
|
||||
- Impact[50] NLP[0.0]: WCVSC holds thanksgiving giveaway for veteran families
|
||||
- Impact[50] NLP[0.0]: Community prayer vigils set for National Guard member from Webster Springs
|
||||
- Impact[0] NLP[0.0]: Afghan national charged in Guard ambush shooting drove across US to carry out attack, officials say
|
||||
- Impact[30] NLP[0.31]: Rain could begin as snow on Sunday, but is there another chance next week?
|
||||
- Impact[0] NLP[0.0]: Trump administration orders 500 more National Guard troops to DC after shooting of soldiers
|
||||
- Impact[10] NLP[0.0]: Morgantown man arrested for sexually abusing 9-year-old daughter in 2004
|
||||
- Impact[50] NLP[0.0]: UPDATE: 17-year-old juvenile found safe
|
||||
- Impact[0] NLP[0.0]: UPDATE: W.Va. Board of Education to allow religious vaccine exemptions pending appeal
|
||||
- Impact[65] NLP[0.08]: Salvation Army major returns to service after surviving traumatic motorcycle crash
|
||||
- Impact[0] NLP[0.0]: 2 National Guard members shot in an ambush attack just blocks from the White House
|
||||
- Impact[0] NLP[0.0]: MCHD Dentistry hosts Healthy Smiles Day in Monongalia County
|
||||
- Impact[0] NLP[0.0]: Meadowbrook Mall giving away discounts, prizes to first 200 people on Black Friday
|
||||
- Impact[100] NLP[0.27]: UPDATE: Truck driver who caused fatal Cheat Lake accident booked into jail by ICE
|
||||
- Impact[0] NLP[0.0]: Gov. Morrisey shares tips on how to avoid scams during holiday season
|
||||
- Impact[0] NLP[0.0]: IRS files $5 million lawsuit against West Virginia Senator Jim Justice
|
||||
- Impact[0] NLP[0.0]: Hundreds of thousands of dollars worth of counterfeit toys seized in Virginia
|
||||
- Impact[0] NLP[0.0]: Local nonprofit hosts second annual “Petsgiving”
|
||||
- Impact[50] NLP[0.0]: Salem University looking to start new Psychedelic Education program
|
||||
- Impact[50] NLP[0.0]: State Police accepts donations for ‘State Troopers for West Virginia Needy’ campaign
|
||||
- Impact[0] NLP[0.0]: WVU Medicine Children’s holding community toy drive for holiday donations
|
||||
- Impact[50] NLP[0.0]: UPDATE: Runaway teen from Upshur County has now been found
|
||||
- Impact[0] NLP[0.0]: Jennifer Garner visits schools and homes in her native W.Va.
|
||||
- Impact[0] NLP[0.0]: Big changes are coming just as Thanksgiving is arriving, but what are they?
|
||||
- Impact[0] NLP[0.0]: Wesleyan College provides new columbarium to be located in chapel on campus
|
||||
- Impact[0] NLP[0.0]: State Police announce sobriety checkpoint in Randolph County in December
|
||||
- Impact[50] NLP[0.0]: Woman arrested for slashing dog’s face with knife in Harrison County
|
||||
- Impact[0] NLP[0.0]: Marion County man arrested for child abuse
|
||||
- Impact[50] NLP[0.18]: Midstate Plumbing & Air gives away turkeys for Thanksgiving
|
||||
- Impact[0] NLP[0.0]: Erskine construction hands out 200 turkeys
|
||||
- Impact[7] NLP[0.4]: Rain showers continue in NCWV, when will they end?
|
||||
- Impact[0] NLP[0.2]: If you’re traveling for the holiday, will you need to worry about any weather-related travel problems?
|
||||
- Impact[50] NLP[0.0]: Kings Hands Boxing Club holds ribbon cutting in Stonewood
|
||||
- Impact[0] NLP[0.0]: Canaan Valley Wildlife Refuge giving away free, real Christmas trees this holiday season
|
||||
- Impact[0] NLP[0.0]: Town of White Hall preparing for holiday events
|
||||
- Impact[0] NLP[0.0]: Gov. Morrisey grants state employees a half-day off before Thanksgiving
|
||||
- Impact[0] NLP[0.0]: Market in the Park kicks off the Celebration of Lights
|
||||
- Impact[15] NLP[0.19]: Juveniles injured in Pike Co. shooting
|
||||
- Impact[0] NLP[0.0]: Coroner rules woman found unresponsive died of natural causes
|
||||
- Impact[0] NLP[0.0]: 2 National Guard members shot in an ambush attack just blocks from the White House
|
||||
- Impact[0] NLP[0.0]: FIRST ALERT FORECAST: Cold temperatures for your Turkey Day
|
||||
- Impact[0] NLP[0.0]: Vice President Vance arrives in downtown Lexington; motorcade causing some traffic delays
|
||||
- Impact[50] NLP[0.0]: Authorities searching for missing man
|
||||
- Impact[0] NLP[0.0]: Kentucky State Police launch sale of trooper teddy bears to support children in crisis
|
||||
- Impact[0] NLP[0.0]: Better Business Bureau warns shoppers of holiday scams
|
||||
- Impact[0] NLP[0.0]: Hazard Turkey Trot brings community together on Thanksgiving morning
|
||||
- Impact[5] NLP[0.13]: FIRST ALERT FORECAST: Chilly temperatures start today
|
||||
- Impact[25] NLP[0.1]: Hit-and-run survivor thanks community for support
|
||||
- Impact[0] NLP[0.0]: Man sentenced for money laundering conspiracy involving COVID relief loans
|
||||
- Impact[0] NLP[0.0]: Man charged after reportedly firing shots toward wife
|
||||
- Impact[0] NLP[0.11]: Gov. Beshear announces funding to improve water service for homes in Powell Co.
|
||||
- Impact[50] NLP[0.0]: Students face food insecurity, school provides resources
|
||||
- Impact[0] NLP[0.0]: This Day in History - November 26, 2010
|
||||
- Impact[0] NLP[0.0]: Thanksgiving grief: Coping with loss during the holidays
|
||||
- Impact[0] NLP[0.0]: Thanksgiving grocery costs down 5%, but menu matters
|
||||
- Impact[37] NLP[0.13]: Pike man arraigned in connection to April shooting death
|
||||
- Impact[37] NLP[0.22]: Kentucky man accepts plea deal in baby’s death
|
||||
- Impact[0] NLP[0.0]: Child pornography warrants issued for missing high school coach, authorities say
|
||||
- Impact[0] NLP[0.0]: Kentucky counties struggle to find skilled plumbers and electricians
|
||||
- Impact[0] NLP[0.11]: Rate hike settlement proposed in Kentucky
|
||||
- Impact[0] NLP[0.0]: Thanksgiving Eve poses heightened drunk driving risks as millions travel
|
||||
- Impact[0] NLP[0.0]: E. KY sheriff to meet with KHSAA officials about possible sports betting
|
||||
- Impact[0] NLP[0.11]: London Fire Department urges holiday celebration safety
|
||||
- Impact[0] NLP[0.0]: Holiday travelers split on transportation secretary’s call to dress up for flights
|
||||
- Impact[50] NLP[0.0]: Perry Co. students sponsor more than 170 peers for Angel Tree project
|
||||
- Impact[0] NLP[0.0]: FIRST ALERT FORECAST: Shower Chances Continue This Evening
|
||||
- Impact[0] NLP[0.09]: Berea police crack down on dangerous school pickup parking
|
||||
- Impact[0] NLP[0.0]: Blue Grass Airport expects thousands of daily passengers during holiday rush
|
||||
- Impact[0] NLP[0.0]: Man arrested after throwing rocks at passing vehicles
|
||||
- Impact[0] NLP[0.0]: Man crashes after hitting mailboxes, charged with DUI
|
||||
- Impact[0] NLP[0.0]: Lexington restaurant owner defends employee after customer’s personal attack in online review
|
||||
- Impact[0] NLP[0.0]: Emmy-winning artist performs 50 hours straight for coat drive
|
||||
- Impact[10] NLP[0.0]: Runners in Morgantown's Turkey Trot gobble to finish line
|
||||
- Impact[65] NLP[0.21]: Webster Co rallies behind WVNG member injured in shooting
|
||||
- Impact[0] NLP[0.0]: Restaurant Road Trip: Smashers Sweets-N-Eats
|
||||
- Impact[0] NLP[0.0]: WV "Heirloom" app launched to encourage logging off social media
|
||||
- Impact[50] NLP[0.15]: Clarksburg FD provides tips on Thanksgiving safety
|
||||
- Impact[50] NLP[0.0]: Salvation Army hosts Thanksgiving meal in Clarksburg
|
||||
- Impact[50] NLP[0.0]: Missing Harrison County teen located
|
||||
- Impact[0] NLP[0.0]: MCHD offers free services for Healthy Smiles Day
|
||||
- Impact[7] NLP[0.0]: RCB marching band to perform in DC's Independence Day parade
|
||||
- Impact[0] NLP[0.0]: WV State Parks announce 2025 Black Friday deals
|
||||
- Impact[0] NLP[0.0]: WVNG members shot in DC in 'critical condition': FBI Director Kash Patel
|
||||
- Impact[0] NLP[0.0]: Man allegedly sexually assaulted 9-year-old in 2004: WVSP
|
||||
- Impact[0] NLP[0.0]: Permanent injunction issued in favor of vaccine exemptions
|
||||
- Impact[90] NLP[0.5]: Truck driver in Cheat Lake fatal arrested by ICE
|
||||
- Impact[0] NLP[0.0]: Hundreds eat Thanksgiving meals at WV state parks
|
||||
- Impact[0] NLP[0.0]: Annual Gobble Gallop sees highest attendance yet
|
||||
- Impact[65] NLP[0.33]: Person facing charges in Kanawha County side-by-side crash
|
||||
- Impact[50] NLP[0.0]: Kanawha Charleston Humane Association hosts 10th annual Furry Feast
|
||||
- Impact[0] NLP[0.0]: DC shooting suspect worked for CIA in Afghanistan; drove across country before attack
|
||||
- Impact[0] NLP[0.0]: Judge rules West Virginia parents can cite religious beliefs for vaccine exemptions
|
||||
- Impact[50] NLP[0.0]: Huntington City Mission prepares for Thanksgiving
|
||||
- Impact[0] NLP[0.0]: Volunteers continue Frank Veltri’s mission of giving this Thanksgiving
|
||||
- Impact[15] NLP[0.0]: West Virginia Turnpike Authority expecting 740,000 drivers through holiday weekend
|
||||
- Impact[50] NLP[0.0]: Charleston police misconduct allegations cleared
|
||||
- Impact[0] NLP[0.0]: Earl Wilson Community Thanksgiving passes out hundreds of Thanksgiving meals for kids
|
||||
- Impact[0] NLP[0.0]: Ohio State likely to end eight majors, over 350 courses in Senate Bill 1 compliance
|
||||
- Impact[0] NLP[0.29]: Major weather changes across Thanksgiving and next week
|
||||
- Impact[50] NLP[0.0]: Rats and Rams face off in Dunbar’s 77th Commode Bowl
|
||||
- Impact[0] NLP[0.0]: Morrisey speaks on shooting of two WVNG members
|
||||
- Impact[37] NLP[0.22]: Children’s pajamas sold on Amazon recalled for ‘risk of death from burn hazard’
|
||||
- Impact[65] NLP[0.18]: Webster County rallies behind West Virginia National Guard member injured in shooting
|
||||
- Impact[0] NLP[0.0]: What we do and don’t know about the National Guard members shot in DC
|
||||
- Impact[50] NLP[0.0]: Two West Virginia National Guard members hospitalized after shooting in Washington, D.C.
|
||||
- Impact[50] NLP[0.0]: Charleston man charged with DUI after allegedly hitting pedestrian with vehicle
|
||||
- Impact[50] NLP[0.0]: Man faces nearly 20 charges after leading pursuit through Kanawha County in stolen vehicle
|
||||
- Impact[0] NLP[0.0]: Law enforcement respond to home of U.S. Sen. Shelley Moore Capito after online comment
|
||||
- Impact[50] NLP[0.0]: Gallipolis police asking for assistance in finding 3 missing Gallia County teens
|
||||
- Impact[50] NLP[0.0]: One person in custody after standoff in Huntington
|
||||
- Impact[50] NLP[0.0]: Kanawha County teen located after being reported missing, deputies say
|
||||
- Impact[60] NLP[0.09]: Vigils planned for West Virginia National Guard member shot in D.C.
|
||||
- Impact[87] NLP[0.11]: Logan murder victim remembered as devoted father, outgoing friend
|
||||
- Impact[50] NLP[0.0]: State leaders react to shooting of two West Virginia National Guard members
|
||||
- Impact[50] NLP[0.0]: 'There will be justice': W.Va. governor addresses state's National Guard troops in DC
|
||||
- Impact[0] NLP[0.0]: What we know about the suspect in the shooting of two WVNG members in D.C.
|
||||
- Impact[35] NLP[0.0]: MSHA releases preliminary report on deadly mine accident in Nicholas County
|
||||
- Impact[0] NLP[0.0]: Hundreds of churches across eastern Ky. join forces to fix 12,000 Thanksgiving meals
|
||||
- Impact[50] NLP[0.0]: Child psychiatric treatment facility in Barboursville to close in early 2026
|
||||
- Impact[75] NLP[0.1]: Man accused of Logan stabbing denied bond after charge upgraded to murder
|
||||
- Impact[50] NLP[0.0]: Charleston police chief defends officers against misconduct allegations in council letter
|
||||
- Impact[50] NLP[0.0]: Barboursville School to close in February, widens mental health placement gaps in W.Va.
|
||||
- Impact[50] NLP[0.0]: D.C. National Guard shooter confirmed to have connection to Washington state
|
||||
- Impact[87] NLP[0.42]: Emergency crews respond to crash involving ATV and car in Logan County
|
||||
- Impact[50] NLP[0.09]: Child reportedly falls from miniature train at Light the Night in Charleston
|
||||
- Impact[50] NLP[0.0]: 2 National Guard members shot near White House identified, gunman faces several charges
|
||||
- Impact[50] NLP[0.0]: Charleston police officer cleared of misconduct allegations after attorneys demand probe
|
||||
- Impact[10] NLP[0.0]: Black Friday planning: What stores are open, closed on Thanksgiving Day
|
||||
- Impact[0] NLP[0.0]: Shooting prompts 500 more National Guard troops deployed to DC amid ongoing legal battle
|
||||
- Impact[0] NLP[0.0]: Hometown Hero: Dr. Eric Shrader
|
||||
- Impact[0] NLP[0.0]: WVVA’s Pigskin Prophet: Week 13
|
||||
- Impact[50] NLP[0.0]: Warriors Ready to Take the Next Step
|
||||
- Impact[0] NLP[0.0]: See some of your results from WVVA's '20 Days of Giving'
|
||||
- Impact[0] NLP[0.4]: Chilly and windy for Thanksgiving, with colder weather tonight
|
||||
- Impact[50] NLP[0.0]: Two West Virginia National Guard members shot in D.C. in ‘targeted’ attack; suspect identified
|
||||
- Impact[0] NLP[0.0]: Small Business Administration urges shoppers to support local businesses
|
||||
- Impact[0] NLP[0.0]: Restaurant gives back to McDowell County community on Thanksgiving
|
||||
- Impact[0] NLP[0.0]: AAA shares safety tips for busy holiday shopping areas
|
||||
- Impact[0] NLP[0.0]: Bluefield Union Mission distributes 300 Thanksgiving food bags to community
|
||||
- Impact[0] NLP[0.0]: 2 National Guard members shot in an ambush attack just blocks from the White House
|
||||
- Impact[0] NLP[0.17]: Public Service Commission continues current rates for Black Diamond Power Co. during investigation
|
||||
- Impact[0] NLP[0.0]: Several cheese products recalled due to listeria risk
|
||||
- Impact[0] NLP[0.0]: Town of Tazewell gears up for Christmas festivities
|
||||
- Impact[15] NLP[0.07]: CodeRED platform attack affects thousands of organizations nationwide
|
||||
- Impact[0] NLP[0.0]: 100+ attend free tri-annual Father/Daughter Dance in Greenbrier County
|
||||
- Impact[0] NLP[0.0]: Virginia Troopers asks you to make buckling up a Thanksgiving tradition
|
||||
- Impact[50] NLP[0.0]: Two sought in Campbell County armed robbery
|
||||
- Impact[0] NLP[0.0]: Red Cross offers steps to keep your Thanksgiving cooking safe
|
||||
- Impact[50] NLP[0.0]: Lady Warriors Hungry for Another Trip to States
|
||||
- Impact[9] NLP[0.18]: Boil Water advisory lifted for parts of Raleigh County
|
||||
- Impact[0] NLP[0.0]: Will politics dominate your Thanksgiving? New poll shows partisan motivation ahead of midterms
|
||||
- Impact[0] NLP[0.0]: Richlands man granted early release from supervised probation
|
||||
- Impact[0] NLP[0.0]: Dominion customers face higher bills despite reduced rate request
|
||||
- Impact[0] NLP[0.0]: Shooting into home leads to two arrests and guns seized
|
||||
- Impact[65] NLP[0.1]: Chase ends with WVSP cruiser crash
|
||||
- Impact[0] NLP[0.0]: Judge issues permanent injunction; West Virginia must allow religious vaccine exemptions under state law
|
||||
- Impact[0] NLP[0.0]: New fee plan asks international visitors to pay fair share to make national parks beautiful again
|
||||
- Impact[0] NLP[0.0]: Deputies searching for suspects after shooting in parking lot at FedEx facility
|
||||
- Impact[0] NLP[0.0]: Don’t let unsafe driving behaviors or theft ruin Black Friday shopping
|
||||
- Impact[50] NLP[0.0]: Fill the truck today: Pineville VFD food drive at Sav A Lot, prize drawing at 5pm
|
||||
- Impact[0] NLP[0.0]: WV students - Win up to $5,000 for college savings with the ‘When I Grow Up’ essay contest
|
||||
- Impact[37] NLP[0.1]: Richmond FBI offers $20k reward for information on Florida woman’s death
|
||||
- Impact[50] NLP[0.0]: Waynesboro man charged with abduction, rape
|
||||
- Impact[10] NLP[0.0]: Morgantown man arrested for sexually abusing 9-year-old daughter in 2004
|
||||
- Impact[50] NLP[0.0]: Feast of Sharing provides warm meal & clothes
|
||||
- Impact[0] NLP[0.0]: USPS JC site moving from East Main to North Roan St.
|
||||
- Impact[7] NLP[0.0]: Preparations underway for Union HS game as search continues
|
||||
- Impact[0] NLP[0.0]: DC shooting suspect worked for CIA in Afghanistan: CIA
|
||||
- Impact[0] NLP[0.0]: Ladies Raising Livestock Conference to be held on Feb. 21
|
||||
- Impact[0] NLP[0.0]: Christmas tree farms open for the season
|
||||
- Impact[0] NLP[0.0]: Kingsport store prepares for the holiday rush
|
||||
- Impact[0] NLP[0.0]: Turkey Trot Thanksgiving a tradition for many families
|
||||
- Impact[0] NLP[0.0]: Haven of Mercy volunteers prepare to feed thousands
|
||||
- Impact[0] NLP[0.33]: Family and friends of double homicide victims speak out
|
||||
- Impact[0] NLP[0.0]: Timeline: The search for fugitive coach Travis Turner
|
||||
- Impact[0] NLP[0.0]: Restaurants prepare to serve on Thanksgiving
|
||||
- Impact[0] NLP[0.0]: VSP: Warrants issued for missing Union football coach
|
||||
- Impact[0] NLP[0.33]: Sheriff: Man found dead in Lee County
|
||||
- Impact[0] NLP[0.27]: Passing clouds, windy and cold tonight
|
||||
- Impact[0] NLP[0.0]: National Guard members shot near WH, in critical condition
|
||||
- Impact[0] NLP[0.0]: Over 5,000 runners compete in 20th annual Turkey Trot
|
||||
- Impact[0] NLP[0.0]: Fern & Fable Collective to open Friday in Jonesborough
|
||||
- Impact[0] NLP[0.0]: Local couple faces raises concerns about local dealership
|
||||
- Impact[0] NLP[0.0]: Manhunt continues for fugitive Southwest Virginia high school football coach
|
||||
- Impact[0] NLP[0.0]: Bristol community mourns after double homicide on Lily Street
|
||||
- Impact[0] NLP[0.0]: Missing Virginia high school coach charged with child pornography, police say
|
||||
- Impact[0] NLP[0.0]: Tennessee launches winter trout stocking with over 70,000 fish at 40 local fishing spots
|
||||
- Impact[0] NLP[0.0]: Spotless giraffe Kipekee passes away at Brights Zoo
|
||||
- Impact[0] NLP[0.0]: Tennessee inmate who killed Chattanooga State student declines to choose execution method
|
||||
- Impact[10] NLP[0.0]: Black Friday planning: What stores are open, closed on Thanksgiving Day
|
||||
- Impact[0] NLP[0.0]: Lee County Sheriff's Office investigating body found in St Charles
|
||||
- Impact[0] NLP[0.0]: Virginia State Police continue search for missing football coach
|
||||
- Impact[0] NLP[0.0]: Bristol, Tennessee police investigating double homicide, victims identified
|
||||
- Impact[0] NLP[0.0]: Ceremony to remember homeless people who died in our region
|
||||
- Impact[0] NLP[0.0]: High School Basketball Scores and Highlights: November 26, 2025
|
||||
- Impact[0] NLP[0.0]: We Salute You: October 2025
|
||||
- Impact[0] NLP[0.0]: Downtown Johnson City bars to host Santacon
|
||||
- Impact[0] NLP[0.0]: UPDATE: Virginia State Police searching for Union head football coach Travis Turner
|
||||
- Impact[0] NLP[0.0]: Scooters Coffee to open new drive-thru in Bristol, Tenn. next month
|
||||
- Impact[8] NLP[0.0]: Virginia State Police warn citizens of scam calls in the area
|
||||
- Impact[0] NLP[0.1]: Bristol, Tenn. homicide victims' loved ones speak out
|
||||
- Impact[0] NLP[0.0]: New $80M resort opens in Pigeon Forge, a big draw for Tennessee vacationers
|
||||
- Impact[0] NLP[0.0]: We Salute You: September 2025
|
||||
- Impact[50] NLP[0.0]: Virginia Tech beats Colorado State to move into the winner’s bracket at Battle 4 Atlantis
|
||||
- Impact[0] NLP[0.0]: Thanksgiving's journey: How it landed on the fourth Thursday of November
|
||||
- Impact[50] NLP[0.0]: Neighbors: Man involved in Carter County standoff may have had mental issues
|
||||
- Impact[0] NLP[0.0]: Tri-Cities shelters prep for Thanksgiving as concerns grow over unhoused population
|
||||
- Impact[0] NLP[0.0]: Hinton’s Ritz Theatre to show free holiday movies leading up to Christmas
|
||||
- Impact[0] NLP[0.0]: Trump: ‘Animal’ who shot National Guard members ‘will pay a very steep price’
|
||||
- Impact[50] NLP[0.0]: Governor Morrisey releases statement after two West Virginia National Guard members reportedly shot in Washington D.C.
|
||||
- Impact[0] NLP[0.0]: 2 National Guard members shot near White House, suspect in custody
|
||||
- Impact[0] NLP[0.0]: Bluefield West Virginia Police Department releases bench warrant list
|
||||
- Impact[0] NLP[0.5]: Dozens dead, nearly 300 missing in Hong Kong high-rise fire
|
||||
- Impact[0] NLP[0.0]: Major Cooldown Tonight as our Showers Exit, Thanksgiving Will Be a Chilly One!
|
||||
- Impact[0] NLP[0.0]: Will mail come this weekend? USPS delivery changes, post office closures planned for Thanksgiving holiday
|
||||
- Impact[0] NLP[0.0]: Which grocery stores are open for Thanksgiving 2025?
|
||||
- Impact[15] NLP[0.33]: Don’t let food poisoning crash your Thanksgiving dinner
|
||||
- Impact[20] NLP[0.25]: Brutal wind chills arrive for Thanksgiving, cold temperatures to continue
|
||||
- Impact[0] NLP[0.0]: Tips on keeping up with your packages this holiday season
|
||||
- Impact[0] NLP[0.0]: Tips on staying ahead of travel delays this holiday season
|
||||
- Impact[0] NLP[0.0]: Thanksgiving Travel projections expect busy week on highways, at airports
|
||||
- Impact[0] NLP[0.0]: Lifeline Church Hinton Youth Group serves up holiday cheer and clean windshields
|
||||
- Impact[50] NLP[0.0]: Beckley psychiatrist gives advice on dealing with family drama during the holidays
|
||||
- Impact[48] NLP[0.5]: Preliminary report released on fatal Rolling Thunder Mine accident
|
||||
- Impact[0] NLP[0.0]: How artificial intelligence is impacting modern healthcare
|
||||
|
||||
Database insertion/update complete. 293 stories were processed and inserted/updated.
|
||||
Cleaning duplicate summaries from the database...
|
||||
Cleaned 14 duplicate entries.
|
||||
Process finished and connection closed.
|
||||
916
news2.html
Normal file
@@ -0,0 +1,916 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<script src="https://code.jquery.com/ui/1.13.1/jquery-ui.js" integrity="sha256-6XMVI0zB8cRzfZjqKcD01PBsAy3FlDASrlC8SxCpInY=" crossorigin="anonymous"></script>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>RLX News</title>
|
||||
<style>
|
||||
/* --- Flexbox Sticky Footer --- */
|
||||
html {
|
||||
height: 100%;
|
||||
}
|
||||
body {
|
||||
min-height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html, body {
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: #f4f4f4;
|
||||
padding: 10px;
|
||||
box-sizing: border-box;
|
||||
padding-bottom: 0;
|
||||
}
|
||||
|
||||
#full-display-container, #sad-display-area {
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
.form-container {
|
||||
max-width: 800px;
|
||||
position: relative;
|
||||
padding: 15px;
|
||||
background: white;
|
||||
margin-bottom: 10px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.toggle-btn, button { font-size: 20px; padding: 8px 15px; cursor: pointer; }
|
||||
button { background-color: #4CAF50; color: white; border: none; border-radius: 4px; }
|
||||
button:hover { background-color: #45a049; }
|
||||
.form-group label { font-size: 20px; margin-bottom: 5px; display: block; }
|
||||
input, textarea { width: 100%; padding: 10px; font-size: 18px; box-sizing: border-box; }
|
||||
|
||||
#full-display-container {
|
||||
max-width: 100%;
|
||||
margin: 0 auto;
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
#sad-display-area {
|
||||
flex-grow: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
#top-stories-container, #bottom-stories-container {
|
||||
flex-grow: 1;
|
||||
overflow: hidden;
|
||||
min-height: 0;
|
||||
}
|
||||
#top-stories-container { flex-basis: 50%; flex-shrink: 0; }
|
||||
#bottom-stories-container { background: #e9e9e9; padding: 5px; border-radius: 8px; }
|
||||
|
||||
.scroller-inner {
|
||||
animation-name: continuous-scroll;
|
||||
animation-timing-function: linear;
|
||||
animation-iteration-count: infinite;
|
||||
}
|
||||
|
||||
.content-block {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
@keyframes continuous-scroll {
|
||||
0% {
|
||||
transform: translateY(0);
|
||||
}
|
||||
100% {
|
||||
transform: translateY(-50%);
|
||||
}
|
||||
}
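/* Note: the -50% end point assumes the scroller's inner content is rendered twice,
   so the halfway mark lines up with the start and the loop appears seamless. */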
|
||||
|
||||
.news-item { background: white; padding: 5px; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); display: flex; align-items: center; }
|
||||
.news-item img { width: 150px; height: 100px; object-fit: cover; border-radius: 6px; margin-right: 10px; }
|
||||
.news-content { flex: 2; }
|
||||
.headline { color: #333; font-size: 36px; margin: 0 0 5px 0; line-height: 1.0; font-weight: bold; }
|
||||
.summary { color: #666; font-size: 28px; margin: 0 0 5px 0; line-height: 1.0; display: -webkit-box; -webkit-line-clamp: 4; -webkit-box-orient: vertical; line-clamp: 4; }
|
||||
.storylink { color: #007BFF; text-decoration: none; }
|
||||
.storylink:hover { text-decoration: underline; }
|
||||
|
||||
.relevance-high { background-color: lightblue; }
|
||||
.relevance-really-high { background-color: cyan; }
|
||||
.relevance-super-high { background-color: yellow; }
|
||||
.relevance-mazza-high { background-color: orange; }
|
||||
.relevance-cheech-high { background-color: #FF8790; }
|
||||
|
||||
@keyframes flashRedOutline {
|
||||
0% { outline: 7px solid red; }
|
||||
50% { outline: 7px solid transparent; }
|
||||
100% { outline: 7px solid red; } /* match the 0% width so the loop does not jump on restart */
|
||||
}
|
||||
.new-story-flash {
|
||||
animation: flashRedOutline 2s linear infinite;
|
||||
border-radius: 8px; /* Match the news-item border-radius */
|
||||
}
|
||||
|
||||
#ticker-container {
|
||||
width: 100%;
|
||||
background-color: black;
|
||||
overflow: hidden;
|
||||
padding: 5px 0;
|
||||
box-sizing: border-box;
|
||||
z-index: 1000;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
#ticker-content {
|
||||
display: inline-block;
|
||||
white-space: nowrap;
|
||||
/* animation removed, now handled by JS */
|
||||
}
|
||||
|
||||
#ticker-content > span {
|
||||
margin: 0 20px;
|
||||
font-size: 30px;
|
||||
}
|
||||
|
||||
.ticker-year { color: lightgray; }
|
||||
.ticker-event { color: white; }
|
||||
.ticker-report { color: lightblue; }
|
||||
.ticker-wikimedia { color: lightpink; } /* Style for Wikimedia events */
|
||||
.ticker-holiday { color: lightgreen; } /* Style for holiday events */
|
||||
.ticker-upcoming { color: cyan; } /* Style for upcoming events */
|
||||
|
||||
@media (max-width: 768px) {
|
||||
#full-display-container {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
.content-block {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="form-container" id="form-container">
|
||||
<button class="toggle-btn" onclick="toggleForm();">Expand News Search Dialog</button>
|
||||
<div id="searchForm" style="display: none;">
|
||||
<div class="form-group"><label for="startTime">Start Time (Zulu/UTC):</label><input type="datetime-local" id="startTime" name="startTime" required></div>
|
||||
<div class="form-group"><label for="endTime">End Time (Zulu/UTC):</label><input type="datetime-local" id="endTime" name="endTime" required></div>
|
||||
<div class="form-group"><label for="keyTerms">Key Terms (comma-separated):</label><textarea id="keyTerms" name="keyTerms" rows="3" required>weather,flood,fire,fog,snow,emergency,wind,ice,rain,power,explosion,warmer,colder,drown,stream,river,air,wind,destroyed,rime,glaze,river,ice,creek,crash,thunder,spinup,black ice,fog,spill,pileup,pile-up,gust,frozen,funnel,rainfall,fatal,injury,sleet,injured,frost,dead,death,landslide,culvert,slippery,wildfire,tornado,blizzard,creek,hail,thunderstorm,downburst,microburst,crash,heatstroke,derecho,lightning,hypothermia,slide,flow,ski,water,innundation,victim,victims,flooding,flooded,snowing,freezing rain,clouds,cloud,storm,aircraft</textarea></div>
|
||||
<button onclick="updatenews();">Submit</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="full-display-container"></div>
|
||||
|
||||
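<!-- SAD display mode: loading the page with ?sad (or ?SAD) hides the search form, replaces the grid above with the two auto-scrolling story panes below, and enables the bottom ticker (see sadCheck() in the script). -->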
<div class="display-area" id="sad-display-area" style="display: none;">
|
||||
<div id="top-stories-container"></div>
|
||||
<div id="bottom-stories-container"></div>
|
||||
</div>
|
||||
|
||||
<div id="ticker-container" style="display: none;">
|
||||
<div id="ticker-content"></div>
|
||||
</div>
|
||||
<script>
|
||||
let lastTickerData = null;
|
||||
let isSadMode = false;
|
||||
var refreshTimer;
|
||||
var tickerWatchdogTimer;
|
||||
const NUM_TOP_STORIES = 8;
|
||||
const BOTTOM_SCROLLER_SPEED_MULTIPLIER = 2;
|
||||
window.currentWikimediaEvents = []; // Initialize the global variable
|
||||
let tickerCycleCount = 0;
|
||||
let upcomingHolidays = [];
|
||||
let triviaQuestions = [];
|
||||
let lastTriviaFetchTime = 0;
|
||||
|
||||
// Always fetch fresh Wikimedia events
|
||||
// No need to cache them between cycles
|
||||
|
||||
const TARGET_BROADCAST_SECONDS = 150;
|
||||
const PIXELS_PER_SECOND_SPEED = 150; // Adjust this value to control scroll speed. Higher is faster.
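// Together these two constants set the ticker pacing: fetchAndDisplayTickerData pads each cycle with Wikimedia items until (content width / PIXELS_PER_SECOND_SPEED) roughly reaches TARGET_BROADCAST_SECONDS.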
|
||||
let lastTickerUpdateTime = Date.now();
|
||||
let lastNewsData = null;
|
||||
let currentNewsUrl = 'https://wx.stoat.org/lsr.php?news3=potato';
|
||||
|
||||
let nextTickerHtml = null;
|
||||
let animationId = null;
|
||||
let tickerPosition = 0;
|
||||
let lastTime = 0;
|
||||
let tickerContent = document.getElementById('ticker-content');
|
||||
let injectionHtml = null;
|
||||
|
||||
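// animateTicker runs on requestAnimationFrame and advances the ticker by elapsed
// wall-clock time (PIXELS_PER_SECOND_SPEED * deltaTime), so the scroll speed is the
// same regardless of display refresh rate; once the content has fully scrolled off,
// updateTickerContent() swaps in the pre-built nextTickerHtml.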
function animateTicker(currentTime) {
|
||||
if (!lastTime) lastTime = currentTime;
|
||||
const deltaTime = (currentTime - lastTime) / 1000;
|
||||
lastTime = currentTime;
|
||||
tickerPosition -= PIXELS_PER_SECOND_SPEED * deltaTime;
|
||||
const scrollWidth = tickerContent.scrollWidth;
|
||||
if (tickerPosition <= -scrollWidth) {
|
||||
updateTickerContent();
|
||||
tickerPosition = 0;
|
||||
}
|
||||
tickerContent.style.transform = `translateX(${tickerPosition}px)`;
|
||||
animationId = requestAnimationFrame(animateTicker);
|
||||
}
|
||||
|
||||
function updateTickerContent() {
|
||||
if (nextTickerHtml) {
|
||||
tickerContent.innerHTML = nextTickerHtml;
|
||||
const containerWidth = document.getElementById('ticker-container').clientWidth;
|
||||
tickerPosition = -containerWidth;
|
||||
nextTickerHtml = null;
|
||||
// Fetch new
|
||||
fetchAndDisplayTickerData(false);
|
||||
}
|
||||
}
|
||||
|
||||
function fetchAndDisplayNews(url = 'https://wx.stoat.org/lsr.php?news3=potato') {
|
||||
// Update the current URL if provided
|
||||
if (url) {
|
||||
currentNewsUrl = url;
|
||||
}
|
||||
|
||||
$.getJSON(currentNewsUrl, function(newsData) {
|
||||
// Sort the data
|
||||
newsData.sort((a, b) => {
|
||||
if (b.impact_score !== a.impact_score) return b.impact_score - a.impact_score;
|
||||
return new Date(b.timeutc) - new Date(a.timeutc);
|
||||
});
|
||||
|
||||
// Check if data has changed
|
||||
if (lastNewsData && JSON.stringify(newsData) === JSON.stringify(lastNewsData)) {
|
||||
console.log('News data unchanged, skipping update');
|
||||
return;
|
||||
}
|
||||
|
||||
// Update cache
|
||||
lastNewsData = newsData;
|
||||
|
||||
if (isSadMode) {
|
||||
const topContainer = document.getElementById('top-stories-container');
|
||||
const bottomContainer = document.getElementById('bottom-stories-container');
|
||||
topContainer.innerHTML = '';
|
||||
bottomContainer.innerHTML = '';
|
||||
|
||||
const topStories = newsData.slice(0, NUM_TOP_STORIES);
|
||||
const scrollingStories = newsData.slice(NUM_TOP_STORIES);
|
||||
|
||||
function createScroller(stories, durationMultiplier, isBottomScroller = false) {
|
||||
if (stories.length === 0) return null;
|
||||
const scrollerInner = document.createElement('div');
|
||||
scrollerInner.className = 'scroller-inner';
|
||||
const contentBlock1 = document.createElement('div');
|
||||
contentBlock1.className = 'content-block';
|
||||
stories.forEach(news => contentBlock1.appendChild(createNewsItem(news)));
|
||||
const contentBlock2 = contentBlock1.cloneNode(true);
|
||||
contentBlock2.setAttribute('aria-hidden', 'true');
|
||||
scrollerInner.appendChild(contentBlock1);
|
||||
scrollerInner.appendChild(contentBlock2);
|
||||
const duration = stories.length * durationMultiplier;
|
||||
scrollerInner.style.animationName = 'continuous-scroll';
|
||||
scrollerInner.style.animationDuration = `${duration}s`;
|
||||
// Ensure no delay is applied to any scroller
|
||||
scrollerInner.style.animationDelay = '0s';
|
||||
return scrollerInner;
|
||||
}
|
||||
|
||||
const topScroller = createScroller(topStories, 7, false);
|
||||
if (topScroller) topContainer.appendChild(topScroller);
|
||||
|
||||
const bottomScroller = createScroller(scrollingStories, BOTTOM_SCROLLER_SPEED_MULTIPLIER, true);
|
||||
if (bottomScroller) bottomContainer.appendChild(bottomScroller);
|
||||
|
||||
} else {
|
||||
const fullContainer = document.getElementById('full-display-container');
|
||||
fullContainer.innerHTML = '';
|
||||
newsData.forEach(news => fullContainer.appendChild(createNewsItem(news)));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function createNewsItem(news) {
|
||||
const newsItem = document.createElement('div');
|
||||
newsItem.className = 'news-item';
|
||||
let score = news.impact_score;
|
||||
const storyTime = new Date(news.timeutc);
|
||||
const currentTime = new Date();
|
||||
const oneHourInMs = 3600000;
|
||||
|
||||
// Add flashRedOutline class if the story is less than 1 hour old
|
||||
if (currentTime - storyTime < oneHourInMs) {
|
||||
newsItem.classList.add('new-story-flash');
|
||||
}
|
||||
|
||||
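// Several thresholds can match at once; the relevance-* rules share specificity, so the one declared last in the stylesheet (relevance-cheech-high) wins and sets the visible background.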
const relevanceClasses = {
|
||||
'relevance-high': score > 15,
|
||||
'relevance-really-high': score > 25,
|
||||
'relevance-super-high': score > 50,
|
||||
'relevance-mazza-high': score > 90,
|
||||
'relevance-cheech-high': score > 150
|
||||
};
|
||||
Object.entries(relevanceClasses).filter(([, c]) => c).forEach(([cN]) => newsItem.classList.add(cN));
|
||||
|
||||
newsItem.innerHTML = `<a href="${news.storylink}" target="_blank"><img src="${news.imageurl}"></a><div class="news-content"><h2 class="headline"><a href="${news.storylink}" target="_blank" class="storylink">(${extractTextBetweenHttpAndCom(news.storylink)}) ${news.headline}</a></h2><p class="summary">${news.summary} ${convertPostgresTimestamp(news.timeutc)}L</p></div>`;
|
||||
return newsItem;
|
||||
}
|
||||
|
||||
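// Pulls the second-level domain out of the story link for the "(SOURCE)" prefix, e.g. "https://www.example.com/story" -> "EXAMPLE"; links that don't match the .com pattern fall back to "FAUXNEWS".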
function extractTextBetweenHttpAndCom(url) {
|
||||
url = url.replace(/www\./, '');
|
||||
const match = url.match(/https?:\/\/(.*?)\.com/);
|
||||
return match && match[1] ? match[1].toUpperCase() : 'FAUXNEWS';
|
||||
}
|
||||
|
||||
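// Stripping the trailing 'Z' makes Date parse the string as local time, so the clock digits stored in the timestamp come back out unchanged; createNewsItem appends an "L" suffix to the formatted value.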
function convertPostgresTimestamp(timestamp) {
|
||||
const d = new Date(timestamp.replace('Z', ''));
|
||||
return `${d.getFullYear()}-${String(d.getMonth()+1).padStart(2,'0')}-${String(d.getDate()).padStart(2,'0')} ${String(d.getHours()).padStart(2,'0')}:${String(d.getMinutes()).padStart(2,'0')}`;
|
||||
}
|
||||
|
||||
function toggleForm() {
|
||||
const formContent = document.getElementById("searchForm");
|
||||
const toggleBtn = document.querySelector(".toggle-btn");
|
||||
if (formContent.style.display === "none") {
|
||||
formContent.style.display = "block";
|
||||
toggleBtn.textContent = "Collapse News Search Dialog";
|
||||
if (refreshTimer) clearInterval(refreshTimer);
|
||||
// Stop ticker when form is expanded (not in SAD mode)
|
||||
if (animationId) {
|
||||
cancelAnimationFrame(animationId);
|
||||
animationId = null;
|
||||
}
|
||||
if (tickerWatchdogTimer) {
|
||||
clearInterval(tickerWatchdogTimer);
|
||||
tickerWatchdogTimer = null;
|
||||
}
|
||||
// Hide ticker when form is expanded
|
||||
updateTickerVisibility();
|
||||
} else {
|
||||
formContent.style.display = "none";
|
||||
toggleBtn.textContent = "Expand News Search Dialog";
|
||||
// Always use the current URL
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
refreshTimer = setInterval(() => {
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
}, 300000);
|
||||
// Update ticker visibility based on mode
|
||||
updateTickerVisibility();
|
||||
}
|
||||
}
|
||||
|
||||
function completelyHide() { document.getElementById("form-container").style.display = "none"; }
|
||||
|
||||
function updatenews() {
|
||||
const start = document.getElementById("startTime").value;
|
||||
const end = document.getElementById("endTime").value;
|
||||
const keyTerms = document.getElementById("keyTerms").value;
|
||||
const terms = keyTerms.split(',');
|
||||
let arrayterms = terms.map(term => `key[]=${encodeURIComponent(term)}`).join('&');
|
||||
const url = `lsr.php?newsarchive=true&start=${start}&end=${end}&${arrayterms}`;
|
||||
// Clear the cache to force an update
|
||||
lastNewsData = null;
|
||||
fetchAndDisplayNews(url);
|
||||
}
|
||||
|
||||
function sadCheck() {
|
||||
const params = new URLSearchParams(document.location.search);
|
||||
if (params.has("sad") || params.has("SAD")) {
|
||||
isSadMode = true;
|
||||
completelyHide();
|
||||
Object.assign(document.documentElement.style, {height: '100%'});
|
||||
Object.assign(document.body.style, {height: '100%', overflow: 'hidden', display: 'flex', flexDirection: 'column'});
|
||||
document.getElementById('sad-display-area').style.display = 'flex';
|
||||
document.getElementById('full-display-container').style.display = 'none';
|
||||
// Update ticker visibility and start when entering SAD mode
|
||||
updateTickerVisibility();
|
||||
fetchAndDisplayTickerData(true);
|
||||
startTickerWatchdog();
|
||||
}
|
||||
}
|
||||
|
||||
function format_date_with_ordinal(date) {
|
||||
const day = date.getDate();
|
||||
const month = date.toLocaleString('default', { month: 'long' });
|
||||
const get_ordinal_suffix = (day) => {
|
||||
if (day > 3 && day < 21) return 'th';
|
||||
switch (day % 10) {
|
||||
case 1: return "st";
|
||||
case 2: return "nd";
|
||||
case 3: return "rd";
|
||||
default: return "th";
|
||||
}
|
||||
};
|
||||
const suffix = get_ordinal_suffix(day);
|
||||
return `${month} ${day}${suffix}`;
|
||||
}
|
||||
|
||||
function fetchWikimediaEvents() {
|
||||
const now = new Date();
|
||||
|
||||
// Get the individual components
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0'); // Months are 0-indexed, so add 1
|
||||
const day = String(now.getDate()).padStart(2, '0');
|
||||
const hours = String(now.getHours()).padStart(2, '0');
|
||||
|
||||
// Concatenate them into the final string
|
||||
const formattedDate = `${year}${month}${day}${hours}`;
|
||||
|
||||
// Format today's date as YYYY-MM-DD to match the API response keys
|
||||
const todayFormatted = `${year}-${month}-${day}`;
|
||||
|
||||
const url = 'https://wx.stoat.org/calendar/wikimedia_onthisday.json';
|
||||
const cacheBustingUrl = url; //`${url}?v=${formattedDate}`;
|
||||
|
||||
return $.getJSON(cacheBustingUrl)
|
||||
.done(function(data) {
|
||||
if (data && typeof data === 'object') {
|
||||
// Collect events only from today
|
||||
let allEvents = [];
|
||||
|
||||
// Add today's events
|
||||
if (data[todayFormatted] && data[todayFormatted].events && Array.isArray(data[todayFormatted].events)) {
|
||||
allEvents = allEvents.concat(data[todayFormatted].events);
|
||||
}
|
||||
|
||||
if (allEvents.length > 0) {
|
||||
// Always shuffle and use all events
|
||||
window.currentWikimediaEvents = [...allEvents].sort(() => 0.5 - Math.random());
|
||||
} else {
|
||||
console.warn("No Wikimedia events found for today.");
|
||||
window.currentWikimediaEvents = [];
|
||||
}
|
||||
} else {
|
||||
console.warn("Wikimedia JSON is empty or invalid.");
|
||||
window.currentWikimediaEvents = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to load from ${cacheBustingUrl}. Status: ${textStatus}, Error: ${errorThrown}`);
|
||||
window.currentWikimediaEvents = [];
|
||||
});
|
||||
}
|
||||
|
||||
function fetchHolidays() {
|
||||
const now = new Date();
|
||||
|
||||
// Format today's date as YYYY-MM-DD to match the API response keys
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(now.getDate()).padStart(2, '0');
|
||||
const todayFormatted = `${year}-${month}-${day}`;
|
||||
|
||||
// Get the individual components for cache busting
|
||||
const hours = String(now.getHours()).padStart(2, '0');
|
||||
const formattedDate = `${year}${month}${day}${hours}`;
|
||||
|
||||
const url = 'https://calendar.wx4rlx.org/get_holidays.py';
|
||||
const cacheBustingUrl = `${url}?time=${formattedDate}`;
|
||||
|
||||
return $.getJSON(cacheBustingUrl)
|
||||
.done(function(data) {
|
||||
if (data && data.holidays) {
|
||||
// Get holidays for today using the formatted date as the key
|
||||
window.currentHolidays = data.holidays[todayFormatted] || [];
|
||||
|
||||
// Store upcoming holidays for the next 9 days
|
||||
upcomingHolidays = [];
|
||||
for (let i = 1; i <= 9; i++) {
|
||||
const nextDate = new Date(now);
|
||||
nextDate.setDate(now.getDate() + i);
|
||||
const nextYear = nextDate.getFullYear();
|
||||
const nextMonth = String(nextDate.getMonth() + 1).padStart(2, '0');
|
||||
const nextDay = String(nextDate.getDate()).padStart(2, '0');
|
||||
const nextFormatted = `${nextYear}-${nextMonth}-${nextDay}`;
|
||||
|
||||
const nextHolidays = data.holidays[nextFormatted] || [];
|
||||
if (nextHolidays.length > 0) {
|
||||
upcomingHolidays.push({
|
||||
date: nextDate,
|
||||
holidays: nextHolidays
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
console.warn("Holidays JSON is empty, invalid, or does not contain 'holidays'.");
|
||||
window.currentHolidays = [];
|
||||
upcomingHolidays = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to load holidays from ${cacheBustingUrl}. Status: ${textStatus}, Error: ${errorThrown}`);
|
||||
window.currentHolidays = [];
|
||||
upcomingHolidays = [];
|
||||
});
|
||||
}
|
||||
|
||||
function fetchTriviaQuestions() {
|
||||
const now = Date.now();
|
||||
// Check if we need to fetch new questions and respect the 5-second rate limit
|
||||
if (triviaQuestions.length > 10 || now - lastTriviaFetchTime < 5000) {
|
||||
return $.Deferred().resolve().promise();
|
||||
}
|
||||
|
||||
lastTriviaFetchTime = now;
|
||||
const url = 'https://opentdb.com/api.php?amount=50&type=multiple';
|
||||
return $.getJSON(url)
|
||||
.done(function(data) {
|
||||
if (data.response_code === 0 && data.results) {
|
||||
triviaQuestions = data.results;
|
||||
console.log(`Fetched ${triviaQuestions.length} trivia questions`);
|
||||
} else {
|
||||
console.warn('Trivia API returned non-zero response code or no results');
|
||||
triviaQuestions = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to fetch trivia questions: ${textStatus}, ${errorThrown}`);
|
||||
triviaQuestions = [];
|
||||
});
|
||||
}
|
||||
|
||||
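// The "injection" is an optional one-off snippet served as plain text by onetime.py; when present it is placed at the front of the next ticker cycle and then cleared so it airs only once per fetch.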
function fetchInjection() {
|
||||
const injectionApiUrl = 'https://calendar.wx4rlx.org/onetime.py?action=api';
|
||||
return $.ajax({
|
||||
url: injectionApiUrl,
|
||||
dataType: 'text' // Treat the response as plain text
|
||||
})
|
||||
.done(function(data) {
|
||||
if (data && data.trim().length > 0) {
|
||||
injectionHtml = data;
|
||||
console.log('Injection content fetched:', injectionHtml);
|
||||
} else {
|
||||
injectionHtml = null;
|
||||
}
|
||||
})
|
||||
.fail(function() {
|
||||
console.log('No injection content available or error fetching');
|
||||
injectionHtml = null;
|
||||
});
|
||||
}
|
||||
|
||||
function fetchAndDisplayTickerData(startImmediately = true) {
|
||||
// First, fetch the injection content
|
||||
fetchInjection().always(function() {
|
||||
// Then fetch other data
|
||||
$.when(fetchWikimediaEvents(), fetchHolidays(), fetchTriviaQuestions()).always(function() {
|
||||
const tickerApiUrl = 'https://calendar.wx4rlx.org/?action=api';
|
||||
$.getJSON(tickerApiUrl, function(data) {
|
||||
if (data.status !== 'success') return;
|
||||
updateTickerLastUpdateTime();
|
||||
|
||||
// Always update the ticker with fresh data
|
||||
const today = new Date();
|
||||
const currentYear = today.getFullYear();
|
||||
const formatted_date = format_date_with_ordinal(today);
|
||||
const tickerContent = $('#ticker-content');
|
||||
|
||||
let localItems = [];
|
||||
data.events.forEach(item => localItems.push({ date: item.date, text: item.event, type: 'event' }));
|
||||
data.weather_reports.forEach(item => localItems.push({ date: item.date, text: item.report, type: 'report' }));
|
||||
// Add xmacis records
|
||||
if (data.xmacis_records) {
|
||||
data.xmacis_records.forEach(item => {
|
||||
// Extract year from the date field
|
||||
const year = parseInt(item.date.split('-')[0]);
|
||||
localItems.push({
|
||||
date: item.date,
|
||||
text: item.description,
|
||||
type: 'report', // Use 'report' type to match weather_reports color scheme
|
||||
year: year
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Increment cycle count
|
||||
tickerCycleCount++;
|
||||
|
||||
// Add upcoming events based on cycle count
|
||||
let upcomingEventItem = null;
|
||||
if (upcomingHolidays.length > 0) {
|
||||
// Every 10 cycles takes priority over every other cycle
|
||||
if (tickerCycleCount % 10 === 0) {
|
||||
const nextFiveDays = upcomingHolidays.slice(0, 5);
|
||||
let upcomingTexts = [];
|
||||
nextFiveDays.forEach(day => {
|
||||
const formattedDay = format_date_with_ordinal(day.date);
|
||||
upcomingTexts.push(`${formattedDay} - ${day.holidays.join(', ')}`);
|
||||
});
|
||||
if (upcomingTexts.length > 0) {
|
||||
upcomingEventItem = {
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: 'Upcoming Special Days: ' + upcomingTexts.join('; '),
|
||||
type: 'upcoming',
|
||||
year: 'Upcoming'
|
||||
};
|
||||
}
|
||||
}
|
||||
// Only show tomorrow's events if it's an even cycle AND not a multiple of 10
|
||||
else if (tickerCycleCount % 2 === 0) {
|
||||
const tomorrow = upcomingHolidays[0];
|
||||
if (tomorrow) {
|
||||
upcomingEventItem = {
|
||||
date: tomorrow.date.toISOString().split('T')[0],
|
||||
text: 'Upcoming Special Days: Tomorrow - ' + tomorrow.holidays.join(', '),
|
||||
type: 'upcoming',
|
||||
year: 'Tomorrow'
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add today's holidays to local items
|
||||
if (window.currentHolidays && window.currentHolidays.length > 0) {
|
||||
localItems.push({
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: 'Special Days: ' + window.currentHolidays.join(', '),
|
||||
type: 'holiday',
|
||||
year: 'Today'
|
||||
});
|
||||
}
|
||||
|
||||
// Add injection HTML at the beginning if available
|
||||
// Use the injectionHtml that was fetched at the start of this function
|
||||
if (injectionHtml) {
|
||||
localItems.unshift({
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: injectionHtml,
|
||||
type: 'injection',
|
||||
year: 'INJECTION'
|
||||
});
|
||||
}
|
||||
|
||||
// Add upcoming event at the very end if it exists
|
||||
if (upcomingEventItem) {
|
||||
localItems.push(upcomingEventItem);
|
||||
}
|
||||
|
||||
// Sort items by year
|
||||
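// Non-numeric "years" (INJECTION, Today, Tomorrow, Upcoming) map to Infinity so they sort after every dated item.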
localItems.sort((a, b) => {
|
||||
const getYear = (item) => {
|
||||
if (item.year !== undefined) {
|
||||
// Handle 'INJECTION', 'Today', 'Tomorrow', 'Upcoming' etc.
|
||||
if (isNaN(Number(item.year))) return Infinity;
|
||||
return Number(item.year);
|
||||
}
|
||||
return Number(item.date.split('-')[0]);
|
||||
};
|
||||
|
||||
const yearA = getYear(a);
|
||||
const yearB = getYear(b);
|
||||
|
||||
if (isNaN(yearA) && isNaN(yearB)) return 0;
|
||||
if (isNaN(yearA)) return 1;
|
||||
if (isNaN(yearB)) return -1;
|
||||
|
||||
return yearA - yearB;
|
||||
});
|
||||
|
||||
// Calculate duration of local items
|
||||
const tempLocalHtml = buildTickerHtml(localItems, currentYear);
|
||||
tickerContent.html(tempLocalHtml);
|
||||
const localWidth = tickerContent[0].scrollWidth;
|
||||
const localDuration = localWidth / PIXELS_PER_SECOND_SPEED;
|
||||
|
||||
// Determine number of Wikimedia items to add
|
||||
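// Sizing approach: temporarily render a few sample Wikimedia items in the ticker
// element to measure an average item width, convert that width to seconds at
// PIXELS_PER_SECOND_SPEED, and add as many items as fit in the time remaining
// before TARGET_BROADCAST_SECONDS.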
let numToSprinkle = 0;
|
||||
if (window.currentWikimediaEvents && window.currentWikimediaEvents.length > 0) {
|
||||
// Always include at least one item
|
||||
numToSprinkle = 1;
|
||||
|
||||
// Calculate average width per Wikimedia item using the first few events
|
||||
const sampleEvents = window.currentWikimediaEvents.slice(0, Math.min(5, window.currentWikimediaEvents.length));
|
||||
let totalWidth = 0;
|
||||
sampleEvents.forEach(event => {
|
||||
const tempWikiItem = { date: `${event.year}-01-01`, text: event.text, type: 'wikimedia', year: event.year };
|
||||
tickerContent.html(buildTickerHtml([tempWikiItem], currentYear));
|
||||
totalWidth += tickerContent[0].scrollWidth;
|
||||
});
|
||||
|
||||
const avgWikiWidth = totalWidth / sampleEvents.length;
|
||||
const timePerWikiItem = avgWikiWidth / PIXELS_PER_SECOND_SPEED;
|
||||
|
||||
// Add more items if there's time
|
||||
const durationGap = TARGET_BROADCAST_SECONDS - localDuration;
|
||||
if (timePerWikiItem > 0 && durationGap > timePerWikiItem) {
|
||||
const additionalItems = Math.floor((durationGap - timePerWikiItem) / timePerWikiItem);
|
||||
numToSprinkle += Math.max(0, additionalItems);
|
||||
}
|
||||
numToSprinkle = Math.min(numToSprinkle, window.currentWikimediaEvents.length);
|
||||
}
|
||||
|
||||
// Add Wikimedia items to local items
|
||||
if (numToSprinkle > 0 && window.currentWikimediaEvents && window.currentWikimediaEvents.length > 0) {
|
||||
const eventsToAdd = window.currentWikimediaEvents.slice(0, numToSprinkle);
|
||||
eventsToAdd.forEach(event => {
|
||||
localItems.push({ date: `${event.year}-01-01`, text: event.text, type: 'wikimedia', year: event.year });
|
||||
});
|
||||
|
||||
// Re-sort with the new items
|
||||
localItems.sort((a, b) => {
|
||||
const getYear = (item) => {
|
||||
if (item.year !== undefined) {
|
||||
if (isNaN(Number(item.year))) return Infinity;
|
||||
return Number(item.year);
|
||||
}
|
||||
return Number(item.date.split('-')[0]);
|
||||
};
|
||||
|
||||
const yearA = getYear(a);
|
||||
const yearB = getYear(b);
|
||||
|
||||
if (isNaN(yearA) && isNaN(yearB)) return 0;
|
||||
if (isNaN(yearA)) return 1;
|
||||
if (isNaN(yearB)) return -1;
|
||||
|
||||
return yearA - yearB;
|
||||
});
|
||||
}
|
||||
|
||||
const finalContentHtml = buildTickerHtml(localItems, currentYear, formatted_date);
|
||||
|
||||
// Set the content
|
||||
tickerContent.html(finalContentHtml);
|
||||
nextTickerHtml = finalContentHtml; // For next update
|
||||
|
||||
if (startImmediately) {
|
||||
if (!animationId) {
|
||||
animationId = requestAnimationFrame(animateTicker);
|
||||
}
|
||||
// Clear injectionHtml after using it to prevent reuse in next cycle
|
||||
injectionHtml = null;
|
||||
// Fetch new for next cycle, which will fetch a new injection
|
||||
fetchAndDisplayTickerData(false);
|
||||
}
|
||||
|
||||
}).fail(function() {
|
||||
console.error("Failed to fetch data for the horizontal ticker.");
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function buildTickerHtml(items, currentYear, formatted_date) {
|
||||
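// The leading 100vw spacer keeps the first item off-screen to the right when the ticker starts at translateX(0); after a content swap, updateTickerContent resets the position to -containerWidth, which roughly cancels this spacer.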
let contentHtml = `<span style="display: inline-block; width: 100vw;"></span>`;
|
||||
|
||||
// First, add injection items at the very beginning
|
||||
const injectionItems = items.filter(item => item.type === 'injection');
|
||||
injectionItems.forEach(item => {
|
||||
contentHtml += `<span>${item.text}</span>`;
|
||||
});
|
||||
|
||||
// Add the "On This Day" header
|
||||
if (formatted_date) {
|
||||
contentHtml += `<span><span class="ticker-event">On This Day, ${formatted_date}:</span></span>`;
|
||||
}
|
||||
|
||||
// Add all other items (excluding injection items which we've already added)
|
||||
items.filter(item => item.type !== 'injection').forEach(item => {
|
||||
const year = item.year || parseInt(item.date.split('-')[0]);
|
||||
let textClass = `ticker-${item.type}`;
|
||||
// Add a specific class for holidays and upcoming events
|
||||
if (item.type === 'holiday') {
|
||||
textClass = 'ticker-holiday';
|
||||
} else if (item.type === 'upcoming') {
|
||||
textClass = 'ticker-upcoming';
|
||||
}
|
||||
const yearDiff = currentYear - year;
|
||||
let anniversaryPrefix = '';
|
||||
// Only show anniversary for positive year differences (past events)
|
||||
// Skip for holiday type
|
||||
if (item.type !== 'holiday' && yearDiff > 0 && yearDiff % 5 === 0) {
|
||||
anniversaryPrefix = `<span style="color: yellow; font-weight: bold;">${yearDiff} Years Ago: </span>`;
|
||||
}
|
||||
let itemText = item.text;
|
||||
let yearText = year;
|
||||
const arbitraryLength = 500;
|
||||
if (item.text.length > arbitraryLength) {
|
||||
const mazzaImgTag = '<img src="mazza.png" alt="Mazza" style="height: 1.2em; vertical-align: middle; margin: 0 0.3em;">';
|
||||
const imageCount = Math.floor((item.text.length - arbitraryLength) / 200);
|
||||
const imageTags = mazzaImgTag.repeat(imageCount);
|
||||
yearText = imageTags ? `${imageTags} ${year}` : year;
|
||||
}
|
||||
// For holidays and upcoming events, don't show the year prefix
|
||||
if (item.type === 'holiday' || item.type === 'upcoming') {
|
||||
contentHtml += `<span><span class="${textClass}">${itemText}</span></span>`;
|
||||
} else {
|
||||
contentHtml += `<span>${anniversaryPrefix}<span class="ticker-year">${yearText}:</span> <span class="${textClass}">${itemText}</span></span>`;
|
||||
}
|
||||
});
|
||||
|
||||
if (formatted_date) {
|
||||
contentHtml += `<span><span class="ticker-event">Office/Local Event</span></span>`;
|
||||
contentHtml += `<span><span class="ticker-wikimedia">World Event</span></span>`;
|
||||
contentHtml += `<span><span class="ticker-report">Local Weather Event</span></span>`;
|
||||
|
||||
// Add trivia question if available
|
||||
if (triviaQuestions.length > 0) {
|
||||
const trivia = triviaQuestions.shift();
|
||||
// Decode HTML entities in question and answers
|
||||
const question = $('<div>').html(trivia.question).text();
|
||||
const correctAnswer = $('<div>').html(trivia.correct_answer).text();
|
||||
const allAnswers = [correctAnswer, ...trivia.incorrect_answers.map(ans => $('<div>').html(ans).text())];
|
||||
|
||||
// Shuffle answers
|
||||
for (let i = allAnswers.length - 1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random() * (i + 1));
|
||||
[allAnswers[i], allAnswers[j]] = [allAnswers[j], allAnswers[i]];
|
||||
}
|
||||
|
||||
// Build question with choices
|
||||
let questionHtml = `<span style="color: gold;">TRIVIA: ${question} `;
|
||||
const choices = ['A', 'B', 'C', 'D'];
|
||||
allAnswers.forEach((answer, index) => {
|
||||
questionHtml += `${choices[index]}) ${answer} `;
|
||||
});
|
||||
questionHtml += `</span>`;
|
||||
contentHtml += `<span>${questionHtml}</span>`;
|
||||
|
||||
// Store the correct answer for later display
|
||||
// Find which choice corresponds to the correct answer
|
||||
const correctIndex = allAnswers.indexOf(correctAnswer);
|
||||
const correctChoice = choices[correctIndex];
|
||||
window.lastTriviaAnswer = { correctChoice, correctAnswer };
|
||||
}
|
||||
|
||||
contentHtml += `<span><span class="ticker-event">Visit <b>calendar.wx4rlx.org</b> to make updates or see info for upcoming days!</span></span>`;
|
||||
|
||||
// Add trivia answer if available
|
||||
if (window.lastTriviaAnswer) {
|
||||
const { correctChoice, correctAnswer } = window.lastTriviaAnswer;
|
||||
contentHtml += `<span><span style="color: gold;">ANSWER: ${correctChoice}) ${correctAnswer}</span></span>`;
|
||||
// Clear the answer after displaying it
|
||||
window.lastTriviaAnswer = null;
|
||||
}
|
||||
}
|
||||
return contentHtml;
|
||||
}
|
||||
|
||||
function startTickerWatchdog() {
|
||||
// Clear any existing watchdog
|
||||
if (tickerWatchdogTimer) {
|
||||
clearInterval(tickerWatchdogTimer);
|
||||
}
|
||||
// Check every 30 seconds if the ticker hasn't updated in 3x the expected duration
|
||||
tickerWatchdogTimer = setInterval(() => {
|
||||
const timeSinceLastUpdate = Date.now() - lastTickerUpdateTime;
|
||||
const maxAllowedTime = (TARGET_BROADCAST_SECONDS + 5) * 3 * 1000; // 3x expected duration in ms
|
||||
if (timeSinceLastUpdate > maxAllowedTime) {
|
||||
console.warn('Ticker watchdog triggered - forcing refresh');
|
||||
fetchAndDisplayTickerData(true);
|
||||
}
|
||||
}, 30000);
|
||||
}
|
||||
|
||||
function updateTickerLastUpdateTime() {
|
||||
lastTickerUpdateTime = Date.now();
|
||||
}
|
||||
|
||||
function updateTickerVisibility() {
|
||||
const tickerContainer = document.getElementById('ticker-container');
|
||||
if (isSadMode) {
|
||||
tickerContainer.style.display = 'block';
|
||||
} else {
|
||||
tickerContainer.style.display = 'none';
|
||||
// Stop animation and watchdog when hiding
|
||||
if (animationId) {
|
||||
cancelAnimationFrame(animationId);
|
||||
animationId = null;
|
||||
}
|
||||
if (tickerWatchdogTimer) {
|
||||
clearInterval(tickerWatchdogTimer);
|
||||
tickerWatchdogTimer = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Debug function to print ticker info and force next cycle
|
||||
window.debugTicker = function() {
|
||||
console.log('=== TICKER DEBUG INFO ===');
|
||||
console.log('Current cycle count:', tickerCycleCount);
|
||||
console.log('Upcoming holidays:', upcomingHolidays);
|
||||
console.log('Current Wikimedia events count:', window.currentWikimediaEvents ? window.currentWikimediaEvents.length : 0);
|
||||
console.log('Current holidays:', window.currentHolidays);
|
||||
console.log('Next ticker HTML length:', nextTickerHtml ? nextTickerHtml.length : 'null');
|
||||
console.log('Current ticker content:', document.getElementById('ticker-content').innerHTML);
|
||||
|
||||
// Force next cycle
|
||||
console.log('Forcing next ticker cycle...');
|
||||
tickerCycleCount++;
|
||||
fetchAndDisplayTickerData(true);
|
||||
};
|
||||
|
||||
sadCheck();
|
||||
toggleForm();
|
||||
refreshTimer = setInterval(() => {
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
}, 300000);
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
// Show/hide ticker based on initial mode
|
||||
updateTickerVisibility();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
854
news4.html
Normal file
854
news4.html
Normal file
@@ -0,0 +1,854 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<script src="https://code.jquery.com/ui/1.13.1/jquery-ui.js" integrity="sha256-6XMVI0zB8cRzfZjqKcD01PBsAy3FlDASrlC8SxCpInY=" crossorigin="anonymous"></script>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>RLX News</title>
|
||||
<style>
|
||||
/* --- Flexbox Sticky Footer --- */
|
||||
html {
|
||||
height: 100%;
|
||||
}
|
||||
body {
|
||||
min-height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html, body {
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: #f4f4f4;
|
||||
padding: 10px;
|
||||
box-sizing: border-box;
|
||||
padding-bottom: 0;
|
||||
}
|
||||
|
||||
#full-display-container, #sad-display-area {
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
.form-container {
|
||||
max-width: 800px;
|
||||
position: relative;
|
||||
padding: 15px;
|
||||
background: white;
|
||||
margin-bottom: 10px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.toggle-btn, button { font-size: 20px; padding: 8px 15px; cursor: pointer; }
|
||||
button { background-color: #4CAF50; color: white; border: none; border-radius: 4px; }
|
||||
button:hover { background-color: #45a049; }
|
||||
.form-group label { font-size: 20px; margin-bottom: 5px; display: block; }
|
||||
input, textarea { width: 100%; padding: 10px; font-size: 18px; box-sizing: border-box; }
|
||||
|
||||
#full-display-container {
|
||||
max-width: 100%;
|
||||
margin: 0 auto;
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
#sad-display-area {
|
||||
flex-grow: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
#top-stories-container, #bottom-stories-container {
|
||||
flex-grow: 1;
|
||||
overflow: hidden;
|
||||
min-height: 0;
|
||||
}
|
||||
#top-stories-container { flex-basis: 50%; flex-shrink: 0; }
|
||||
#bottom-stories-container { background: #e9e9e9; padding: 5px; border-radius: 8px; }
|
||||
|
||||
.scroller-inner {
|
||||
animation-name: continuous-scroll;
|
||||
animation-timing-function: linear;
|
||||
animation-iteration-count: infinite;
|
||||
}
|
||||
|
||||
.content-block {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
@keyframes continuous-scroll {
|
||||
0% {
|
||||
transform: translateY(0);
|
||||
}
|
||||
100% {
|
||||
transform: translateY(-50%);
|
||||
}
|
||||
}
|
||||
|
||||
.news-item { background: white; padding: 5px; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); display: flex; align-items: center; }
|
||||
.news-item img { width: 150px; height: 100px; object-fit: cover; border-radius: 6px; margin-right: 10px; }
|
||||
.news-content { flex: 2; }
|
||||
.headline { color: #333; font-size: 36px; margin: 0 0 5px 0; line-height: 1.0; font-weight: bold; }
|
||||
.summary { color: #666; font-size: 28px; margin: 0 0 5px 0; line-height: 1.0; display: -webkit-box; -webkit-line-clamp: 4; -webkit-box-orient: vertical; overflow: hidden; /* required for the 4-line clamp to actually clip */ }
|
||||
.storylink { color: #007BFF; text-decoration: none; }
|
||||
.storylink:hover { text-decoration: underline; }
|
||||
|
||||
.relevance-high { background-color: lightblue; }
|
||||
.relevance-really-high { background-color: cyan; }
|
||||
.relevance-super-high { background-color: yellow; }
|
||||
.relevance-mazza-high { background-color: orange; }
|
||||
.relevance-cheech-high { background-color: #FF8790; }
|
||||
|
||||
@keyframes flashRedOutline {
|
||||
0% { outline: 7px solid red; }
|
||||
50% { outline: 7px solid transparent; }
|
||||
100% { outline: 10px solid red; }
|
||||
}
|
||||
.new-story-flash {
|
||||
animation: flashRedOutline 2s linear infinite;
|
||||
border-radius: 8px; /* Match the news-item border-radius */
|
||||
}
|
||||
|
||||
#ticker-container {
|
||||
width: 100%;
|
||||
background-color: black;
|
||||
overflow: hidden;
|
||||
padding: 5px 0;
|
||||
box-sizing: border-box;
|
||||
z-index: 1000;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
#ticker-content {
|
||||
display: inline-block;
|
||||
white-space: nowrap;
|
||||
/* animation removed, now handled by JS */
|
||||
}
|
||||
|
||||
#ticker-content > span {
|
||||
margin: 0 20px;
|
||||
font-size: 30px;
|
||||
}
|
||||
|
||||
.ticker-year { color: lightgray; }
|
||||
.ticker-event { color: white; }
|
||||
.ticker-report { color: lightblue; }
|
||||
.ticker-wikimedia { color: lightpink; } /* Style for Wikimedia events */
|
||||
.ticker-holiday { color: lightgreen; } /* Style for holiday events */
|
||||
.ticker-upcoming { color: cyan; } /* Style for upcoming events */
|
||||
|
||||
@media (max-width: 768px) {
|
||||
#full-display-container {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
.content-block {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="form-container" id="form-container">
|
||||
<button class="toggle-btn" onclick="toggleForm();">Expand News Search Dialog</button>
|
||||
<div id="searchForm" style="display: none;">
|
||||
<div class="form-group"><label for="startTime">Start Time (Zulu/UTC):</label><input type="datetime-local" id="startTime" name="startTime" required></div>
|
||||
<div class="form-group"><label for="endTime">End Time (Zulu/UTC):</label><input type="datetime-local" id="endTime" name="endTime" required></div>
|
||||
<div class="form-group"><label for="keyTerms">Key Terms (comma-separated):</label><textarea id="keyTerms" name="keyTerms" rows="3" required>weather,flood,fire,fog,snow,emergency,wind,ice,rain,power,explosion,warmer,colder,drown,stream,river,air,wind,destroyed,rime,glaze,river,ice,creek,crash,thunder,spinup,black ice,fog,spill,pileup,pile-up,gust,frozen,funnel,rainfall,fatal,injury,sleet,injured,frost,dead,death,landslide,culvert,slippery,wildfire,tornado,blizzard,creek,hail,thunderstorm,downburst,microburst,crash,heatstroke,derecho,lightning,hypothermia,slide,flow,ski,water,innundation,victim,victims,flooding,flooded,snowing,freezing rain,clouds,cloud,storm,aircraft</textarea></div>
|
||||
<button onclick="updatenews();">Submit</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="full-display-container"></div>
|
||||
|
||||
<div class="display-area" id="sad-display-area" style="display: none;">
|
||||
<div id="top-stories-container"></div>
|
||||
<div id="bottom-stories-container"></div>
|
||||
</div>
|
||||
|
||||
<div id="ticker-container">
|
||||
<div id="ticker-content"></div>
|
||||
</div>
|
||||
<script>
|
||||
let lastTickerData = null;
|
||||
let isSadMode = false;
|
||||
var refreshTimer;
|
||||
var tickerWatchdogTimer;
|
||||
const NUM_TOP_STORIES = 8;
|
||||
const BOTTOM_SCROLLER_SPEED_MULTIPLIER = 2;
|
||||
window.currentWikimediaEvents = []; // Initialize the global variable
|
||||
let tickerCycleCount = 0;
|
||||
let upcomingHolidays = [];
|
||||
let triviaQuestions = [];
|
||||
let lastTriviaFetchTime = 0;
|
||||
|
||||
// Always fetch fresh Wikimedia events
|
||||
// No need to cache them between cycles
|
||||
|
||||
const TARGET_BROADCAST_SECONDS = 150;
|
||||
const PIXELS_PER_SECOND_SPEED = 150; // Adjust this value to control scroll speed. Higher is faster.
|
||||
let lastTickerUpdateTime = Date.now();
|
||||
let lastNewsData = null;
|
||||
let currentNewsUrl = 'https://wx.stoat.org/lsr.php?news3=potato';
|
||||
|
||||
let nextTickerHtml = null;
|
||||
let animationId = null;
|
||||
let tickerPosition = 0;
|
||||
let lastTime = 0;
|
||||
let tickerContent = document.getElementById('ticker-content');
|
||||
let injectionHtml = null;
|
||||
|
||||
function animateTicker(currentTime) {
|
||||
if (!lastTime) lastTime = currentTime;
|
||||
const deltaTime = (currentTime - lastTime) / 1000;
|
||||
lastTime = currentTime;
|
||||
tickerPosition -= PIXELS_PER_SECOND_SPEED * deltaTime;
|
||||
const scrollWidth = tickerContent.scrollWidth;
|
||||
if (tickerPosition <= -scrollWidth) {
|
||||
updateTickerContent();
|
||||
tickerPosition = 0;
|
||||
}
|
||||
tickerContent.style.transform = `translateX(${tickerPosition}px)`;
|
||||
animationId = requestAnimationFrame(animateTicker);
|
||||
}
|
||||
|
||||
function updateTickerContent() {
|
||||
if (nextTickerHtml) {
|
||||
tickerContent.innerHTML = nextTickerHtml;
|
||||
const containerWidth = document.getElementById('ticker-container').clientWidth;
|
||||
tickerPosition = -containerWidth;
|
||||
nextTickerHtml = null;
|
||||
// Fetch new
|
||||
fetchAndDisplayTickerData(false);
|
||||
}
|
||||
}
|
||||
|
||||
function fetchAndDisplayNews(url = 'https://wx.stoat.org/lsr.php?news3=potato') {
|
||||
// Update the current URL if provided
|
||||
if (url) {
|
||||
currentNewsUrl = url;
|
||||
}
|
||||
|
||||
$.getJSON(currentNewsUrl, function(newsData) {
|
||||
// Sort the data
|
||||
newsData.sort((a, b) => {
|
||||
if (b.impact_score !== a.impact_score) return b.impact_score - a.impact_score;
|
||||
return new Date(b.timeutc) - new Date(a.timeutc);
|
||||
});
|
||||
|
||||
// Check if data has changed
|
||||
if (lastNewsData && JSON.stringify(newsData) === JSON.stringify(lastNewsData)) {
|
||||
console.log('News data unchanged, skipping update');
|
||||
return;
|
||||
}
|
||||
|
||||
// Update cache
|
||||
lastNewsData = newsData;
|
||||
|
||||
if (isSadMode) {
|
||||
const topContainer = document.getElementById('top-stories-container');
|
||||
const bottomContainer = document.getElementById('bottom-stories-container');
|
||||
topContainer.innerHTML = '';
|
||||
bottomContainer.innerHTML = '';
|
||||
|
||||
const topStories = newsData.slice(0, NUM_TOP_STORIES);
|
||||
const scrollingStories = newsData.slice(NUM_TOP_STORIES);
|
||||
|
||||
function createScroller(stories, durationMultiplier, isBottomScroller = false) {
|
||||
if (stories.length === 0) return null;
|
||||
const scrollerInner = document.createElement('div');
|
||||
scrollerInner.className = 'scroller-inner';
|
||||
const contentBlock1 = document.createElement('div');
|
||||
contentBlock1.className = 'content-block';
|
||||
stories.forEach(news => contentBlock1.appendChild(createNewsItem(news)));
|
||||
const contentBlock2 = contentBlock1.cloneNode(true);
|
||||
contentBlock2.setAttribute('aria-hidden', 'true');
|
||||
scrollerInner.appendChild(contentBlock1);
|
||||
scrollerInner.appendChild(contentBlock2);
|
||||
const duration = stories.length * durationMultiplier;
|
||||
scrollerInner.style.animationName = 'continuous-scroll';
|
||||
scrollerInner.style.animationDuration = `${duration}s`;
|
||||
// Ensure no delay is applied to any scroller
|
||||
scrollerInner.style.animationDelay = '0s';
|
||||
return scrollerInner;
|
||||
}
|
||||
|
||||
const topScroller = createScroller(topStories, 7, false);
|
||||
if (topScroller) topContainer.appendChild(topScroller);
|
||||
|
||||
const bottomScroller = createScroller(scrollingStories, BOTTOM_SCROLLER_SPEED_MULTIPLIER, true);
|
||||
if (bottomScroller) bottomContainer.appendChild(bottomScroller);
|
||||
|
||||
} else {
|
||||
const fullContainer = document.getElementById('full-display-container');
|
||||
fullContainer.innerHTML = '';
|
||||
newsData.forEach(news => fullContainer.appendChild(createNewsItem(news)));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function createNewsItem(news) {
|
||||
const newsItem = document.createElement('div');
|
||||
newsItem.className = 'news-item';
|
||||
let score = news.impact_score;
|
||||
const storyTime = new Date(news.timeutc);
|
||||
const currentTime = new Date();
|
||||
const oneHourInMs = 3600000;
|
||||
|
||||
// Add flashRedOutline class if the story is less than 1 hour old
|
||||
if (currentTime - storyTime < oneHourInMs) {
|
||||
newsItem.classList.add('new-story-flash');
|
||||
}
|
||||
|
||||
const relevanceClasses = {
|
||||
'relevance-high': score > 15,
|
||||
'relevance-really-high': score > 25,
|
||||
'relevance-super-high': score > 50,
|
||||
'relevance-mazza-high': score > 90,
|
||||
'relevance-cheech-high': score > 150
|
||||
};
|
||||
Object.entries(relevanceClasses).filter(([, c]) => c).forEach(([cN]) => newsItem.classList.add(cN));
|
||||
|
||||
newsItem.innerHTML = `<a href="${news.storylink}" target="_blank"><img src="${news.imageurl}"></a><div class="news-content"><h2 class="headline"><a href="${news.storylink}" target="_blank" class="storylink">(${extractTextBetweenHttpAndCom(news.storylink)}) ${news.headline}</a></h2><p class="summary">${news.summary} ${convertPostgresTimestamp(news.timeutc)}L</p></div>`;
|
||||
return newsItem;
|
||||
}
|
||||
|
||||
function extractTextBetweenHttpAndCom(url) {
|
||||
url = url.replace(/www\./, '');
|
||||
const match = url.match(/https?:\/\/(.*?)\.com/);
|
||||
return match && match[1] ? match[1].toUpperCase() : 'FAUXNEWS';
|
||||
}
|
||||
|
||||
function convertPostgresTimestamp(timestamp) {
|
||||
const d = new Date(timestamp.replace('Z', ''));
|
||||
return `${d.getFullYear()}-${String(d.getMonth()+1).padStart(2,'0')}-${String(d.getDate()).padStart(2,'0')} ${String(d.getHours()).padStart(2,'0')}:${String(d.getMinutes()).padStart(2,'0')}`;
|
||||
}
|
||||
|
||||
function toggleForm() {
|
||||
const formContent = document.getElementById("searchForm");
|
||||
const toggleBtn = document.querySelector(".toggle-btn");
|
||||
if (formContent.style.display === "none") {
|
||||
formContent.style.display = "block";
|
||||
toggleBtn.textContent = "Collapse News Search Dialog";
|
||||
if (refreshTimer) clearInterval(refreshTimer);
|
||||
} else {
|
||||
formContent.style.display = "none";
|
||||
toggleBtn.textContent = "Expand News Search Dialog";
|
||||
// Always use the current URL
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
refreshTimer = setInterval(() => {
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
}, 300000);
|
||||
}
|
||||
}
|
||||
|
||||
function completelyHide() { document.getElementById("form-container").style.display = "none"; }
|
||||
|
||||
function updatenews() {
|
||||
const start = document.getElementById("startTime").value;
|
||||
const end = document.getElementById("endTime").value;
|
||||
const keyTerms = document.getElementById("keyTerms").value;
|
||||
const terms = keyTerms.split(',');
|
||||
let arrayterms = terms.map(term => `key[]=${encodeURIComponent(term)}`).join('&');
|
||||
const url = `lsr.php?newsarchive=true&start=${start}&end=${end}&${arrayterms}`;
|
||||
// Clear the cache to force an update
|
||||
lastNewsData = null;
|
||||
fetchAndDisplayNews(url);
|
||||
}
|
||||
|
||||
function sadCheck() {
|
||||
const params = new URLSearchParams(document.location.search);
|
||||
if (params.has("sad") || params.has("SAD")) {
|
||||
isSadMode = true;
|
||||
completelyHide();
|
||||
Object.assign(document.documentElement.style, {height: '100%'});
|
||||
Object.assign(document.body.style, {height: '100%', overflow: 'hidden', display: 'flex', flexDirection: 'column'});
|
||||
document.getElementById('sad-display-area').style.display = 'flex';
|
||||
document.getElementById('full-display-container').style.display = 'none';
|
||||
}
|
||||
}
|
||||
|
||||
function format_date_with_ordinal(date) {
|
||||
const day = date.getDate();
|
||||
const month = date.toLocaleString('default', { month: 'long' });
|
||||
const get_ordinal_suffix = (day) => {
|
||||
if (day > 3 && day < 21) return 'th';
|
||||
switch (day % 10) {
|
||||
case 1: return "st";
|
||||
case 2: return "nd";
|
||||
case 3: return "rd";
|
||||
default: return "th";
|
||||
}
|
||||
};
|
||||
const suffix = get_ordinal_suffix(day);
|
||||
return `${month} ${day}${suffix}`;
|
||||
}
|
||||
|
||||
function fetchWikimediaEvents() {
|
||||
const now = new Date();
|
||||
|
||||
// Get the individual components
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0'); // Months are 0-indexed, so add 1
|
||||
const day = String(now.getDate()).padStart(2, '0');
|
||||
const hours = String(now.getHours()).padStart(2, '0');
|
||||
|
||||
// Concatenate them into the final string
|
||||
const formattedDate = `${year}${month}${day}${hours}`;
|
||||
|
||||
|
||||
const url = 'https://wx.stoat.org/calendar/wikimedia_onthisday.json';
|
||||
const cacheBustingUrl = `${url}?v=${formattedDate}`;
|
||||
|
||||
return $.getJSON(cacheBustingUrl)
|
||||
.done(function(data) {
|
||||
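// Note: this variant expects a flat top-level "events" array in the JSON rather than events keyed by YYYY-MM-DD date.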
if (data && data.events && Array.isArray(data.events) && data.events.length > 0) {
|
||||
// Always shuffle and use all events
|
||||
window.currentWikimediaEvents = [...data.events].sort(() => 0.5 - Math.random());
|
||||
} else {
|
||||
console.warn("Wikimedia JSON is empty, invalid, or does not contain an 'events' array.");
|
||||
window.currentWikimediaEvents = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to load from ${cacheBustingUrl}. Status: ${textStatus}, Error: ${errorThrown}`);
|
||||
window.currentWikimediaEvents = [];
|
||||
});
|
||||
}
|
||||
|
||||
function fetchHolidays() {
|
||||
const now = new Date();
|
||||
|
||||
// Format today's date as YYYY-MM-DD to match the API response keys
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(now.getDate()).padStart(2, '0');
|
||||
const todayFormatted = `${year}-${month}-${day}`;
|
||||
|
||||
// Get the individual components for cache busting
|
||||
const hours = String(now.getHours()).padStart(2, '0');
|
||||
const formattedDate = `${year}${month}${day}${hours}`;
|
||||
|
||||
const url = 'https://calendar.wx4rlx.org/get_holidays.py';
|
||||
const cacheBustingUrl = `${url}?time=${formattedDate}`;
|
||||
|
||||
return $.getJSON(cacheBustingUrl)
|
||||
.done(function(data) {
|
||||
if (data && data.holidays) {
|
||||
// Get holidays for today using the formatted date as the key
|
||||
window.currentHolidays = data.holidays[todayFormatted] || [];
|
||||
|
||||
// Store upcoming holidays for the next 9 days
|
||||
upcomingHolidays = [];
|
||||
for (let i = 1; i <= 9; i++) {
|
||||
const nextDate = new Date(now);
|
||||
nextDate.setDate(now.getDate() + i);
|
||||
const nextYear = nextDate.getFullYear();
|
||||
const nextMonth = String(nextDate.getMonth() + 1).padStart(2, '0');
|
||||
const nextDay = String(nextDate.getDate()).padStart(2, '0');
|
||||
const nextFormatted = `${nextYear}-${nextMonth}-${nextDay}`;
|
||||
|
||||
const nextHolidays = data.holidays[nextFormatted] || [];
|
||||
if (nextHolidays.length > 0) {
|
||||
upcomingHolidays.push({
|
||||
date: nextDate,
|
||||
holidays: nextHolidays
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
console.warn("Holidays JSON is empty, invalid, or does not contain 'holidays'.");
|
||||
window.currentHolidays = [];
|
||||
upcomingHolidays = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to load holidays from ${cacheBustingUrl}. Status: ${textStatus}, Error: ${errorThrown}`);
|
||||
window.currentHolidays = [];
|
||||
upcomingHolidays = [];
|
||||
});
|
||||
}
|
||||
|
||||
function fetchTriviaQuestions() {
|
||||
const now = Date.now();
|
||||
// Check if we need to fetch new questions and respect the 5-second rate limit
|
||||
if (triviaQuestions.length > 10 || now - lastTriviaFetchTime < 5000) {
|
||||
return $.Deferred().resolve().promise();
|
||||
}
|
||||
|
||||
lastTriviaFetchTime = now;
|
||||
const url = 'https://opentdb.com/api.php?amount=50&type=multiple';
|
||||
return $.getJSON(url)
|
||||
.done(function(data) {
|
||||
if (data.response_code === 0 && data.results) {
|
||||
triviaQuestions = data.results;
|
||||
console.log(`Fetched ${triviaQuestions.length} trivia questions`);
|
||||
} else {
|
||||
console.warn('Trivia API returned non-zero response code or no results');
|
||||
triviaQuestions = [];
|
||||
}
|
||||
})
|
||||
.fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error(`Failed to fetch trivia questions: ${textStatus}, ${errorThrown}`);
|
||||
triviaQuestions = [];
|
||||
});
|
||||
}
|
||||
|
||||
function fetchInjection() {
|
||||
const injectionApiUrl = 'https://calendar.wx4rlx.org/onetime.py?action=api';
|
||||
return $.ajax({
|
||||
url: injectionApiUrl,
|
||||
dataType: 'text' // Treat the response as plain text
|
||||
})
|
||||
.done(function(data) {
|
||||
if (data && data.trim().length > 0) {
|
||||
injectionHtml = data;
|
||||
console.log('Injection content fetched:', injectionHtml);
|
||||
} else {
|
||||
injectionHtml = null;
|
||||
}
|
||||
})
|
||||
.fail(function() {
|
||||
console.log('No injection content available or error fetching');
|
||||
injectionHtml = null;
|
||||
});
|
||||
}
|
||||
|
||||
function fetchAndDisplayTickerData(startImmediately = true) {
|
||||
// First, fetch the injection content
|
||||
fetchInjection().always(function() {
|
||||
// Then fetch other data
|
||||
$.when(fetchWikimediaEvents(), fetchHolidays(), fetchTriviaQuestions()).always(function() {
|
||||
const tickerApiUrl = 'https://calendar.wx4rlx.org/?action=api';
|
||||
$.getJSON(tickerApiUrl, function(data) {
|
||||
if (data.status !== 'success') return;
|
||||
updateTickerLastUpdateTime();
|
||||
|
||||
// Always update the ticker with fresh data
|
||||
const today = new Date();
|
||||
const currentYear = today.getFullYear();
|
||||
const formatted_date = format_date_with_ordinal(today);
|
||||
const tickerContent = $('#ticker-content');
|
||||
|
||||
let localItems = [];
|
||||
data.events.forEach(item => localItems.push({ date: item.date, text: item.event, type: 'event' }));
|
||||
data.weather_reports.forEach(item => localItems.push({ date: item.date, text: item.report, type: 'report' }));
|
||||
|
||||
// Increment cycle count
|
||||
tickerCycleCount++;
|
||||
|
||||
// Add upcoming events based on cycle count
|
||||
let upcomingEventItem = null;
|
||||
if (upcomingHolidays.length > 0) {
|
||||
// Every 10 cycles takes priority over every other cycle
|
||||
if (tickerCycleCount % 10 === 0) {
|
||||
const nextFiveDays = upcomingHolidays.slice(0, 5);
|
||||
let upcomingTexts = [];
|
||||
nextFiveDays.forEach(day => {
|
||||
const formattedDay = format_date_with_ordinal(day.date);
|
||||
upcomingTexts.push(`${formattedDay} - ${day.holidays.join(', ')}`);
|
||||
});
|
||||
if (upcomingTexts.length > 0) {
|
||||
upcomingEventItem = {
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: 'Upcoming Special Days: ' + upcomingTexts.join('; '),
|
||||
type: 'upcoming',
|
||||
year: 'Upcoming'
|
||||
};
|
||||
}
|
||||
}
|
||||
// Only show tomorrow's events if it's an even cycle AND not a multiple of 10
|
||||
else if (tickerCycleCount % 2 === 0) {
|
||||
const tomorrow = upcomingHolidays[0];
|
||||
if (tomorrow) {
|
||||
upcomingEventItem = {
|
||||
date: tomorrow.date.toISOString().split('T')[0],
|
||||
text: 'Upcoming Special Days: Tomorrow - ' + tomorrow.holidays.join(', '),
|
||||
type: 'upcoming',
|
||||
year: 'Tomorrow'
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add today's holidays to local items
|
||||
if (window.currentHolidays && window.currentHolidays.length > 0) {
|
||||
localItems.push({
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: 'Special Days: ' + window.currentHolidays.join(', '),
|
||||
type: 'holiday',
|
||||
year: 'Today'
|
||||
});
|
||||
}
|
||||
|
||||
// Add injection HTML at the beginning if available
|
||||
// Use the injectionHtml that was fetched at the start of this function
|
||||
if (injectionHtml) {
|
||||
localItems.unshift({
|
||||
date: today.toISOString().split('T')[0],
|
||||
text: injectionHtml,
|
||||
type: 'injection',
|
||||
year: 'INJECTION'
|
||||
});
|
||||
}
|
||||
|
||||
// Add upcoming event at the very end if it exists
|
||||
if (upcomingEventItem) {
|
||||
localItems.push(upcomingEventItem);
|
||||
}
|
||||
|
||||
// Sort items by year
|
||||
localItems.sort((a, b) => {
|
||||
const getYear = (item) => {
|
||||
if (item.year !== undefined) {
|
||||
// Handle 'INJECTION', 'Today', 'Tomorrow', 'Upcoming' etc.
|
||||
if (isNaN(Number(item.year))) return Infinity;
|
||||
return Number(item.year);
|
||||
}
|
||||
return Number(item.date.split('-')[0]);
|
||||
};
|
||||
|
||||
const yearA = getYear(a);
|
||||
const yearB = getYear(b);
|
||||
|
||||
if (isNaN(yearA) && isNaN(yearB)) return 0;
|
||||
if (isNaN(yearA)) return 1;
|
||||
if (isNaN(yearB)) return -1;
|
||||
|
||||
return yearA - yearB;
|
||||
});
|
||||
|
||||
// Calculate duration of local items
|
||||
const tempLocalHtml = buildTickerHtml(localItems, currentYear);
|
||||
tickerContent.html(tempLocalHtml);
|
||||
const localWidth = tickerContent[0].scrollWidth;
|
||||
const localDuration = localWidth / PIXELS_PER_SECOND_SPEED;
|
||||
|
||||
// Determine number of Wikimedia items to add
|
||||
let numToSprinkle = 0;
|
||||
if (window.currentWikimediaEvents && window.currentWikimediaEvents.length > 0) {
|
||||
// Always include at least one item
|
||||
numToSprinkle = 1;
|
||||
|
||||
// Calculate average width per Wikimedia item using the first few events
|
||||
const sampleEvents = window.currentWikimediaEvents.slice(0, Math.min(5, window.currentWikimediaEvents.length));
|
||||
let totalWidth = 0;
|
||||
sampleEvents.forEach(event => {
|
||||
const tempWikiItem = { date: `${event.year}-01-01`, text: event.text, type: 'wikimedia', year: event.year };
|
||||
tickerContent.html(buildTickerHtml([tempWikiItem], currentYear));
|
||||
totalWidth += tickerContent[0].scrollWidth;
|
||||
});
|
||||
|
||||
const avgWikiWidth = totalWidth / sampleEvents.length;
|
||||
const timePerWikiItem = avgWikiWidth / PIXELS_PER_SECOND_SPEED;
|
||||
|
||||
// Add more items if there's time
|
||||
const durationGap = TARGET_BROADCAST_SECONDS - localDuration;
|
||||
if (timePerWikiItem > 0 && durationGap > timePerWikiItem) {
|
||||
const additionalItems = Math.floor((durationGap - timePerWikiItem) / timePerWikiItem);
|
||||
numToSprinkle += Math.max(0, additionalItems);
|
||||
}
|
||||
numToSprinkle = Math.min(numToSprinkle, window.currentWikimediaEvents.length);
|
||||
}
|
||||
|
||||
// Add Wikimedia items to local items
|
||||
if (numToSprinkle > 0 && window.currentWikimediaEvents && window.currentWikimediaEvents.length > 0) {
|
||||
const eventsToAdd = window.currentWikimediaEvents.slice(0, numToSprinkle);
|
||||
eventsToAdd.forEach(event => {
|
||||
localItems.push({ date: `${event.year}-01-01`, text: event.text, type: 'wikimedia', year: event.year });
|
||||
});
|
||||
|
||||
// Re-sort with the new items
|
||||
localItems.sort((a, b) => {
|
||||
const getYear = (item) => {
|
||||
if (item.year !== undefined) {
|
||||
if (isNaN(Number(item.year))) return Infinity;
|
||||
return Number(item.year);
|
||||
}
|
||||
return Number(item.date.split('-')[0]);
|
||||
};
|
||||
|
||||
const yearA = getYear(a);
|
||||
const yearB = getYear(b);
|
||||
|
||||
if (isNaN(yearA) && isNaN(yearB)) return 0;
|
||||
if (isNaN(yearA)) return 1;
|
||||
if (isNaN(yearB)) return -1;
|
||||
|
||||
return yearA - yearB;
|
||||
});
|
||||
}
|
||||
|
||||
const finalContentHtml = buildTickerHtml(localItems, currentYear, formatted_date);
|
||||
|
||||
// Set the content
|
||||
tickerContent.html(finalContentHtml);
|
||||
nextTickerHtml = finalContentHtml; // For next update
|
||||
|
||||
if (startImmediately) {
|
||||
if (!animationId) {
|
||||
animationId = requestAnimationFrame(animateTicker);
|
||||
}
|
||||
// Clear injectionHtml after using it to prevent reuse in next cycle
|
||||
injectionHtml = null;
|
||||
// Fetch new for next cycle, which will fetch a new injection
|
||||
fetchAndDisplayTickerData(false);
|
||||
}
|
||||
|
||||
}).fail(function() {
|
||||
console.error("Failed to fetch data for the horizontal ticker.");
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function buildTickerHtml(items, currentYear, formatted_date) {
|
||||
let contentHtml = `<span style="display: inline-block; width: 100vw;"></span>`;
|
||||
|
||||
// First, add injection items at the very beginning
|
||||
const injectionItems = items.filter(item => item.type === 'injection');
|
||||
injectionItems.forEach(item => {
|
||||
contentHtml += `<span>${item.text}</span>`;
|
||||
});
|
||||
|
||||
// Add the "On This Day" header
|
||||
if (formatted_date) {
|
||||
contentHtml += `<span><span class="ticker-event">On This Day, ${formatted_date}:</span></span>`;
|
||||
}
|
||||
|
||||
// Add all other items (excluding injection items which we've already added)
|
||||
items.filter(item => item.type !== 'injection').forEach(item => {
|
||||
const year = item.year || parseInt(item.date.split('-')[0]);
|
||||
let textClass = `ticker-${item.type}`;
|
||||
// Add a specific class for holidays and upcoming events
|
||||
if (item.type === 'holiday') {
|
||||
textClass = 'ticker-holiday';
|
||||
} else if (item.type === 'upcoming') {
|
||||
textClass = 'ticker-upcoming';
|
||||
}
|
||||
const yearDiff = currentYear - year;
|
||||
let anniversaryPrefix = '';
|
||||
// Only show anniversary for positive year differences (past events)
|
||||
// Skip for holiday type
|
||||
if (item.type !== 'holiday' && yearDiff > 0 && yearDiff % 5 === 0) {
|
||||
anniversaryPrefix = `<span style="color: yellow; font-weight: bold;">${yearDiff} Years Ago: </span>`;
|
||||
}
|
||||
let itemText = item.text;
|
||||
let yearText = year;
|
||||
const arbitraryLength = 500;
|
||||
if (item.text.length > arbitraryLength) {
|
||||
const mazzaImgTag = '<img src="mazza.png" alt="Mazza" style="height: 1.2em; vertical-align: middle; margin: 0 0.3em;">';
|
||||
const imageCount = Math.floor((item.text.length - arbitraryLength) / 200);
|
||||
const imageTags = mazzaImgTag.repeat(imageCount);
|
||||
yearText = imageTags ? `${imageTags} ${year}` : year;
|
||||
}
|
||||
// For holidays and upcoming events, don't show the year prefix
|
||||
if (item.type === 'holiday' || item.type === 'upcoming') {
|
||||
contentHtml += `<span><span class="${textClass}">${itemText}</span></span>`;
|
||||
} else {
|
||||
contentHtml += `<span>${anniversaryPrefix}<span class="ticker-year">${yearText}:</span> <span class="${textClass}">${itemText}</span></span>`;
|
||||
}
|
||||
});
|
||||
|
||||
if (formatted_date) {
|
||||
contentHtml += `<span><span class="ticker-event">Office/Local Event</span></span>`;
|
||||
contentHtml += `<span><span class="ticker-wikimedia">World Event</span></span>`;
|
||||
contentHtml += `<span><span class="ticker-report">Local Weather Event</span></span>`;
|
||||
|
||||
// Add trivia question if available
|
||||
if (triviaQuestions.length > 0) {
|
||||
const trivia = triviaQuestions.shift();
|
||||
// Decode HTML entities in question and answers
|
||||
const question = $('<div>').html(trivia.question).text();
|
||||
const correctAnswer = $('<div>').html(trivia.correct_answer).text();
|
||||
const allAnswers = [correctAnswer, ...trivia.incorrect_answers.map(ans => $('<div>').html(ans).text())];
|
||||
|
||||
// Shuffle answers
|
||||
for (let i = allAnswers.length - 1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random() * (i + 1));
|
||||
[allAnswers[i], allAnswers[j]] = [allAnswers[j], allAnswers[i]];
|
||||
}
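// The loop above is a standard Fisher-Yates shuffle: each index i swaps with a uniformly
// random index j in [0, i], so every ordering of the answer choices is equally likely.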
|
||||
|
||||
// Build question with choices
|
||||
let questionHtml = `<span style="color: gold;">TRIVIA: ${question} `;
|
||||
const choices = ['A', 'B', 'C', 'D'];
|
||||
allAnswers.forEach((answer, index) => {
|
||||
questionHtml += `${choices[index]}) ${answer} `;
|
||||
});
|
||||
questionHtml += `</span>`;
|
||||
contentHtml += `<span>${questionHtml}</span>`;
|
||||
|
||||
// Store the correct answer for later display
|
||||
// Find which choice corresponds to the correct answer
|
||||
const correctIndex = allAnswers.indexOf(correctAnswer);
|
||||
const correctChoice = choices[correctIndex];
|
||||
window.lastTriviaAnswer = { correctChoice, correctAnswer };
|
||||
}
|
||||
|
||||
contentHtml += `<span><span class="ticker-event">Visit <b>calendar.wx4rlx.org</b> to make updates or see info for upcoming days!</span></span>`;
|
||||
|
||||
// Add trivia answer if available
|
||||
if (window.lastTriviaAnswer) {
|
||||
const { correctChoice, correctAnswer } = window.lastTriviaAnswer;
|
||||
contentHtml += `<span><span style="color: gold;">ANSWER: ${correctChoice}) ${correctAnswer}</span></span>`;
|
||||
// Clear the answer after displaying it
|
||||
window.lastTriviaAnswer = null;
|
||||
}
|
||||
}
|
||||
return contentHtml;
|
||||
}
|
||||
|
||||
function startTickerWatchdog() {
|
||||
// Clear any existing watchdog
|
||||
if (tickerWatchdogTimer) {
|
||||
clearInterval(tickerWatchdogTimer);
|
||||
}
|
||||
// Check every 30 seconds if the ticker hasn't updated in 3x the expected duration
|
||||
tickerWatchdogTimer = setInterval(() => {
|
||||
const timeSinceLastUpdate = Date.now() - lastTickerUpdateTime;
|
||||
const maxAllowedTime = (TARGET_BROADCAST_SECONDS + 5) * 3 * 1000; // 3x expected duration in ms
|
||||
if (timeSinceLastUpdate > maxAllowedTime) {
|
||||
console.warn('Ticker watchdog triggered - forcing refresh');
|
||||
fetchAndDisplayTickerData(true);
|
||||
}
|
||||
}, 30000);
|
||||
}
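// Example with a hypothetical TARGET_BROADCAST_SECONDS of 120: the watchdog tolerates
// (120 + 5) * 3 = 375 seconds without an update before forcing a refresh, checked on a
// 30-second polling interval.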
|
||||
|
||||
function updateTickerLastUpdateTime() {
|
||||
lastTickerUpdateTime = Date.now();
|
||||
}
|
||||
|
||||
// Debug function to print ticker info and force next cycle
|
||||
window.debugTicker = function() {
|
||||
console.log('=== TICKER DEBUG INFO ===');
|
||||
console.log('Current cycle count:', tickerCycleCount);
|
||||
console.log('Upcoming holidays:', upcomingHolidays);
|
||||
console.log('Current Wikimedia events count:', window.currentWikimediaEvents ? window.currentWikimediaEvents.length : 0);
|
||||
console.log('Current holidays:', window.currentHolidays);
|
||||
console.log('Next ticker HTML length:', nextTickerHtml ? nextTickerHtml.length : 'null');
|
||||
console.log('Current ticker content:', document.getElementById('ticker-content').innerHTML);
|
||||
|
||||
// Force next cycle
|
||||
console.log('Forcing next ticker cycle...');
|
||||
tickerCycleCount++;
|
||||
fetchAndDisplayTickerData(true);
|
||||
};
|
||||
|
||||
sadCheck();
|
||||
toggleForm();
|
||||
refreshTimer = setInterval(() => {
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
}, 300000);
|
||||
fetchAndDisplayNews(currentNewsUrl);
|
||||
// Start the ticker independently
|
||||
fetchAndDisplayTickerData(true);
|
||||
startTickerWatchdog();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
13
noaa_staff.json
Normal file
13
noaa_staff.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"type": "service_account",
|
||||
"project_id": "staff-452918",
|
||||
"private_key_id": "429a1cfad95e68e440e9af97dc0fc855f61a32af",
|
||||
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDWU6AzAGgbH0iC\npGpM3QsrJRXea86riFbraJARwSqMmoyjN3XDpk+M8zw26HZb/PFgrAhw/hOCALI+\ny8JnXlJ1Aj6l3nEvG5kbh53yDlV8Be2bVjn65BqtkZjCCCtQrSULeEdhHB1uCHMW\nUup37ocCn8+3cpzkCNFSK3ciK/wtvEgRN18447v3lhXWU/DX8ANV2iycx0avb84Q\nT2/DH8DubAKAHVRaQZr+nz7v72E9gJvjAS2VKPofutSiTrlKWCi5zweZ4nvCaYtf\nynPx3IjFum3x+PeIL2Rbv8o/rI5soBYaoxo9ng0XCFsqaZpqH9iO018ACwhOdfEC\nMKFhByaHAgMBAAECggEACyp7gSIMn8Ucdk9HM86cTgN2RTU2pNx13Xkqxadbpnby\nzBoP6kEtic6uR2oK/eCKrsT9AkNVFC53GtXAwLk68IT3wJUl2RTGAEIrf2MxyZlW\nzK33AqmESPX55bnz5RXFhU3J4jb1wuLGeYDuXZCGu3iZ+MO22sZ+0+9scHz8JrcS\nXLEtwBMN1pxYxsXMGYcQ1a/DwFD5Fb41AlO66r6ftoDfhj42aYPm96a6wkilgwnV\n5bjDZz7jE3Ju3eC9KqCZZUdL68tFRaLYfwQAPH332/tY5UB7sQ0d/hib4ioJAqdc\nijYRG933n8+3F/YpaiVLxQccN7yy/lzKD0/qTJIVaQKBgQDu7VYDb8pbb0Grxo9u\nkSLVJYsgilbacih33jntzzbVemXCRMHHhXhRX1l9unVVRyduYubn54p10EBTVRbd\nIXbrjkrLsa/GJnFmsvl0Q1ykSECLopZ081HQuKcxNc8pd8OQGKZ+eIxL+cyIn9OA\n11iwWB2Q0Xw8BwmvhGGeBkeJhQKBgQDlpEbyEpE5OB1Gw1JtSInfeveKOp+BASyj\nW6QykL+SkdjDHByLYIWSO//Rxy28AmTN3JNzyf03CHYOz9WirI8PsUaDGrgKV2fT\nW/pRWIJqWsu4IKUlEZ5KvhFtKxx2eYUI2fpcMYHCevsS0l8DSCWCw02RSUGXPWrG\nS4AQvTdHmwKBgQCCP9IHF5c6F0G2sg0ZGnfhuJDK0X+doGQdTIFJ+g+LeykkKbV1\n3nYD7jXsC4zRS3PcgoAcZiYeKcQXaaiILikVKsHtn1UScXyanvDMh7hmOpktRho9\ncTPl0/QZKJthoM90q2uAQJlDBc+Rn4d+kmbf43lirEDMvI10KHfK3ON8vQKBgH7N\ng09P6VjLH39fgzhNAbShVebCEClP2LIrfDNfsijg1+yErU5Y8eiZsNzJOvvB5p+1\n2MXrFyM4TNhI8xJsbmKf5Oun6U/si6wt8b7RPNI1aKA3hnk9jUaARWcekwzodQk6\nNACMZbZZ6iGugEBzqiLWbRJ4fPtiMn9kWUbk9I1ZAoGBAK21AEB7JQyJo4fGKahf\n9m3SoaLQSIfUfa/N7+Qum/NFu1SVZ2Yv6ThA8FO87euAW4kOP5OX1xzoiwBp4xgE\ntYJ4VrdFtnf0LMuQZyB7G1zxdnKgI1HfJGLtLtO8yf/xaTV8fGO+oy9ugqMrznyN\nWsAQd3CrJc542Oey99L1obji\n-----END PRIVATE KEY-----\n",
|
||||
"client_email": "staffing@staff-452918.iam.gserviceaccount.com",
|
||||
"client_id": "102589230430170881336",
|
||||
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
||||
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/staffing%40staff-452918.iam.gserviceaccount.com",
|
||||
"universe_domain": "googleapis.com"
|
||||
}
|
||||
411
nws.php
Executable file
411
nws.php
Executable file
@@ -0,0 +1,411 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
// No GET parameters: return the current point outage info
|
||||
if(empty($_GET)) {
|
||||
$query = "SELECT * FROM nws order by lastupdate asc";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
|
||||
}
|
||||
echo json_encode($array);
|
||||
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
if (isset($_GET['officestats11'])) {
|
||||
// Get all unique lastupdate dates from the database
|
||||
$date_query = "SELECT DISTINCT DATE(lastupdate) as unique_date
|
||||
FROM nws
|
||||
WHERE status = 'active'
|
||||
AND office ~ 'WFO'
|
||||
ORDER BY unique_date ASC"; // Changed from DESC to ASC
|
||||
|
||||
$date_result = pg_query($dbconn, $date_query)
|
||||
or die('Date query failed: ' . pg_last_error());
|
||||
|
||||
$datetime_points = [];
|
||||
while ($row = pg_fetch_array($date_result, null, PGSQL_ASSOC)) {
|
||||
$dt = DateTime::createFromFormat('Y-m-d', $row['unique_date']);
|
||||
$dt->setTime(23, 59, 59);
|
||||
$datetime_points[] = $dt->format('Y-m-d H:i:s');
|
||||
}
|
||||
|
||||
pg_free_result($date_result);
|
||||
|
||||
if (empty($datetime_points)) {
|
||||
echo json_encode(['error' => 'No valid datetime points found in database']);
|
||||
exit;
|
||||
}
|
||||
|
||||
// Debug: Log the datetime points
|
||||
error_log("Processed datetime points: " . implode(', ', $datetime_points));
|
||||
|
||||
$query = "WITH latest_records AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (PARTITION BY personid ORDER BY ABS(EXTRACT(EPOCH FROM (lastupdate - CAST($1 AS TIMESTAMP)))) ASC) AS rn
|
||||
FROM nws
|
||||
WHERE status = 'active'
|
||||
AND lastupdate <= CAST($1 AS TIMESTAMP) + INTERVAL '1 day'
|
||||
AND lastupdate >= CAST($1 AS TIMESTAMP) - INTERVAL '3 days'
|
||||
AND office ~ 'WFO'
|
||||
),
|
||||
otitle_counts AS (
|
||||
SELECT
|
||||
office,
|
||||
otitle,
|
||||
COUNT(*) AS otitle_count
|
||||
FROM latest_records
|
||||
WHERE rn = 1
|
||||
GROUP BY office, otitle
|
||||
)
|
||||
SELECT
|
||||
lr.office,
|
||||
COUNT(DISTINCT lr.personid) AS unique_person_count,
|
||||
(SELECT ARRAY_AGG(json_obj ORDER BY json_obj->>'otitle' ASC)
|
||||
FROM (SELECT DISTINCT jsonb_build_object(
|
||||
'otitle', tc2.otitle,
|
||||
'count', tc2.otitle_count
|
||||
) AS json_obj
|
||||
FROM otitle_counts tc2
|
||||
WHERE tc2.office = lr.office) AS subquery
|
||||
) AS title_counts_array
|
||||
FROM latest_records lr
|
||||
WHERE lr.rn = 1
|
||||
GROUP BY lr.office
|
||||
ORDER BY unique_person_count DESC";
|
||||
|
||||
$results_array = [];
|
||||
|
||||
// Execute query for each unique datetime
|
||||
foreach ($datetime_points as $datetime) {
|
||||
$result = pg_query_params($dbconn, $query, array($datetime))
|
||||
or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$office_data = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$office_data[] = $line;
|
||||
}
|
||||
|
||||
// Store results with the datetime used
|
||||
$results_array[] = [
|
||||
'provided_datetime' => $datetime,
|
||||
'data' => $office_data
|
||||
];
|
||||
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
// Debug: Log before output
|
||||
error_log("Final results: " . json_encode($results_array));
|
||||
|
||||
// Return JSON encoded results
|
||||
echo json_encode($results_array);
|
||||
}
|
||||
|
||||
if (isset($_GET['officestats'])) {
|
||||
if (isset($_GET['datetime'])) {
|
||||
// Expecting datetime as comma-separated dates or JSON array
|
||||
$input_dates = is_array($_GET['datetime'])
|
||||
? $_GET['datetime']
|
||||
: explode(',', $_GET['datetime']);
|
||||
|
||||
// Process each date and set to end of day
|
||||
$input_dates = array_unique($input_dates);
|
||||
$input_dates = array_values($input_dates);
|
||||
$datetime_points = [];
|
||||
foreach ($input_dates as $date) {
|
||||
$dt = DateTime::createFromFormat('m-d-Y', trim($date));
|
||||
if ($dt === false) {
|
||||
error_log("Invalid date skipped: " . trim($date));
|
||||
continue;
|
||||
}
|
||||
$dt->setTime(23, 59, 59);
|
||||
$datetime_points[] = $dt->format('Y-m-d H:i:s');
|
||||
}
|
||||
|
||||
// Ensure uniqueness and reindex
|
||||
$datetime_points = array_unique($datetime_points);
|
||||
$datetime_points = array_values($datetime_points);
|
||||
|
||||
// Debug: Log the datetime points
|
||||
error_log("Processed datetime points: " . implode(', ', $datetime_points));
|
||||
|
||||
if (empty($datetime_points)) {
|
||||
echo json_encode(['error' => 'No valid datetime points provided']);
|
||||
exit;
|
||||
}
|
||||
|
||||
$query = "WITH latest_records AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (PARTITION BY personid ORDER BY ABS(EXTRACT(EPOCH FROM (lastupdate - CAST($1 AS TIMESTAMP)))) ASC) AS rn
|
||||
FROM nws
|
||||
WHERE status = 'active'
|
||||
AND lastupdate <= CAST($1 AS TIMESTAMP) + INTERVAL '1 day'
|
||||
AND lastupdate >= CAST($1 AS TIMESTAMP) - INTERVAL '3 days'
|
||||
AND office ~ 'WFO'
|
||||
),
|
||||
otitle_counts AS (
|
||||
SELECT
|
||||
office,
|
||||
otitle,
|
||||
COUNT(*) AS otitle_count
|
||||
FROM latest_records
|
||||
WHERE rn = 1
|
||||
GROUP BY office, otitle
|
||||
)
|
||||
SELECT
|
||||
lr.office,
|
||||
COUNT(DISTINCT lr.personid) AS unique_person_count,
|
||||
(SELECT ARRAY_AGG(json_obj ORDER BY json_obj->>'otitle' ASC)
|
||||
FROM (SELECT DISTINCT jsonb_build_object(
|
||||
'otitle', tc2.otitle,
|
||||
'count', tc2.otitle_count
|
||||
) AS json_obj
|
||||
FROM otitle_counts tc2
|
||||
WHERE tc2.office = lr.office) AS subquery
|
||||
) AS title_counts_array
|
||||
FROM latest_records lr
|
||||
WHERE lr.rn = 1
|
||||
GROUP BY lr.office
|
||||
ORDER BY unique_person_count DESC";
|
||||
|
||||
$results_array = [];
|
||||
|
||||
// Execute query for each provided datetime
|
||||
foreach ($datetime_points as $datetime) {
|
||||
$result = pg_query_params($dbconn, $query, array($datetime))
|
||||
or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$office_data = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$office_data[] = $line;
|
||||
}
|
||||
|
||||
// Store results with the datetime used
|
||||
$results_array[] = [
|
||||
'provided_datetime' => $datetime,
|
||||
'data' => $office_data
|
||||
];
|
||||
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
// Debug: Log before output
|
||||
error_log("Final results: " . json_encode($results_array));
|
||||
|
||||
// Return JSON encoded results
|
||||
echo json_encode($results_array);
|
||||
|
||||
}
|
||||
}
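// Example request (hypothetical host/path), using the m-d-Y comma-separated format parsed above:
//   GET /nws.php?officestats&datetime=11-26-2025,11-27-2025
// The response is a JSON array with one {provided_datetime, data} entry per parsed end-of-day
// timestamp, where data holds {office, unique_person_count, title_counts_array} rows.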
|
||||
|
||||
|
||||
if (isset($_GET['regionstats'])) {
|
||||
if (isset($_GET['datetime'])) {
|
||||
// Expecting datetime as comma-separated dates or JSON array
|
||||
$input_dates = is_array($_GET['datetime'])
|
||||
? $_GET['datetime']
|
||||
: explode(',', $_GET['datetime']);
|
||||
|
||||
// Process each date and set to end of day
|
||||
$input_dates = array_unique($input_dates);
|
||||
$input_dates = array_values($input_dates);
|
||||
$datetime_points = [];
|
||||
foreach ($input_dates as $date) {
|
||||
$dt = DateTime::createFromFormat('m-d-Y', trim($date));
|
||||
if ($dt === false) {
|
||||
error_log("Invalid date skipped: " . trim($date));
|
||||
continue;
|
||||
}
|
||||
$dt->setTime(23, 59, 59);
|
||||
$datetime_points[] = $dt->format('Y-m-d H:i:s');
|
||||
}
|
||||
|
||||
// Ensure uniqueness and reindex
|
||||
$datetime_points = array_unique($datetime_points);
|
||||
$datetime_points = array_values($datetime_points);
|
||||
|
||||
// Debug: Log the datetime points
|
||||
error_log("Processed datetime points: " . implode(', ', $datetime_points));
|
||||
|
||||
if (empty($datetime_points)) {
|
||||
echo json_encode(['error' => 'No valid datetime points provided']);
|
||||
exit;
|
||||
}
|
||||
|
||||
$query = "WITH latest_records AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (PARTITION BY personid ORDER BY ABS(EXTRACT(EPOCH FROM (lastupdate - CAST($1 AS TIMESTAMP)))) ASC) AS rn,
|
||||
SUBSTRING(office FROM 'NWS/([EWPASC]R)') AS region
|
||||
FROM nws
|
||||
WHERE lastupdate <= CAST($1 AS TIMESTAMP) + INTERVAL '1 day' - INTERVAL '1 second'
|
||||
AND lastupdate >= CAST($1 AS TIMESTAMP) - INTERVAL '3 days'
|
||||
AND office ~ 'NWS/[EWPASC]R'
|
||||
AND status = 'active'
|
||||
),
|
||||
otitle_counts AS (
|
||||
SELECT
|
||||
region,
|
||||
otitle,
|
||||
COUNT(*) AS otitle_count
|
||||
FROM latest_records
|
||||
WHERE rn = 1
|
||||
GROUP BY region, otitle
|
||||
)
|
||||
SELECT
|
||||
lr.region,
|
||||
COUNT(DISTINCT lr.personid) AS unique_person_count,
|
||||
(SELECT ARRAY_AGG(json_obj ORDER BY json_obj->>'otitle' ASC)
|
||||
FROM (SELECT DISTINCT jsonb_build_object(
|
||||
'otitle', tc2.otitle,
|
||||
'count', tc2.otitle_count
|
||||
) AS json_obj
|
||||
FROM otitle_counts tc2
|
||||
WHERE tc2.region = lr.region) AS subquery
|
||||
) AS title_counts_array
|
||||
FROM latest_records lr
|
||||
WHERE lr.rn = 1
|
||||
GROUP BY lr.region
|
||||
ORDER BY unique_person_count DESC";
|
||||
|
||||
$results_array = [];
|
||||
|
||||
// Execute query for each provided datetime
|
||||
foreach ($datetime_points as $datetime) {
|
||||
$result = pg_query_params($dbconn, $query, array($datetime))
|
||||
or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$office_data = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$office_data[] = $line;
|
||||
}
|
||||
|
||||
// Store results with the datetime used
|
||||
$results_array[] = [
|
||||
'provided_datetime' => $datetime,
|
||||
'data' => $office_data
|
||||
];
|
||||
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
// Debug: Log before output
|
||||
//error_log("Final results: " . json_encode($results_array));
|
||||
|
||||
// Return JSON encoded results
|
||||
echo json_encode($results_array);
|
||||
|
||||
}
|
||||
}
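// The SUBSTRING(office FROM 'NWS/([EWPASC]R)') expression above reduces each office string to
// its regional code (ER, WR, PR, AR, SR or CR), which is what the per-region counts are grouped
// by; a request of the form nws.php?regionstats&datetime=11-26-2025 (hypothetical host/path)
// returns the same {provided_datetime, data} structure as the office endpoints.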
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['drilldown'])) {
|
||||
if (isset($_GET['datetime'])) {
|
||||
// Expecting datetime as comma-separated dates or JSON array
|
||||
$input_dates = is_array($_GET['datetime'])
|
||||
? $_GET['datetime']
|
||||
: explode(',', $_GET['datetime']);
|
||||
|
||||
// Process each date and set to end of day
|
||||
$datetime_points = [];
|
||||
foreach ($input_dates as $date) {
|
||||
// Specify the exact format of your input date string
|
||||
$dt = DateTime::createFromFormat('m-d-Y', trim($date)); // Adjust format as needed
|
||||
if ($dt === false) {
|
||||
// Handle invalid date
|
||||
continue;
|
||||
}
|
||||
$dt->setTime(23, 59, 59);
|
||||
$datetime_points[] = $dt->format('Y-m-d H:i:s');
|
||||
}
|
||||
$datetime_points = array_unique($datetime_points);
|
||||
$datetime_points = array_values($datetime_points);
|
||||
|
||||
$query = "WITH latest_records AS (
|
||||
SELECT *,
|
||||
ROW_NUMBER() OVER (PARTITION BY personid ORDER BY lastupdate DESC) AS rn
|
||||
FROM nws
|
||||
WHERE status = 'active'
|
||||
AND lastupdate <= $1
|
||||
),
|
||||
otitle_counts AS (
|
||||
SELECT
|
||||
office,
|
||||
otitle,
|
||||
COUNT(*) AS otitle_count
|
||||
FROM latest_records
|
||||
WHERE rn = 1
|
||||
GROUP BY office, otitle
|
||||
)
|
||||
SELECT
|
||||
lr.office,
|
||||
COUNT(DISTINCT lr.personid) AS unique_person_count,
|
||||
(SELECT ARRAY_AGG(json_obj ORDER BY json_obj->>'otitle' ASC)
|
||||
FROM (SELECT DISTINCT jsonb_build_object(
|
||||
'otitle', tc2.otitle,
|
||||
'count', tc2.otitle_count
|
||||
) AS json_obj
|
||||
FROM otitle_counts tc2
|
||||
WHERE tc2.office = lr.office) AS subquery
|
||||
) AS title_counts_array
|
||||
FROM latest_records lr
|
||||
WHERE lr.rn = 1
|
||||
GROUP BY lr.office
|
||||
ORDER BY unique_person_count DESC";
|
||||
|
||||
$results_array = [];
|
||||
|
||||
// Execute query for each provided datetime
|
||||
foreach ($datetime_points as $datetime) {
|
||||
$result = pg_query_params($dbconn, $query, array($datetime))
|
||||
or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$office_data = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$office_data[] = $line;
|
||||
}
|
||||
|
||||
// Store results with the datetime used
|
||||
$results_array[] = [
|
||||
'provided_datetime' => $datetime,
|
||||
'data' => $office_data
|
||||
];
|
||||
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
// Return JSON encoded results
|
||||
echo json_encode($results_array);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
pg_close($dbconn);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
?>
|
||||
347
ohcam.py
Normal file
347
ohcam.py
Normal file
@@ -0,0 +1,347 @@
|
||||
import time
|
||||
import requests
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from psycopg2.extras import Json, execute_values
|
||||
import re
|
||||
|
||||
conn = psycopg2.connect(host='127.0.0.1', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
ohcams = 'https://api.ohgo.com/roadmarkers/cameras?pointData={%22lowLongitude%22:-83.85946954181879,%22highLongitude%22:-77.00743340900629,%22lowLatitude%22:37.434082799235426,%22highLatitude%22:40.20012906764399,%22routeDirection%22:%22%22,%22routeName%22:%22%22}'
|
||||
|
||||
response = json.loads(requests.get(ohcams).text)
|
||||
tings = []
|
||||
for p in response:
|
||||
tings = [p['Cameras'][0]['Direction'], p['Cameras'][0]['LargeURL'], p['Latitude'], p['Longitude'],p['Description'],'true','normalproxy','10','https://ohgo.com']
|
||||
sql = 'INSERT INTO cams (aspect, url, lat, lon, description, active, method, interval,source) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING;'
|
||||
cursor.execute(sql,tings)
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
wvids = 'https://wv511.org/wsvc/gmap.asmx/buildCamerasJSONjs'
|
||||
|
||||
wvheaders = {'Connection': 'keep-alive',
|
||||
'Accept': 'application/json',
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36',
|
||||
'X-HTTP-Method-Override': 'GET',
|
||||
'Referer': 'http://wv511.org/',
|
||||
'Accept-Language': 'en-US,en;q=0.9',
|
||||
'Cookie': 'mapExtent=-86.271172363286%3A35.832853816094%3A-74.92229541016%3A41.956945104642'}
|
||||
|
||||
r = requests.get(wvids,headers=wvheaders)
|
||||
x = r.text
|
||||
# Pull the "cams" JSON array out of the embedded JS payload
potato = re.findall(r' "cams":.(.*?}])?', x, re.DOTALL | re.MULTILINE)
|
||||
potatoer = json.loads(potato[0])
|
||||
|
||||
|
||||
wvcams = []
|
||||
|
||||
|
||||
#print(potatoer)
|
||||
for i in potatoer:
|
||||
try:
|
||||
lat = i['start_lat']
|
||||
lon = i['start_lng']
|
||||
url = 'https://sfstest.roadsummary.com/rtplive/' + str(i['md5']) + '/playlist.m3u8'
|
||||
description = re.findall(r'<div id="camDescription">(.*?)<span?',i['description'])[0]
|
||||
tings = [description,lat,lon,url,'rtsp','true','10','http://wv511.org']
|
||||
sql = 'INSERT INTO cams (description, lat, lon, url, method, active, interval,source) VALUES (%s,%s,%s,%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING;'
|
||||
cursor.execute(sql,tings)
|
||||
conn.commit()
|
||||
except Exception as e:
|
||||
print(e, i.get('md5'))
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#Bounding box of CWA
|
||||
|
||||
x101 = 137
|
||||
x102 = 143
|
||||
y101 = 194
|
||||
y102 = 200
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def bloomsky():
|
||||
|
||||
(xlat,xlon,nlat,nlon) = (40.000897, -77.513573, 36.749991, -83.098124)
|
||||
cursor.execute("UPDATE cams set bloomsky = url where method = 'bloomsky' and bloomsky is null")
|
||||
conn.commit()
|
||||
cursor.execute("SELECT camid, bloomsky from cams where method = 'bloomsky' and active = True")
|
||||
bloomcams = cursor.fetchall()
|
||||
bloomurl = 'https://map.bloomsky.com/data/device/'
|
||||
bloomheaders = {'Connection': 'keep-alive',
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36',
|
||||
'Content-Type': 'application/json; charset=UTF-8',
|
||||
'Accept': '*/*',
|
||||
'Origin': 'https://map.bloomsky.com',
|
||||
'Sec-Fetch-Site': 'same-origin',
|
||||
'Sec-Fetch-Mode': 'cors',
|
||||
'Sec-Fetch-Dest': 'empty',
|
||||
'X-HTTP-Method-Override': 'POST',
|
||||
'Host': 'map.bloomsky.com',
|
||||
'Referer': 'https://map.bloomsky.com',
|
||||
'Accept-Language': 'en-US,en;q=0.9'}
|
||||
#'Cookie': 'AWSELB=A38D8DDD18933E21FE3ADA5953BE34121339A3A57D7398DD0E6A3F21299C92AD55655B00C912D9CF7F381931286A93623AAED145BC6DAEC803C16F67CC4FA4015E6CF710F2; AWSELBCORS=A38D8DDD18933E21FE3ADA5953BE34121339A3A57D7398DD0E6A3F21299C92AD55655B00C912D9CF7F381931286A93623AAED145BC6DAEC803C16F67CC4FA4015E6CF710F2'
|
||||
|
||||
|
||||
|
||||
bloompost = json.dumps({"type":"ALL"})
|
||||
r = requests.post(bloomurl,headers=bloomheaders,data=bloompost)
|
||||
# print(r)
|
||||
bloomcameras = r.json()
|
||||
if r.status_code == 200:
|
||||
for p in bloomcameras:
|
||||
lat = bloomcameras[p]['lat']
|
||||
lon = bloomcameras[p]['lng']
|
||||
if (lat < xlat and lat > nlat and lon < xlon and lon > nlon):
|
||||
tings = [bloomcameras[p]['img'], bloomcameras[p]['city'],bloomcameras[p]['name'],lat,lon,p,'10','True','240','bloomsky','https://map.bloomsky.com/weather-stations/' + str(p)]
|
||||
sql = "INSERT INTO cams (url,town,description,lat,lon,bloomsky,interval,active,keephours, method,source) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING"
|
||||
cursor.execute(sql,tings)
|
||||
|
||||
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
def ambient():
|
||||
url = "https://lightning.ambientweather.net/devices?%24publicBox[0][0]=-83.144531&%24publicBox[0][1]=36.72&%24publicBox[1][0]=-75.5647&%24publicBox[1][1]=40.33802332110745&%24limit=1000&rank=2"
|
||||
r = requests.get(url)
|
||||
x = r.json()
|
||||
for i in x['data']:
|
||||
if (i.get('info').get('webcam') != None and "jpg" in i.get('info').get('webcam')):
|
||||
description = i.get('info').get('coords').get('location')
|
||||
lat = i.get('info').get('coords').get('coords').get('lat')
|
||||
lon = i.get('info').get('coords').get('coords').get('lon')
|
||||
url = i.get('info').get('webcam')
|
||||
|
||||
|
||||
try:
|
||||
tings = [description,lat,lon,url,'normal','true','10','https://ambientweather.net/']
|
||||
sql = 'INSERT INTO cams (description, lat, lon, url, method, active, interval,source) VALUES (%s,%s,%s,%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING;'
|
||||
cursor.execute(sql,tings)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
conn.commit()
|
||||
|
||||
ambient()
|
||||
|
||||
|
||||
|
||||
def kymeso():
|
||||
ky = 'https://www.kymesonet.org/json/StationsInfo.json'
|
||||
response = json.loads(requests.get(ky).text)
|
||||
tings = []
|
||||
base = 'https://www.kymesonet.org/json/appSiteCam/'
|
||||
suff = '.jpg'
|
||||
for key,value in response.items():
|
||||
if value.get('isDiscontinued') == False:
|
||||
tings = [base + value.get('abbr')+ suff, value.get('lat'),value.get('lon'),value.get('name'),'true','normal','10','https://www.kymesonet.org/']
|
||||
sql = 'INSERT INTO cams (url, lat, lon, description, active, method, interval,source) VALUES (%s,%s,%s,%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING;'
|
||||
# print(tings)
|
||||
cursor.execute(sql,tings)
|
||||
conn.commit()
|
||||
|
||||
kymeso()
|
||||
|
||||
|
||||
def ky511cams():
|
||||
"""
|
||||
Fetches traffic camera data, filters it, and performs an efficient
|
||||
bulk insert into the database.
|
||||
"""
|
||||
# Define constants for clarity and easy maintenance
|
||||
API_URL = 'https://services2.arcgis.com/CcI36Pduqd0OR4W9/ArcGIS/rest/services/trafficCamerasCur_Prd/FeatureServer/0/query?where=1%3D1&outFields=name%2C+description%2C+snapshot%2C+status%2C+latitude%2C+longitude%2C+county&returnGeometry=false&f=pjson'
|
||||
|
||||
# Define the SQL template with placeholders for the values
|
||||
SQL_INSERT = """
|
||||
INSERT INTO cams (description, lat, lon, url, active, method, interval, source)
|
||||
VALUES %s
|
||||
ON CONFLICT DO NOTHING;
|
||||
"""
|
||||
|
||||
try:
|
||||
# 1. Use .json() for efficiency and add a timeout for safety
|
||||
response = requests.get(API_URL, timeout=15)
|
||||
response.raise_for_status() # Raise an exception for bad status codes (4xx or 5xx)
|
||||
data = response.json()
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(f"Error fetching data from API: {e}")
|
||||
return
|
||||
except json.JSONDecodeError:
|
||||
print("Error: Failed to decode JSON from response.")
|
||||
return
|
||||
|
||||
# 2. Use a list comprehension to process and filter data concisely
|
||||
cams_to_insert = [
|
||||
(
|
||||
cam.get('description'),
|
||||
cam.get('latitude'),
|
||||
cam.get('longitude'),
|
||||
cam.get('snapshot'),
|
||||
True, # active
|
||||
'normalproxy', # method
|
||||
10, # interval
|
||||
'https://goky.ky.gov/' # source
|
||||
)
|
||||
for feature in data.get('features', [])
|
||||
if (cam := feature.get('attributes')) and cam.get('longitude') > -83.8
|
||||
]
|
||||
|
||||
if not cams_to_insert:
|
||||
print("No new camera data meeting the criteria to insert.")
|
||||
return
|
||||
|
||||
try:
|
||||
# 3. Use a single, highly efficient bulk insert operation
|
||||
execute_values(cursor, SQL_INSERT, cams_to_insert)
|
||||
conn.commit()
|
||||
print(f"✅ Successfully processed and inserted {len(cams_to_insert)} records.")
|
||||
except Exception as e:
|
||||
print(f"❌ Database error occurred: {e}")
|
||||
conn.rollback() # Roll back the transaction on error
|
||||
|
||||
|
||||
ky511cams()
|
||||
|
||||
|
||||
|
||||
#bloomsky()
|
||||
|
||||
|
||||
|
||||
headers = {
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def get_elev(lon, lat):
|
||||
"""Gets elevation from the local database first, then falls back to a web API."""
|
||||
try:
|
||||
# Primary method: Local PostGIS database
|
||||
sql = """
|
||||
SELECT round(ST_Value(rast, ST_SetSRID(ST_MakePoint(%s,%s),4326)) * 3.28084) AS val
|
||||
FROM dem
|
||||
WHERE ST_Intersects(rast, ST_SetSRID(ST_MakePoint(%s,%s),4326));
|
||||
"""
|
||||
vars_tuple = (lon, lat, lon, lat)
|
||||
cursor.execute(sql, vars_tuple)
|
||||
result = cursor.fetchone()
|
||||
|
||||
# Check if the database returned a valid, non-zero elevation
|
||||
if result and result[0] is not None and result[0] != 0:
|
||||
return int(result[0])
|
||||
|
||||
except Exception as e:
|
||||
# Log the database error if needed
|
||||
print(f"Database error: {e}")
|
||||
|
||||
# Fallback method: Web API
|
||||
# This part runs if the try block fails or returns no valid data
|
||||
elev = get_elev_backup(lon, lat)
|
||||
return int(elev) if elev is not None else None
|
||||
|
||||
|
||||
def get_elev_backup(lon, lat):
|
||||
"""Backup function to get elevation from the National Map API."""
|
||||
baseurl = 'https://epqs.nationalmap.gov/v1/json'
|
||||
params = {
|
||||
'x': lon,
|
||||
'y': lat,
|
||||
'units': 'Feet',
|
||||
'wkid': 4326,
|
||||
'includeDate': False
|
||||
}
|
||||
print(params)
|
||||
try:
|
||||
# The timeout prevents the request from hanging indefinitely
|
||||
r = requests.get(baseurl, params=params, timeout=5)
|
||||
# Raise an exception for bad status codes (4xx or 5xx)
|
||||
r.raise_for_status()
|
||||
|
||||
# Attempt to parse JSON and get the value
|
||||
data = r.json()
|
||||
elev = data.get('value')
|
||||
|
||||
# The API may return a string 'no-data' for some locations
|
||||
if isinstance(elev, str) and not elev.replace('.', '', 1).isdigit():
|
||||
return None
|
||||
|
||||
# Add a small delay to avoid hammering the API
|
||||
time.sleep(1)
|
||||
return float(elev)
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
# Handles connection errors, timeouts, bad status codes, etc.
|
||||
print(f"API Request failed: {e}")
|
||||
return None
|
||||
except json.JSONDecodeError:
|
||||
# Handles cases where the response is not valid JSON
|
||||
print(f"Failed to decode JSON from response. Response text: {r.text}")
|
||||
return None
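# Example with hypothetical coordinates: get_elev(-81.63, 38.35) queries the local PostGIS DEM
# raster first (metres converted to feet via * 3.28084) and only falls back to the National Map
# EPQS API when the raster returns no usable value; both paths return feet, or None on failure.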
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
cursor.execute("SELECT camid, lon, lat from cams WHERE elevation ISNULL or elevation = 0 and active = true")
|
||||
allcams = cursor.fetchall()
|
||||
|
||||
|
||||
|
||||
for cam in allcams:
|
||||
camid, lon, lat = cam
|
||||
elev = get_elev(lon,lat)
|
||||
val = (elev,camid)
|
||||
sql = "UPDATE cams SET elevation= %s WHERE camid = %s"
|
||||
cursor.execute(sql,val)
|
||||
conn.commit()
|
||||
|
||||
updates = ['UPDATE public.cams SET geom = ST_SetSRID(ST_MakePoint(lon, lat), 4326)',
|
||||
'UPDATE public.cams SET county = county.countyname from public.county WHERE ST_Contains(county.geom,cams.geom)',
|
||||
'UPDATE public.cams SET pzone = pzone.state_zone from public.pzone WHERE ST_Contains(pzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET fzone = fzone.state_zone from public.fzone WHERE ST_Contains(fzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET cwa = fzone.cwa from public.fzone WHERE ST_Contains(fzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET zonename = pzone.shortname from public.pzone WHERE ST_Contains(pzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET keephours = 240 WHERE keephours is null',
|
||||
"""UPDATE public.cams SET method = 'normal' WHERE method is null""",
|
||||
'UPDATE public.cams SET state = county.state from public.county WHERE ST_Contains(county.geom,cams.geom)',
|
||||
'UPDATE public.cams SET airport = EXISTS (SELECT 1 FROM public.airports WHERE ST_Intersects(cams.geom, airports.geom5)) WHERE airport IS NULL',
|
||||
"UPDATE cams SET active = false FROM cwa WHERE cwa.cwa = 'RLX' AND NOT ST_DWithin(cams.geom::geography, cwa.geom::geography, 10 * 1609.344) AND cams.url ~ 'oh.us' AND cams.active = true",]
|
||||
|
||||
for i in updates:
|
||||
cursor.execute(i)
|
||||
|
||||
|
||||
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
ambient()
|
||||
|
||||
|
||||
cursor.close()
|
||||
conn.close()
|
||||
260
ohgo.html
Normal file
260
ohgo.html
Normal file
@@ -0,0 +1,260 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>OHGO/WV511/goKY Tracker</title>
|
||||
<style>
|
||||
.tabulator, .tabulator-header, .tabulator-tableHolder {
|
||||
overflow: visible !important;
|
||||
}
|
||||
.switch {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
width: 40px;
|
||||
height: 24px;
|
||||
}
|
||||
.switch input {
|
||||
opacity: 0;
|
||||
width: 0;
|
||||
height: 0;
|
||||
}
|
||||
.slider {
|
||||
position: absolute;
|
||||
cursor: pointer;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background-color: #ccc;
|
||||
-webkit-transition: .4s;
|
||||
transition: .4s;
|
||||
}
|
||||
.slider:before {
|
||||
position: absolute;
|
||||
content: "";
|
||||
height: 16px;
|
||||
width: 16px;
|
||||
left: 4px;
|
||||
bottom: 4px;
|
||||
background-color: white;
|
||||
-webkit-transition: .4s;
|
||||
transition: .4s;
|
||||
}
|
||||
input:checked + .slider {
|
||||
background-color: #2196F3;
|
||||
}
|
||||
input:focus + .slider {
|
||||
box-shadow: 0 0 1px #2196F3;
|
||||
}
|
||||
input:checked + .slider:before {
|
||||
-webkit-transform: translateX(16px);
|
||||
-ms-transform: translateX(16px);
|
||||
transform: translateX(16px);
|
||||
}
|
||||
.slider.round {
|
||||
border-radius: 24px;
|
||||
}
|
||||
.slider.round:before {
|
||||
border-radius: 50%;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.js" integrity="sha512-WNLxfP/8cVYL9sj8Jnp6et0BkubLP31jhTG9vhL/F5uEZmg5wEzKoXp1kJslzPQWwPT1eyMiSxlKCgzHLOTOTQ==" crossorigin="anonymous"></script>
|
||||
<link href="/tabulator/dist/css/tabulator_midnight.css" rel="stylesheet">
|
||||
<script type="text/javascript" src="https://unpkg.com/tabulator-tables@6.3.0/dist/js/tabulator.min.js"></script>
|
||||
<button id="refreshToggle" onclick="pauseData()">Data autorefreshes every 2 minutes, click here to pause (makes it easier)</button> <br>
|
||||
<a>Note that the start time will be the time the road was closed, not necessarily the time the issue started</a><br>
|
||||
<a>Double click any field to automatically copy to clipboard</a>
|
||||
<div id="controls">
|
||||
<label class="switch">
|
||||
<input type="checkbox" id="hidetoggle" onclick="hider()" checked>
|
||||
<span class="slider round"></span>
|
||||
</label>Toggle whether hidden reports are shown
|
||||
</div>
|
||||
<input type="checkbox" id="cwa" name="cwa" value="RLX" onchange="filters()" checked>
|
||||
<label for="cwa">RLX only</label><br>
|
||||
<div id="wunderobs"></div>
|
||||
|
||||
<script>
|
||||
function googleMap(cell, formatterParams) {
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + cell.getData().lat + "+" + cell.getData().lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
var table = new Tabulator("#wunderobs", {
|
||||
virtualDom: true,
|
||||
virtualDomBuffer: 300,
|
||||
ajaxURL: "lsr.php?getCombinedTable=p",
|
||||
ajaxConfig: "GET",
|
||||
autoResize: true,
|
||||
initialSort: [{ column: "start", dir: "desc" }],
|
||||
columns: [
|
||||
{ title: "Id", field: "id", hozAlign: "center", visible: false },
|
||||
{ title: "Source", field: "source", hozAlign: "center", visible: false },
|
||||
{
|
||||
title: "LSR", field: "lsr",
|
||||
cellClick: function(e, cell) { cellClickCallback(e, cell); },
|
||||
formatter: "toggle",
|
||||
formatterParams: { size: 20, onValue: 'true', offValue: 'false', onColor: "green", offColor: "red", clickable: true }
|
||||
},
|
||||
{
|
||||
title: "Hide", field: "hide",
|
||||
cellClick: function(e, cell) { hideClickCallback(e, cell); },
|
||||
formatter: "toggle",
|
||||
formatterParams: { size: 20, onValue: 'true', offValue: 'false', onColor: "green", offColor: "red", clickable: true }
|
||||
},
|
||||
{ title: "Issue Start (Z)", field: "start" },
|
||||
// { title: "Last Update (Z)", field: "lastupdate" },
|
||||
{ title: "Lat", field: "lat" },
|
||||
{ title: "Lon", field: "lon" },
|
||||
{ title: "Category", field: "category" },
|
||||
{ title: "End (Z)", field: "endtime", formatter:function(cell, formatterParams, onRendered){
|
||||
const formattedDate = cell.getValue();
|
||||
if (!formattedDate) return "Ongoing";
|
||||
const inputDate = new Date(`${formattedDate}Z`);
|
||||
const now = new Date();
|
||||
const diffMs = now - inputDate;
|
||||
const diffMins = diffMs / (1000 * 60);
|
||||
|
||||
return diffMins < 30 ? "Ongoing" : formattedDate;
|
||||
} },
|
||||
{ title: "County", field: "county" },
|
||||
{ title: "Location", field: "location", formatter: "link", formatterParams: { url: googleMap, target: "_blank" } },
|
||||
{ title: "Description", field: "description" }
|
||||
],
|
||||
cellDblClick: function(e, cell) {
|
||||
copyToClipboard(cell.getValue());
|
||||
},
|
||||
cellClick: function(e, cell) {
|
||||
if (e.ctrlKey) {
|
||||
copyToClipboard(cell.getValue());
|
||||
}
|
||||
},
|
||||
dataLoaded: function(data) {
|
||||
setTimeout(addManualDblClickListeners, 500);
|
||||
}
|
||||
});
|
||||
|
||||
function copyToClipboard(value) {
|
||||
if (value !== null && value !== undefined) {
|
||||
if (navigator.clipboard && navigator.clipboard.writeText) {
|
||||
navigator.clipboard.writeText(value.toString())
|
||||
.then(() => {
|
||||
// alert("Copied: " + value);
|
||||
})
|
||||
.catch(err => {
|
||||
fallbackCopy(value);
|
||||
});
|
||||
} else {
|
||||
fallbackCopy(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function fallbackCopy(text) {
|
||||
var textArea = document.createElement("textarea");
|
||||
textArea.value = text;
|
||||
document.body.appendChild(textArea);
|
||||
textArea.select();
|
||||
try {
|
||||
document.execCommand('copy');
|
||||
// alert("Copied (fallback): " + text);
|
||||
} catch (err) {
|
||||
// alert("Failed to copy text");
|
||||
}
|
||||
document.body.removeChild(textArea);
|
||||
}
|
||||
|
||||
function addManualDblClickListeners() {
|
||||
var cells = document.querySelectorAll(".tabulator-cell");
|
||||
cells.forEach(function(cell) {
|
||||
cell.addEventListener("dblclick", function(e) {
|
||||
copyToClipboard(cell.textContent.trim());
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function cellClickCallback(e, cell) {
|
||||
var row = cell.getRow();
|
||||
lsr(row.getData()['lsr'], row.getData()['id'],row.getData()['source'] );
|
||||
}
|
||||
|
||||
function lsr(lsr, id, source) {
|
||||
$.get({
|
||||
url: 'lsr.php?updater=true&lsr=' + lsr + "&id=" + id + "&table="+source,
|
||||
error: function(xhr, error) {
|
||||
alert('Unable to update, please refresh page');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function hideClickCallback(e, cell) {
|
||||
var row = cell.getRow();
|
||||
hide(row.getData()['hide'], row.getData()['id'], row.getData()['source']);
|
||||
}
|
||||
|
||||
function hide(hide, id, source) {
|
||||
$.get({
|
||||
url: 'lsr.php?updater=true&hide=' + hide + "&id=" + id + "&table="+source,
|
||||
error: function(xhr, error) {
|
||||
alert('Unable to update, please refresh page');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function reloadData() {
|
||||
var oldscrolly = window.scrollY;
|
||||
var oldscrollx = window.scrollX;
|
||||
table.replaceData()
|
||||
.then(function() {
|
||||
window.scroll(oldscrollx, oldscrolly);
|
||||
setTimeout(addManualDblClickListeners, 500);
|
||||
})
|
||||
.catch(function(error) {
|
||||
// Silent error handling
|
||||
});
|
||||
}
|
||||
|
||||
function hider() {
|
||||
var hideit = document.getElementById("hidetoggle");
|
||||
if (hideit.checked == true) {
|
||||
table.removeFilter("hide", "=", "false");
|
||||
} else {
|
||||
table.addFilter("hide", "=", "false");
|
||||
}
|
||||
}
|
||||
|
||||
function filters() {
|
||||
var y = document.getElementById("cwa").checked;
|
||||
if (y) {
|
||||
table.addFilter("cwa", "=", 'RLX');
|
||||
} else {
|
||||
table.removeFilter("cwa", "=", 'RLX');
|
||||
}
|
||||
}
|
||||
|
||||
var timeout;
|
||||
var isRefreshing = true;
|
||||
timeout = setInterval(reloadData, 120000);
|
||||
|
||||
function pauseData() {
|
||||
var button = document.getElementById("refreshToggle");
|
||||
if (isRefreshing) {
|
||||
clearInterval(timeout);
|
||||
button.textContent = "Resume Autorefresh";
|
||||
isRefreshing = false;
|
||||
} else {
|
||||
timeout = setInterval(reloadData, 120000);
|
||||
button.textContent = "Data autorefreshes every 2 minutes, click here to pause (makes it easier)";
|
||||
isRefreshing = true;
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener("load", function() {
|
||||
setTimeout(addManualDblClickListeners, 1000);
|
||||
});
|
||||
filters()
|
||||
</script>
|
||||
This information is not provided as a direct service to the NWS
|
||||
</body>
|
||||
</html>
|
||||
26
ohgo.php
Normal file
26
ohgo.php
Normal file
@@ -0,0 +1,26 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
// Performing SQL query
|
||||
$query = "SELECT lat,lon,id,category,roadstatus,cwa,county,state,location,routename,description,lsr,date_trunc('minute', start) as start, date_trunc('minute', endtime) as endtime,date_trunc('minute', lastupdate) as lastupdate from ohgo where endtime is null or endtime > now() - interval '48 hours' order by start asc;";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in HTML
|
||||
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
|
||||
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
|
||||
|
||||
|
||||
236
ohgo.py
Normal file
236
ohgo.py
Normal file
@@ -0,0 +1,236 @@
|
||||
import requests
|
||||
# import polyline # Not used in the provided snippet
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions # Not strictly necessary for this code, but fine to keep
|
||||
from datetime import datetime, timezone # timezone not used, but datetime is
|
||||
# from geojson import Point, Feature, FeatureCollection, dump # Not used
|
||||
# import pandas as pd # Not used
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
|
||||
# --- Configuration ---
|
||||
DB_HOST = 'localhost'
|
||||
DB_NAME = 'nws'
|
||||
DB_USER = 'nws'
|
||||
DB_PASSWORD = 'nws' # Consider using environment variables or a secrets manager
|
||||
|
||||
# API endpoint (contains specific coordinates, ensure this is intended)
|
||||
OHGO_API_URL = "https://api.ohgo.com/roadmarkers/TrafficSpeedAndAlertMarkers?pointData={%22lowLongitude%22:-83.55941510027988,%22highLongitude%22:-80.38711285418613,%22lowLatitude%22:38.243663436655325,%22highLatitude%22:40.169517342197835,%22routeDirection%22:%22%22,%22routeName%22:%22%22}"
|
||||
|
||||
# Optional Proxies
|
||||
PROXIES = {"http": "http://nws:nws@localhost:9000",
|
||||
"https": "http://nws:nws@localhost:9000"} # Add https if needed
|
||||
|
||||
# Disable insecure request warnings (use cautiously)
|
||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
||||
|
||||
# --- Main Logic ---
|
||||
def fetch_ohgo_data(session, url):
|
||||
"""Fetches data from the OHGO API."""
|
||||
print(f"INFO: Fetching data from {url}...")
|
||||
try:
|
||||
response = session.get(url, timeout=30) # Add a timeout
|
||||
response.raise_for_status() # Raise an exception for bad status codes (4xx or 5xx)
|
||||
print("INFO: Data fetched successfully.")
|
||||
return response.json()
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(f"ERROR: Failed to fetch data from API: {e}")
|
||||
return None
|
||||
except json.JSONDecodeError as e:
|
||||
print(f"ERROR: Failed to parse JSON response: {e}")
|
||||
print(f"Response text: {response.text[:500]}...") # Log part of the response
|
||||
return None
|
||||
|
||||
def prepare_data_for_upsert(ohgo_data):
|
||||
"""Processes API data and prepares it for database insertion."""
|
||||
ohgoclosures_for_upsert = []
|
||||
# Get timestamp ONCE for this batch, use Python datetime object
|
||||
current_timestamp_obj = datetime.utcnow()
|
||||
|
||||
if not ohgo_data or 'TrafficAlertMarkers' not in ohgo_data:
|
||||
print("WARNING: No 'TrafficAlertMarkers' found in API response.")
|
||||
return []
|
||||
|
||||
alerts = ohgo_data['TrafficAlertMarkers']
|
||||
print(f"INFO: Processing {len(alerts)} alerts from API.")
|
||||
|
||||
for j in alerts:
|
||||
category = j.get('Category')
|
||||
# Filter for relevant categories
|
||||
if category == "Flooding" or category == "Weather":
|
||||
# Prepare tuple with exactly 10 items for the efficient UPSERT SQL
|
||||
info_tuple = (
|
||||
j.get('Latitude'),
|
||||
j.get('Longitude'),
|
||||
j.get('Id'),
|
||||
j.get('Location'),
|
||||
j.get('Description'),
|
||||
category, # Use variable already fetched
|
||||
j.get('RoadStatus'),
|
||||
j.get('RouteName'),
|
||||
current_timestamp_obj, # Use datetime object for 'start'
|
||||
current_timestamp_obj # Use datetime object for 'lastupdate'
|
||||
)
|
||||
# Basic validation: check if critical fields like ID are present
|
||||
if info_tuple[2] is None:
|
||||
print(f"WARNING: Skipping record due to missing ID: {j}")
|
||||
continue
|
||||
if info_tuple[0] is None or info_tuple[1] is None:
|
||||
print(f"WARNING: Skipping record {info_tuple[2]} due to missing Lat/Lon: {j}")
|
||||
continue
|
||||
|
||||
ohgoclosures_for_upsert.append(info_tuple)
|
||||
|
||||
print(f"INFO: Prepared {len(ohgoclosures_for_upsert)} records for database.")
|
||||
return ohgoclosures_for_upsert
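# Illustrative example of one prepared tuple (values invented), matching the 10 placeholders in
# the upsert SQL used below:
#   (39.96, -82.99, 12345, 'I-70 EB at SR 256', 'High water reported', 'Flooding',
#    'Closed', 'I-70', datetime(2025, 11, 27, 3, 0), datetime(2025, 11, 27, 3, 0))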
|
||||
|
||||
def update_database(conn, data_to_upsert):
|
||||
"""Performs batch upsert and subsequent updates in the database."""
|
||||
if not data_to_upsert:
|
||||
print("INFO: No data to update in the database.")
|
||||
return
|
||||
|
||||
# SQL for batch UPSERT using ON CONFLICT...DO UPDATE SET...EXCLUDED
|
||||
upsert_sql = """
|
||||
INSERT INTO ohgo (
|
||||
lat, lon, id, location, description, category,
|
||||
roadstatus, routename, start, lastupdate
|
||||
)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) -- 10 placeholders
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
lat = EXCLUDED.lat,
|
||||
lon = EXCLUDED.lon,
|
||||
location = EXCLUDED.location,
|
||||
description = EXCLUDED.description,
|
||||
category = EXCLUDED.category,
|
||||
roadstatus = EXCLUDED.roadstatus,
|
||||
lastupdate = EXCLUDED.lastupdate;
|
||||
"""
|
||||
|
||||
# Use context managers for cursor and transaction handling
|
||||
try:
|
||||
with conn: # Start transaction
|
||||
with conn.cursor() as cursor:
|
||||
# --- 1. Batch Upsert ---
|
||||
print(f"INFO: Upserting {len(data_to_upsert)} records...")
|
||||
cursor.executemany(upsert_sql, data_to_upsert)
|
||||
# Note: rowcount might be unreliable with executemany depending on driver/DB version
|
||||
print(f"INFO: Upsert complete. {cursor.rowcount} rows affected (Note: count may be inaccurate).")
|
||||
|
||||
# --- 2. Post-Processing Updates ---
|
||||
print("INFO: Performing post-processing updates...")
|
||||
|
||||
# Update geometry (Requires spatial index on ohgo.geom for performance)
|
||||
print("INFO: Updating geometry...")
|
||||
cursor.execute("""
|
||||
UPDATE ohgo
|
||||
SET geom = ST_SetSRID(ST_MakePoint(lon, lat), 4326)
|
||||
WHERE lat IS NOT NULL AND lon IS NOT NULL AND geom IS NULL;
|
||||
""")
|
||||
print(f"INFO: Updated geometry for {cursor.rowcount} records.")
|
||||
|
||||
# Update endtime based on lastupdate
|
||||
print("INFO: Updating end times...")
|
||||
cursor.execute("""
|
||||
UPDATE ohgo
|
||||
SET endtime = NULL
|
||||
WHERE lastupdate > now() - interval '0.25 hours';
|
||||
""")
|
||||
print(f"INFO: Reset endtime for {cursor.rowcount} recent records.")
|
||||
cursor.execute("""
|
||||
UPDATE ohgo
|
||||
SET endtime = lastupdate
|
||||
WHERE endtime IS NULL AND lastupdate < now() - interval '1 hours';
|
||||
""")
|
||||
print(f"INFO: Set endtime for {cursor.rowcount} older records.")
|
||||
|
||||
# Update administrative boundaries (CRITICAL: Requires spatial indexes!)
|
||||
print("INFO: Updating County/CWA/State...")
|
||||
# Add WHERE geom IS NOT NULL for efficiency
|
||||
cursor.execute("""
|
||||
UPDATE ohgo o SET county = c.countyname
|
||||
FROM public.county c
|
||||
WHERE o.county IS NULL AND o.geom IS NOT NULL
|
||||
AND ST_Contains(c.geom, o.geom);
|
||||
""")
|
||||
print(f"INFO: Updated county for {cursor.rowcount} records.")
|
||||
cursor.execute("""
|
||||
UPDATE ohgo o SET cwa = f.cwa
|
||||
FROM public.fzone f
|
||||
WHERE o.cwa IS NULL AND o.geom IS NOT NULL
|
||||
AND ST_Contains(f.geom, o.geom);
|
||||
""")
|
||||
print(f"INFO: Updated cwa for {cursor.rowcount} records.")
|
||||
cursor.execute("""
|
||||
UPDATE ohgo o SET state = c.state
|
||||
FROM public.county c
|
||||
WHERE o.state IS NULL AND o.geom IS NOT NULL
|
||||
AND ST_Contains(c.geom, o.geom);
|
||||
""")
|
||||
print(f"INFO: Updated state for {cursor.rowcount} records.")
|
||||
|
||||
# Set default boolean flags (Combined)
|
||||
print("INFO: Setting default flags...")
|
||||
cursor.execute("""
|
||||
UPDATE ohgo
|
||||
SET lsr = false, hide = false
|
||||
WHERE lsr IS NULL;
|
||||
""")
|
||||
print(f"INFO: Set default flags for {cursor.rowcount} records.")
|
||||
|
||||
# Clean description (Combined)
|
||||
print("INFO: Cleaning descriptions...")
|
||||
cursor.execute("""
|
||||
UPDATE ohgo
|
||||
SET description = TRIM(REPLACE(REPLACE(description, 'Use alternate route.', ''), E'\n', ' '))
|
||||
WHERE description LIKE '%Use alternate route.%' OR description LIKE E'%\n%';
|
||||
""")
|
||||
print(f"INFO: Cleaned description for {cursor.rowcount} records.")
|
||||
|
||||
print("INFO: Database operations successful. Transaction committed.") # Commit happens automatically on exiting 'with conn:'
|
||||
|
||||
except psycopg2.Error as e:
|
||||
print(f"ERROR: Database error occurred: {e}")
|
||||
print("ERROR: Transaction automatically rolled back.")
|
||||
# Optionally re-raise the exception if calling code needs to handle it
|
||||
# raise
|
||||
except Exception as e:
|
||||
print(f"ERROR: An unexpected error occurred during database operations: {e}")
|
||||
# Transaction is still rolled back by 'with conn:' exiting on exception
|
||||
# raise
|
||||
|
||||
# --- Script Execution ---
|
||||
if __name__ == "__main__":
|
||||
conn = None # Initialize connection variable
|
||||
try:
|
||||
# Establish database connection
|
||||
print(f"INFO: Connecting to database '{DB_NAME}' on '{DB_HOST}'...")
|
||||
conn = psycopg2.connect(host=DB_HOST, database=DB_NAME, user=DB_USER, password=DB_PASSWORD)
|
||||
print("INFO: Database connection successful.")
|
||||
|
||||
# Create a requests session
|
||||
S = requests.Session()
|
||||
S.verify = False # Disables SSL verification - use with caution
|
||||
#S.proxies = PROXIES # Use the defined proxies
|
||||
|
||||
# Fetch data
|
||||
ohgo_json_data = fetch_ohgo_data(S, OHGO_API_URL)
|
||||
|
||||
if ohgo_json_data:
|
||||
# Prepare data
|
||||
prepared_data = prepare_data_for_upsert(ohgo_json_data)
|
||||
|
||||
# Update database
|
||||
update_database(conn, prepared_data)
|
||||
|
||||
except psycopg2.OperationalError as e:
|
||||
print(f"FATAL: Could not connect to database: {e}")
|
||||
except Exception as e:
|
||||
print(f"FATAL: An unexpected error occurred: {e}")
|
||||
finally:
|
||||
# Ensure the connection is closed
|
||||
if conn:
|
||||
conn.close()
|
||||
print("INFO: Database connection closed.")
|
||||
|
||||
print("INFO: Script finished.")
|
||||
387
one.php
Normal file
387
one.php
Normal file
@@ -0,0 +1,387 @@
|
||||
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
|
||||
<script src="js/jsani/lib/jquery-1.11.1.min.js"></script>
|
||||
<script src="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.js"></script>
|
||||
<link href="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.css" rel="stylesheet" type="text/css">
|
||||
<script src="js/jsani/jquery.jsani.min.js"></script>
|
||||
<link href="js/jsani/jquery.jsani.min.css" rel="stylesheet" type="text/css">
|
||||
|
||||
</head>
|
||||
|
||||
<style type="text/css">
|
||||
html, body {
|
||||
height: 100%;
|
||||
font-family: Arial, sans-serif;
|
||||
}
|
||||
|
||||
#caminfo {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
margin: 10px 0;
|
||||
box-shadow: 0 2px 8px rgba(0,0,0,0.1);
|
||||
border-radius: 8px;
|
||||
overflow: hidden;
|
||||
table-layout: fixed;
|
||||
}
|
||||
|
||||
table, th, td {
|
||||
border: 1px solid #ddd;
|
||||
padding: 6px 10px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
th {
|
||||
background-color: #f2f2f2;
|
||||
font-weight: bold;
|
||||
color: #333;
|
||||
width: 25%;
|
||||
}
|
||||
|
||||
tr:nth-child(even) {
|
||||
background-color: #f9f9f9;
|
||||
}
|
||||
|
||||
tr:hover {
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
a {
|
||||
color: #0066cc;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
</style>
|
||||
<body>
|
||||
|
||||
|
||||
<div style="display: flex; flex-direction: column; align-items: center; width: 100%;">
|
||||
<div style="width: 800px; margin-bottom: 20px;">
|
||||
<div id="caminfo">
|
||||
<table>
|
||||
<tbody id="camstats"></tbody>
|
||||
</table>
|
||||
<form onsubmit="return false;" method="post" name="gifit" style="margin-top: 10px;">
|
||||
<input type="submit" name="stopit" value="Stop Auto Refresh" onclick="stoptimer()"/>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style="width: 800px; margin: auto;">
|
||||
<div id="animation_1"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="archive">
|
||||
<label>Current archive length is 10 days</label>
|
||||
<form onsubmit="return false;" method="post" name="archive">
|
||||
<label for="archive">Enter Ending Date Time For Archive Data (YYYY-MM-DD HH:MM) UTC</label>
|
||||
<input type="text" id="dtg" name="dtg">
|
||||
<br>
|
||||
<label for="frames">Enter Desired Number of Frames:</label>
|
||||
<input type="text" id="frames" name="frames" value="20" size="4">
|
||||
<input type="submit" name="submit" value="Submit" onclick="archiveshow()" />
|
||||
</form>
|
||||
<br>
|
||||
|
||||
<form onsubmit="return false;" method="post" name="gifit">
|
||||
<label for="gifit">Enter frame delay and last frame delay (ms) to make an animated gif from loaded frames (will take a couple seconds).</label>
|
||||
<br>
|
||||
|
||||
<label for="delay">Delay</label>
|
||||
<input type="text" id="delay" name="delay" value="20" size="6">
|
||||
<label for="delay">Last Frame Delay</label>
|
||||
<input type="text" id="lastdelay" name="lastdelay" value="200" size="6">
|
||||
<label for="maxh">Constrain Horizonal Size</label>
|
||||
<input type="text" id="maxh" name="maxh" value="500" size="6">
|
||||
<label for="maxv">Constrain Vertical Size</label>
|
||||
<input type="text" id="maxv" name="maxv" value="400" size="6">
|
||||
<label for="delay">Last Frame Delay</label>
|
||||
<input type="submit" name="gifit" value="Make GIF" onclick="makemp4()"/>
|
||||
</form>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
<?php
|
||||
|
||||
$camid = pg_escape_string($_GET['camid']);
|
||||
echo('var camid = ' . $_GET["camid"] . ';');
|
||||
if($_GET['dtg'] ?? null){
|
||||
$endtime = pg_escape_string($_GET['dtg']);
|
||||
}
|
||||
if($_GET['camimages'] ?? null){
|
||||
$camimages = $_GET['camimages'];
|
||||
echo('var camimages = ' . $_GET["camimages"] . ';');
|
||||
}
|
||||
if(!($_GET['camimages'] ?? null)){
|
||||
echo('var camimages = ' . 20 . ';');
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if($_GET['dtg'] ?? null){
|
||||
echo('var camtime = ' . json_encode($_GET["dtg"]). ';');
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
?>
|
||||
|
||||
externalcam = [];
|
||||
|
||||
|
||||
function archiveshow() {
|
||||
archive = document.getElementById("dtg").value;
|
||||
archive=archive.trim().replace(/ /g, '+');
|
||||
altframes = document.getElementById("frames").value;
|
||||
camimages = altframes;
|
||||
window.open("one.php?camid=" + camid + "&camimages=" + camimages + "&dtg=" + archive);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
function doTheInsert(label1, value1, label2, value2) {
|
||||
var newRow = document.createElement('tr');
|
||||
newRow.innerHTML = '<th>' + label1 + '</th><td>' + value1 + '</td><th>' + label2 + '</th><td>' + value2 + '</td>';
|
||||
document.getElementById('camstats').appendChild(newRow);
|
||||
}
|
||||
|
||||
function updatetable() {
|
||||
$.getJSON("single.php?camid=" + camid, function(caminfo){
|
||||
document.getElementById('camstats').innerHTML = '';
|
||||
|
||||
// Add header row
|
||||
var headerRow = document.createElement('tr');
|
||||
headerRow.innerHTML = '<th colspan="4" style="text-align: center; background-color: #e0e0e0; font-size: 1.2em;">Camera Information</th>';
|
||||
document.getElementById('camstats').appendChild(headerRow);
|
||||
|
||||
doTheInsert('State', caminfo[0]['state'], 'County', caminfo[0]['county']);
|
||||
doTheInsert('Public Zone', caminfo[0]['pzone'], 'CWA', caminfo[0]['cwa']);
|
||||
doTheInsert('Elevation', caminfo[0]['elevation'] + ' feet', 'Direct link to feed', '<a href="' + caminfo[0]['url'] + '" target="_blank">View Feed</a>');
|
||||
|
||||
// Helper function to format permission value
|
||||
function formatPermission(perm) {
|
||||
if (perm === 't') return 'Yes';
|
||||
if (perm === 'f') return 'No';
|
||||
return 'Unsure';
|
||||
}
|
||||
|
||||
if (caminfo[0]['source'] == null) {
|
||||
doTheInsert('Source for sharing', 'none', 'Permission to share', formatPermission(caminfo[0]['permission']));
|
||||
} else {
|
||||
doTheInsert('Source for sharing', '<a href="' + caminfo[0]['source'] + '" target="_blank">Source Link</a>', 'Permission to share', formatPermission(caminfo[0]['permission']));
|
||||
}
|
||||
|
||||
doTheInsert('Description', '<a href="' + googleMap(caminfo[0]['lat'], caminfo[0]['lon']) + '" target="_blank">' + caminfo[0]['description'] + '</a>', '', '');
|
||||
|
||||
// Add hydro status row
|
||||
// Handle cases where hydro might not be in the response
|
||||
var hydroValue = (caminfo[0].hasOwnProperty('hydro') && caminfo[0]['hydro'] !== null) ? caminfo[0]['hydro'] : false;
|
||||
// Convert to display string
|
||||
var hydroDisplay = hydroValue ? 'Yes' : 'No';
|
||||
// For passing to toggle function, we need to ensure it's in a consistent format
|
||||
var hydroPassValue = hydroValue;
|
||||
var hydroButton = '<button onclick="toggleHydro(\'' + camid + '\', ' + hydroPassValue + ')">Toggle Hydro Status</button>';
|
||||
doTheInsert('Hydro', hydroDisplay + ' ' + hydroButton, '', '');
|
||||
|
||||
// Add airport status row
|
||||
// Handle cases where airport might not be in the response
|
||||
var airportValue = (caminfo[0].hasOwnProperty('airport') && caminfo[0]['airport'] !== null) ? caminfo[0]['airport'] : false;
|
||||
// Convert to display string
|
||||
var airportDisplay = airportValue ? 'Yes' : 'No';
|
||||
// For passing to toggle function, we need to ensure it's in a consistent format
|
||||
var airportPassValue = airportValue;
|
||||
var airportButton = '<button onclick="toggleAirport(\'' + camid + '\', ' + airportPassValue + ')">Toggle Airport Status</button>';
|
||||
doTheInsert('Airport', airportDisplay + ' ' + airportButton, '', '');
|
||||
|
||||
document.title = 'Cam: ' + caminfo[0]['description'];
|
||||
});
|
||||
}
|
||||
|
||||
function googleMap(lat, lon){
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + lat + "+" + lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
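// getcam() builds the individualcam.php request (adding &dtg= only when an
// archive end time was supplied), then hands the reversed, oldest-first frame
// list to the jsani animation plugin and refreshes the metadata table.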
function getcam() {
|
||||
if (typeof camtime !== 'undefined') {
|
||||
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages + '&dtg=' + camtime;
|
||||
|
||||
} else {
|
||||
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages;
|
||||
|
||||
}
|
||||
|
||||
//$.getJSON("individualcam.php?camid=" + camid + '&camimages=' + camimages, function(data){
|
||||
$.getJSON(reqstring, function(data){
|
||||
var camlist = [];
|
||||
var filetimes = [];
|
||||
|
||||
for(var i in data){
|
||||
camlist.push(data[i].filepath);
|
||||
filetimes.push('<a href="./camdata/' + data[i].filepath + '">Pause Loop and click this to view image directly</a> ' + 'Image Retrieved: ' + data[i].dateutc + ' UTC');
|
||||
externalcam.push("./camdata/" + data[i].filepath);
|
||||
}
|
||||
camlist.reverse();
|
||||
filetimes.reverse();
|
||||
externalcam.reverse();
|
||||
var animation = $("#animation_1").jsani({
|
||||
baseDir: './camdata',
|
||||
imageSrcs: camlist,
|
||||
aniWidth: 800,
|
||||
aniHeight: 600,
|
||||
//initdwell: 200,
|
||||
frameLabels: filetimes,
|
||||
controls: ['framelabel', 'stopplay', 'firstframe', 'previous', 'next', 'lastframe', 'looprock', 'slow', 'fast'],
|
||||
last_frame_pause: 8,
|
||||
//first_frame_pause: 1,
|
||||
//frame_pause: '0:5, 3:6'
|
||||
});
|
||||
|
||||
|
||||
updatetable();
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
getcam();
|
||||
|
||||
// window.setInterval('refresh()', 60000);
|
||||
const myInterval = setInterval(refresh, 60000);
|
||||
function refresh() {
|
||||
window.location.reload();
|
||||
}
|
||||
|
||||
function stoptimer() {
|
||||
clearInterval(myInterval);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
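// toggleHydro/toggleAirport post the inverted flag to update_field.php (assumed
// to accept camid, field, and value) and re-render the metadata table so the
// new state shows without a full page reload.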
function toggleHydro(camid, currentValue) {
|
||||
// Invert the boolean value
|
||||
var newValue = !currentValue;
|
||||
console.log('Toggling hydro for camid:', camid, 'from', currentValue, 'to', newValue);
|
||||
|
||||
// Send AJAX request to update the hydro value
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: 'update_field.php',
|
||||
data: {camid: camid, field: 'hydro', value: newValue},
|
||||
dataType: 'json',
|
||||
success: function(response) {
|
||||
console.log('Response:', response);
|
||||
if (response.success) {
|
||||
// Refresh the table to show updated value
|
||||
updatetable();
|
||||
} else {
|
||||
alert('Failed to update hydro status: ' + (response.message || 'Unknown error'));
|
||||
}
|
||||
},
|
||||
error: function(xhr, status, error) {
|
||||
console.log('AJAX Error:', status, error);
|
||||
alert('Error updating hydro status: ' + error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function toggleAirport(camid, currentValue) {
|
||||
// Invert the boolean value
|
||||
var newValue = !currentValue;
|
||||
console.log('Toggling airport for camid:', camid, 'from', currentValue, 'to', newValue);
|
||||
|
||||
// Send AJAX request to update the airport value
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: 'update_field.php',
|
||||
data: {camid: camid, field: 'airport', value: newValue},
|
||||
dataType: 'json',
|
||||
success: function(response) {
|
||||
console.log('Response:', response);
|
||||
if (response.success) {
|
||||
// Refresh the table to show updated value
|
||||
updatetable();
|
||||
} else {
|
||||
alert('Failed to update airport status: ' + (response.message || 'Unknown error'));
|
||||
}
|
||||
},
|
||||
error: function(xhr, status, error) {
|
||||
console.log('AJAX Error:', status, error);
|
||||
alert('Error updating airport status: ' + error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
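// makemp4() posts the loaded frame paths plus delay/size settings to mp4.php,
// which is expected to return the finished animation as a base64-encoded GIF;
// the result is wrapped in a download link appended to the page.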
function makemp4() {
|
||||
var delay = document.getElementById("delay").value;
|
||||
var lastdelay = document.getElementById("lastdelay").value;
|
||||
var maxh = document.getElementById("maxh").value;
|
||||
var maxv = document.getElementById("maxv").value;
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: 'mp4.php',
|
||||
data: {data: externalcam, images: camimages, delay: delay, lastdelay: lastdelay, maxh: maxh, maxv: maxv},
|
||||
success: function(data) {
|
||||
var giffy = document.createElement('img');
|
||||
giffy.src = `data:image/gif;base64,${data}`;
|
||||
var outputImg = document.createElement('a');
|
||||
outputImg.innerHTML = "Click here or on the image to save gif <br>";
|
||||
outputImg.appendChild(giffy);
|
||||
outputImg.href = giffy.src;
|
||||
outputImg.download = "loof.gif";
|
||||
giffy.href = giffy.src;
|
||||
giffy.download = "loop.gif";
|
||||
|
||||
document.body.appendChild(outputImg);
|
||||
//document.body.appendChild(giffy);
|
||||
//window.open('about:blank').document.body.appendChild(outputImg);
|
||||
//var OpenWindow = window.open('about:blank','_blank');
|
||||
//OpenWindow.document.body.appendChild(outputImg);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
<div id='hidden' style="display: none;">
|
||||
</div>
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
255
one2.php
Normal file
255
one2.php
Normal file
@@ -0,0 +1,255 @@
|
||||
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
|
||||
<script src="js/jsani/lib/jquery-1.11.1.min.js"></script>
|
||||
<script src="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.js"></script>
|
||||
<link href="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.css" rel="stylesheet" type="text/css">
|
||||
<script src="js/jsani/jquery.jsani.min.js"></script>
|
||||
<link href="js/jsani/jquery.jsani.min.css" rel="stylesheet" type="text/css">
|
||||
|
||||
</head>
|
||||
|
||||
<style type="text/css">
|
||||
html, body {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
#caminfo {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
table,th,td {
|
||||
border: 1px solid black;
|
||||
padding: 0 15px;
|
||||
|
||||
}
|
||||
|
||||
|
||||
</style>
|
||||
<body>
|
||||
|
||||
|
||||
<div style="width: auto; height: 150px; margin: auto;">
|
||||
|
||||
<div id="caminfo">
|
||||
<table>
|
||||
<tbody id="camstats"></tbody>
|
||||
</table>
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style="width: auto; margin: auto;">
|
||||
<div id="animation_1"></div>
|
||||
</div>
|
||||
|
||||
<div id="archive">
|
||||
<label>Current archive length is 15 days</label>
|
||||
<form onsubmit="return false;" method="post" name="archive">
|
||||
<label for="archive">Enter Ending Date Time For Archive Data (YYYY-MM-DD HH:MM) UTC</label>
|
||||
<input type="text" id="dtg" name="dtg">
|
||||
<input type="submit" name="submit" value="Submit" onclick="archiveshow()" />
|
||||
</form>
|
||||
<br>
|
||||
<br>
|
||||
|
||||
<form onsubmit="return false;" method="post" name="gifit">
|
||||
<label for="gifit">Enter frame delay and last frame dwell to make an animated gif from loaded frames.</label>
|
||||
<br>
|
||||
<label for="delay">Delay</label>
|
||||
<input type="text" id="delay" name="delay" value="20" size="6">
|
||||
<label for="delay">Last Frame Delay</label>
|
||||
<input type="text" id="lastdelay" name="lastdelay" value="200" size="6">
|
||||
<label for="maxh">Max Horizonal Size</label>
|
||||
<input type="text" id="maxh" name="maxh" value="500" size="6">
|
||||
<label for="maxv">Max Vertical Size</label>
|
||||
<input type="text" id="maxv" name="maxv" value="400" size="6">
|
||||
<label for="delay">Last Frame Delay</label>
|
||||
<input type="submit" name="gifit" value="Make GIF" onclick="makemp4()"/>
|
||||
</form>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
<?php
|
||||
|
||||
$camid = pg_escape_string($_GET['camid']);
|
||||
echo('var camid = ' . $_GET["camid"] . ';');
|
||||
if($_GET['dtg']){
|
||||
$endtime = pg_escape_string($_GET['dtg']);
|
||||
}
|
||||
if($_GET['camimages']){
|
||||
$camimages = $_GET['camimages'];
|
||||
echo('var camimages = ' . $_GET["camimages"] . ';');
|
||||
}
|
||||
if(!$_GET['camimages']){
|
||||
echo('var camimages = ' . 20 . ';');
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if($_GET['dtg']){
|
||||
echo('var camtime = ' . json_encode($_GET["dtg"]). ';');
|
||||
}
|
||||
|
||||
|
||||
|
||||
?>
|
||||
|
||||
externalcam = [];
|
||||
|
||||
|
||||
function archiveshow() {
|
||||
archive = document.getElementById("dtg").value;
|
||||
archive=archive.trim().replace(/ /g, '+');
|
||||
window.open("one.php?camid=" + camid + "&camimages=" + camimages + "&dtg=" + archive);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
function doTheInsert(label1,value1,label2,value2) {
|
||||
newRow="<td>" + label1 + "</td><td>" + value1 + "</td><td>" + label2 + "</td><td>" + value2 + "</td>";
|
||||
document.getElementById('camstats').innerHTML += newRow;
|
||||
}
|
||||
|
||||
|
||||
|
||||
function updatetable() {
|
||||
$.getJSON("single.php?camid=" + camid, function(caminfo){
|
||||
document.getElementById('camstats').innerHTML = '';
|
||||
doTheInsert('State',caminfo[0]['state'],'County',caminfo[0]['county']);
|
||||
doTheInsert('Public Zone', caminfo[0]['pzone'], 'CWA', caminfo[0]['cwa']);
|
||||
doTheInsert('Elevation',caminfo[0]['elevation'] + ' feet', 'Direct link to feed (all may not be accessible from RLX)','<a href="' + caminfo[0]['url'] + '">' + 'link' + '</a>');
|
||||
doTheInsert('Description','<a href="' + googleMap(caminfo[0]['lat'],caminfo[0]['lon']) + '" target=”_blank">' + caminfo[0]['description'] + '</a>', '','');
|
||||
});
|
||||
}
|
||||
|
||||
function googleMap(lat, lon){
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + lat + "+" + lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
function getcam() {
|
||||
if (typeof camtime !== 'undefined') {
|
||||
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages + '&dtg=' + camtime;
|
||||
|
||||
} else {
|
||||
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages;
|
||||
|
||||
}
|
||||
|
||||
//$.getJSON("individualcam.php?camid=" + camid + '&camimages=' + camimages, function(data){
|
||||
$.getJSON(reqstring, function(data){
|
||||
var camlist = [];
|
||||
var filetimes = [];
|
||||
|
||||
for(var i in data){
|
||||
camlist.push(data[i].filepath);
|
||||
filetimes.push('<a href="./camdata/' + data[i].filepath + '">Pause Loop and click this to view image directly</a> ' + 'Image Retrieved: ' + data[i].dateutc + ' UTC');
|
||||
externalcam.push("./camdata/" + data[i].filepath);
|
||||
}
|
||||
camlist.reverse();
|
||||
filetimes.reverse();
|
||||
externalcam.reverse();
|
||||
var animation = $("#animation_1").jsani({
|
||||
baseDir: './camdata',
|
||||
imageSrcs: camlist,
|
||||
aniWidth: 800,
|
||||
aniHeight: 600,
|
||||
//initdwell: 200,
|
||||
frameLabels: filetimes,
|
||||
controls: ['framelabel', 'stopplay', 'firstframe', 'previous', 'next', 'lastframe', 'looprock', 'slow', 'fast', 'zoom'],
|
||||
last_frame_pause: 8,
|
||||
//first_frame_pause: 1,
|
||||
//frame_pause: '0:5, 3:6'
|
||||
});
|
||||
|
||||
|
||||
updatetable();
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
getcam();
|
||||
|
||||
window.setInterval(refresh, 60000);
|
||||
|
||||
function refresh() {
|
||||
window.location.reload();
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
function makemp4() {
|
||||
var delay = document.getElementById("delay").value;
|
||||
var lastdelay = document.getElementById("lastdelay").value;
|
||||
var maxh = document.getElementById("maxh").value;
|
||||
var maxv = document.getElementById("maxv").value;
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: 'mp4.php',
|
||||
data: {data: externalcam, images: camimages, delay: delay, lastdelay: lastdelay, maxh: maxh, maxv: maxv},
|
||||
success: function(data) {
|
||||
//var outputImg = document.createElement('img');
|
||||
//var giffy = document.querySelector(`data:image/gif;base64,${data}`);
|
||||
var giffy = document.createElement('img');
|
||||
giffy.src = `data:image/gif;base64,${data}`;
|
||||
var outputImg = document.createElement('a');
|
||||
outputImg.innerHTML = "Click to save image <br>";
|
||||
outputImg.appendChild(giffy);
|
||||
outputImg.href = giffy.src;
|
||||
outputImg.download = "loof.gif";
|
||||
giffy.href = giffy.src;
|
||||
giffy.download = "loop.gif";
|
||||
|
||||
document.body.appendChild(outputImg);
|
||||
//document.body.appendChild(giffy);
|
||||
//window.open('about:blank').document.body.appendChild(outputImg);
|
||||
//var OpenWindow = window.open('about:blank','_blank');
|
||||
//OpenWindow.document.body.appendChild(outputImg);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
<div id='hidden' style="display: none;">
|
||||
</div>
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
59
onedb.html
Normal file
59
onedb.html
Normal file
@@ -0,0 +1,59 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Wunderground obs</title>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.js" integrity="sha512-WNLxfP/8cVYL9sj8Jnp6et0BkubLP31jhTG9vhL/F5uEZmg5wEzKoXp1kJslzPQWwPT1eyMiSxlKCgzHLOTOTQ==" crossorigin="anonymous"></script>
|
||||
<link href="https://unpkg.com/tabulator-tables@4.7.2/dist/css/tabulator.min.css" rel="stylesheet">
|
||||
<script type="text/javascript" src="https://unpkg.com/tabulator-tables@4.7.2/dist/js/tabulator.min.js"></script>
|
||||
<button onclick="reloadData()">Data autorefreshes every 5 minutes, click to refresh now</button>
|
||||
<span>QPE 00L is direct from WU, the other QPE are derived and may be off if time settings on the individual PWS are incorrect</span>
|
||||
<input type="checkbox" id="cwa" name="cwa" value="RLX" onchange="filters()">
|
||||
<label for="cwa">RLX only</label><br>
|
||||
<div id="wunderobs"></div>
|
||||
|
||||
<script>
|
||||
|
||||
function googleMap(cell, formatterParams){
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + cell.getData().lat + "+" + cell.getData().lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
|
||||
function reloadData() {
|
||||
table.replaceData("dbone.php");
|
||||
}
|
||||
|
||||
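// Tabulator renders the PWS observations: setData() pulls the initial rows from
// oneraindb.php, while reloadData() currently requests dbone.php, a separate
// endpoint.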
var table = new Tabulator("#wunderobs", {
|
||||
responsiveLayout:true,
|
||||
tooltipsHeader:true,
|
||||
columns:[
|
||||
{title:"Station", field:"sitename"},
|
||||
{title:"T", field:"tempf"},
|
||||
{title:"QPE 00L", field:"preciptotal",formatter:"money",headerTooltip:"Since Midnight"},
|
||||
{title:"Winddir", field:"winddir"},
|
||||
{title:"Speed", field:"windspd",headerTooltip:"Mph"},
|
||||
{title:"Gust", field:"windgust",headerTooltip:"Mph"}
|
||||
|
||||
],
|
||||
});
|
||||
|
||||
table.setData("oneraindb.php");
|
||||
|
||||
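// filters() applies or removes a Tabulator filter so only rows whose cwa column
// equals 'RLX' are shown while the checkbox is checked.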
function filters() {
|
||||
var y = document.getElementById("cwa").checked;
|
||||
if (y) {
|
||||
table.addFilter("cwa", "=", 'RLX');
|
||||
}
|
||||
if (!y) {
|
||||
table.removeFilter("cwa", "=", 'RLX');
|
||||
}
|
||||
}
|
||||
// {title:"24hr QPE", field:"rain24", formatterParms:{precision:2}},
|
||||
var timeout = setInterval(reloadData, 300000);
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
265
onenew.php
Normal file
265
onenew.php
Normal file
@@ -0,0 +1,265 @@
|
||||
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
|
||||
<script src="js/jsani/lib/jquery-1.11.1.min.js"></script>
|
||||
<script src="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.js"></script>
|
||||
<link href="js/jsani/lib/jquery-ui-1.11.2/jquery-ui.min.css" rel="stylesheet" type="text/css">
|
||||
<script src="js/jsani/jquery.jsani.min.js"></script>
|
||||
<link href="js/jsani/jquery.jsani.min.css" rel="stylesheet" type="text/css">
|
||||
|
||||
</head>
|
||||
|
||||
<style type="text/css">
|
||||
html, body {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
#caminfo {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
table,th,td {
|
||||
border: 1px solid black;
|
||||
padding: 0 15px;
|
||||
|
||||
}
|
||||
|
||||
|
||||
</style>
|
||||
<body>
|
||||
|
||||
|
||||
<div style="width: auto; height: 150px; margin: auto;">
|
||||
|
||||
<div id="caminfo">
|
||||
<table>
|
||||
<tbody id="camstats"></tbody>
|
||||
<form onsubmit="return false;" method="post" name="gifit">
|
||||
<input type="submit" name="stopit" value="Stop Auto Refresh" onclick="stoptimer()"/>
|
||||
</form>
|
||||
</table>
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style="width: auto; margin: auto;">
|
||||
<div id="animation_1"></div>
|
||||
</div>
|
||||
|
||||
<div id="archive">
|
||||
<label>Current archive length is 15 days</label>
|
||||
<form onsubmit="return false;" method="post" name="archive">
|
||||
<label for="archive">Enter Ending Date Time For Archive Data (YYYY-MM-DD HH:MM) UTC</label>
|
||||
<input type="text" id="dtg" name="dtg">
|
||||
<input type="submit" name="submit" value="Submit" onclick="archiveshow()" />
|
||||
</form>
|
||||
<br>
|
||||
|
||||
<form onsubmit="return false;" method="post" name="gifit">
|
||||
<label for="gifit">Enter frame delay and last frame delay (ms) to make an animated gif from loaded frames (will take a couple seconds).</label>
|
||||
<br>
|
||||
<label for="delay">Delay</label>
|
||||
<input type="text" id="delay" name="delay" value="20" size="6">
|
||||
<label for="delay">Last Frame Delay</label>
|
||||
<input type="text" id="lastdelay" name="lastdelay" value="200" size="6">
|
||||
<label for="maxh">Constrain Horizonal Size</label>
|
||||
<input type="text" id="maxh" name="maxh" value="500" size="6">
|
||||
<label for="maxv">Constrain Vertical Size</label>
|
||||
<input type="text" id="maxv" name="maxv" value="400" size="6">
|
||||
<label for="delay">Last Frame Delay</label>
|
||||
<input type="submit" name="gifit" value="Make GIF" onclick="makemp4()"/>
|
||||
</form>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
<?php
|
||||
|
||||
$camid = pg_escape_string($_GET['camid']);
|
||||
echo('var camid = ' . $_GET["camid"] . ';');
|
||||
if($_GET['dtg'] ?? null){
|
||||
$endtime = pg_escape_string($_GET['dtg']);
|
||||
}
|
||||
if($_GET['camimages'] ?? null){
|
||||
$camimages = $_GET['camimages'];
|
||||
echo('var camimages = ' . $_GET["camimages"] . ';');
|
||||
}
|
||||
if(!($_GET['camimages'] ?? null)){
|
||||
echo('var camimages = ' . 20 . ';');
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if($_GET['dtg'] ?? null){
|
||||
echo('var camtime = ' . json_encode($_GET["dtg"]). ';');
|
||||
}
|
||||
|
||||
|
||||
|
||||
?>
|
||||
|
||||
externalcam = [];
|
||||
|
||||
|
||||
function archiveshow() {
|
||||
archive = document.getElementById("dtg").value;
|
||||
archive=archive.trim().replace(/ /g, '+');
|
||||
window.open("one.php?camid=" + camid + "&camimages=" + camimages + "&dtg=" + archive);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
function doTheInsert(label1,value1,label2,value2) {
|
||||
newRow="<td>" + label1 + "</td><td>" + value1 + "</td><td>" + label2 + "</td><td>" + value2 + "</td>";
|
||||
document.getElementById('camstats').innerHTML += newRow;
|
||||
}
|
||||
|
||||
|
||||
|
||||
function updatetable() {
|
||||
$.getJSON("single.php?camid=" + camid, function(caminfo){
|
||||
document.getElementById('camstats').innerHTML = '';
|
||||
doTheInsert('State',caminfo[0]['state'],'County',caminfo[0]['county']);
|
||||
doTheInsert('Public Zone', caminfo[0]['pzone'], 'CWA', caminfo[0]['cwa']);
|
||||
doTheInsert('Elevation',caminfo[0]['elevation'] + ' feet', 'Direct link to feed (all may not be accessible from RLX)','<a href="' + caminfo[0]['url'] + '">' + 'link' + '</a>');
|
||||
if (caminfo[0]['source'] == null) {
|
||||
doTheInsert('Source for sharing','none', 'Permission to share (if null, use judgement)',caminfo[0]['permission']);
|
||||
}
|
||||
if (caminfo[0]['source'] != null) {
|
||||
doTheInsert('Source for sharing','<a href="' + caminfo[0]['source'] + '" target=”_blank">' + 'link' + '</a>', 'Permission to share (if null, use judgement)',caminfo[0]['permission']);
|
||||
}
|
||||
|
||||
|
||||
doTheInsert('Description','<a href="' + googleMap(caminfo[0]['lat'],caminfo[0]['lon']) + '" target=”_blank">' + caminfo[0]['description'] + '</a>', '','');
|
||||
document.title = 'Cam: ' + caminfo[0]['description'];
|
||||
});
|
||||
}
|
||||
|
||||
function googleMap(lat, lon){
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + lat + "+" + lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
function getcam() {
|
||||
if (typeof camtime !== 'undefined') {
|
||||
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages + '&dtg=' + camtime;
|
||||
|
||||
} else {
|
||||
var reqstring = "individualcam.php?camid=" + camid + '&camimages=' + camimages;
|
||||
|
||||
}
|
||||
|
||||
//$.getJSON("individualcam.php?camid=" + camid + '&camimages=' + camimages, function(data){
|
||||
$.getJSON(reqstring, function(data){
|
||||
var camlist = [];
|
||||
var filetimes = [];
|
||||
|
||||
for(var i in data){
|
||||
camlist.push(data[i].filepath);
|
||||
filetimes.push('<a href="./camdata/' + data[i].filepath + '">Pause Loop and click this to view image directly</a> ' + 'Image Retrieved: ' + data[i].dateutc + ' UTC');
|
||||
externalcam.push("./camdata/" + data[i].filepath);
|
||||
}
|
||||
camlist.reverse();
|
||||
filetimes.reverse();
|
||||
externalcam.reverse();
|
||||
var animation = $("#animation_1").jsani({
|
||||
baseDir: './camdata',
|
||||
imageSrcs: camlist,
|
||||
aniWidth: 800,
|
||||
aniHeight: 600,
|
||||
//initdwell: 200,
|
||||
frameLabels: filetimes,
|
||||
controls: ['framelabel', 'stopplay', 'firstframe', 'previous', 'next', 'lastframe', 'looprock', 'slow', 'fast', 'zoom'],
|
||||
last_frame_pause: 8,
|
||||
//first_frame_pause: 1,
|
||||
//frame_pause: '0:5, 3:6'
|
||||
});
|
||||
|
||||
|
||||
updatetable();
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
getcam();
|
||||
|
||||
// window.setInterval('refresh()', 60000);
|
||||
const myInterval = setInterval(refresh, 60000);
|
||||
function refresh() {
|
||||
window.location.reload();
|
||||
}
|
||||
|
||||
function stoptimer() {
|
||||
clearInterval(myInterval);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
function makemp4() {
|
||||
var delay = document.getElementById("delay").value;
|
||||
var lastdelay = document.getElementById("lastdelay").value;
|
||||
var maxh = document.getElementById("maxh").value;
|
||||
var maxv = document.getElementById("maxv").value;
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: 'mp4.php',
|
||||
data: {data: externalcam, images: camimages, delay: delay, lastdelay: lastdelay, maxh: maxh, maxv: maxv},
|
||||
success: function(data) {
|
||||
var giffy = document.createElement('img');
|
||||
giffy.src = `data:image/gif;base64,${data}`;
|
||||
var outputImg = document.createElement('a');
|
||||
outputImg.innerHTML = "Click here or on the image to save gif <br>";
|
||||
outputImg.appendChild(giffy);
|
||||
outputImg.href = giffy.src;
|
||||
outputImg.download = "loof.gif";
|
||||
giffy.href = giffy.src;
|
||||
giffy.download = "loop.gif";
|
||||
|
||||
document.body.appendChild(outputImg);
|
||||
//document.body.appendChild(giffy);
|
||||
//window.open('about:blank').document.body.appendChild(outputImg);
|
||||
//var OpenWindow = window.open('about:blank','_blank');
|
||||
//OpenWindow.document.body.appendChild(outputImg);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</script>
|
||||
|
||||
<div id='hidden' style="display: none;">
|
||||
</div>
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
152
onerain.py
Normal file
152
onerain.py
Normal file
@@ -0,0 +1,152 @@
|
||||
import time
|
||||
import requests
|
||||
import json
|
||||
import geojson
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from psycopg2.extras import Json
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
|
||||
#one rain sensor database with last obs and metadata
|
||||
#one rain sensor database with all obs
|
||||
|
||||
#metsensor = [30,53,11,10,50,44,40,41]
|
||||
metsensor = [(3,'tempf'),(2,'raintotal'),(8,'humidity'),(7,'winddir'),(4,'windspd'),(5,'windgust')]
|
||||
rainsensor = [(2,'raintotal')]
|
||||
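# Each tuple maps a OneRain device_id to the onerainsites column it feeds
# (e.g. device 3 -> tempf, device 2 -> raintotal); getData() below pulls the raw
# series for one (site_id, device_id) pair from the flot export endpoint.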
|
||||
|
||||
features = []
|
||||
alldata = []
|
||||
S = requests.Session()
|
||||
|
||||
cursor.execute("SELECT siteid, sitetype from onerainsites")
|
||||
#WHERE (Active = True) and ((EXTRACT(EPOCH FROM (current_timestamp - lastob ))/60) > 8 or (lastob ISNULL))"
|
||||
allonerainsites = cursor.fetchall()
|
||||
|
||||
|
||||
|
||||
def getData(siteid,sensorid):
|
||||
apiget = 'https://wvdhsem.onerain.com/export/flot/?method=sensor&site_id=' + str(siteid) + '&device_id=' + str(sensorid)
|
||||
#print(apiget)
|
||||
dataresponse = json.loads(S.get(apiget).text)
|
||||
#print(dataresponse)
|
||||
|
||||
|
||||
return dataresponse
|
||||
|
||||
|
||||
|
||||
|
||||
# sql = 'INSERT INTO wuobs (stationid, dateutc, winddir, windspd, windgust, tempf, dewpt, humidity, pressure, preciptotal) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (stationid,dateutc) DO NOTHING'
|
||||
# sql2 = 'UPDATE wusites SET lastob = %s, winddir = %s, windspd = %s, windgust= %s, tempf = %s, dewpt = %s, humidity = %s, pressure = %s, preciptotal = %s WHERE stationid = %s'
|
||||
# cursor.execute(sql,ob)
|
||||
# cursor.execute(sql2,ob2)
|
||||
# conn.commit()
|
||||
# setDerived(stationid)
|
||||
|
||||
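# setDerived() (kept from the WU ingest; not called in this script) recomputes
# rolling precipitation and temperature/wind extremes for a wusites station by
# sampling wuobs at fixed lookbacks (3, 6, 24 hours and the last pre-midnight
# ob), then derives rain3/rain6/rain24 from differences in the cumulative
# precip counter.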
def setDerived(stationid):
|
||||
data1 = (stationid,)
|
||||
data = (stationid,stationid)
|
||||
ago24 = "UPDATE wusites set ago24 = subquery.preciptotal, ago24time = subquery.dateutc from (select preciptotal,stationid,dateutc from wuobs WHERE (dateutc < timezone('utc',current_timestamp) - interval '23 hours') and (dateutc > timezone('utc',current_timestamp) - interval '24 hours') and stationid = %s order by dateutc desc limit 1) as subquery where wusites.stationid = %s"
|
||||
ago3 = "UPDATE wusites set ago3 = subquery.preciptotal, ago3time = subquery.dateutc from (select preciptotal,stationid,dateutc from wuobs WHERE (dateutc < timezone('utc',current_timestamp) - interval '2.5 hours') and (dateutc > timezone('utc',current_timestamp) - interval '3 hours') and stationid = %s order by dateutc desc limit 1) as subquery where wusites.stationid = %s"
|
||||
ago6 = "UPDATE wusites set ago6 = subquery.preciptotal, ago6time = subquery.dateutc from (select preciptotal,stationid,dateutc from wuobs WHERE (dateutc < timezone('utc',current_timestamp) - interval '5.2 hours') and (dateutc > timezone('utc',current_timestamp) - interval '6 hours') and stationid = %s order by dateutc desc limit 1) as subquery where wusites.stationid = %s"
|
||||
midnight = "UPDATE wusites set lastmidnight = subquery.preciptotal from (select preciptotal,stationid from wuobs WHERE (dateutc < timezone('UTC', current_date::timestamp at time zone 'US/Eastern')) and (dateutc > timezone('UTC', current_date::timestamp at time zone 'US/Eastern' - interval '40 minutes')) and stationid = %s order by dateutc desc limit 1) as subquery where wusites.stationid = %s"
|
||||
windmax = "UPDATE wusites set windmax = subquery.windmax from (SELECT MAX(windgust) as windmax FROM wuobs where (dateutc > timezone('utc',current_timestamp) - interval '24 hours') and stationid = %s) as subquery where wusites.stationid = %s"
|
||||
maxt = "UPDATE wusites set maxt = subquery.maxt from (SELECT MAX(tempf) as maxt FROM wuobs where (dateutc > timezone('utc',current_timestamp) - interval '24 hours') and stationid = %s) as subquery where wusites.stationid = %s"
|
||||
mint = "UPDATE wusites set mint = subquery.mint from (SELECT MIN(tempf) as mint FROM wuobs where (dateutc > timezone('utc',current_timestamp) - interval '24 hours') and stationid = %s) as subquery where wusites.stationid = %s"
|
||||
|
||||
|
||||
|
||||
cursor.execute(ago24,data)
|
||||
cursor.execute(ago6,data)
|
||||
cursor.execute(ago3,data)
|
||||
cursor.execute(windmax,data)
|
||||
cursor.execute(midnight,data)
|
||||
cursor.execute(maxt,data)
|
||||
cursor.execute(mint,data)
|
||||
conn.commit()
|
||||
rain3 = "update wusites set rain3 = (case when (wusites.ago3time < timezone('UTC', current_date::timestamp at time zone 'US/Eastern') and wusites.stationid = %s) then (wusites.preciptotal + wusites.lastmidnight - wusites.ago3) when (wusites.ago3time > timezone('UTC', current_date::timestamp at time zone 'US/Eastern') and wusites.stationid = %s) then (wusites.preciptotal - wusites.ago3) end) where wusites.stationid = %s"
|
||||
rain6 = "update wusites set rain6 = (case when (wusites.ago6time < timezone('UTC', current_date::timestamp at time zone 'US/Eastern') and wusites.stationid = %s) then (wusites.preciptotal + wusites.lastmidnight - wusites.ago6) when (wusites.ago6time > timezone('UTC', current_date::timestamp at time zone 'US/Eastern') and wusites.stationid = %s) then (wusites.preciptotal - wusites.ago6) end) where wusites.stationid = %s"
|
||||
rain24 = "update wusites set rain24 = (wusites.preciptotal + wusites.lastmidnight - wusites.ago24) where wusites.stationid = %s"
|
||||
data2 = (stationid,stationid,stationid)
|
||||
cursor.execute(rain3,data2)
|
||||
cursor.execute(rain6,data2)
|
||||
cursor.execute(rain24,data1)
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
for i in allonerainsites:
|
||||
if i[1] == 'MET':
|
||||
for j in metsensor:
|
||||
|
||||
tempdata = getData(i[0],j[0])
|
||||
for p in tempdata['data']:
|
||||
datafield = p[1]
|
||||
datefield = int(p[0])/1000
|
||||
ob = (i[0],datefield,int(j[0]),datafield)
|
||||
sql = 'INSERT INTO onerainobs (siteid, dateutc, sensorid, data1) VALUES (%s,to_timestamp(%s),%s,%s) ON CONFLICT (siteid,sensorid,dateutc,data1) DO NOTHING'
|
||||
cursor.execute(sql,ob)
|
||||
conn.commit()
|
||||
if i[1] == 'Rain':
|
||||
for j in rainsensor:
|
||||
tempdata = getData(i[0],j[0])
|
||||
for p in tempdata['data']:
|
||||
datafield = p[1]
|
||||
datefield = int(p[0])/1000
|
||||
ob = (i[0],datefield,int(j[0]),datafield)
|
||||
sql = 'INSERT INTO onerainobs (siteid, dateutc, sensorid, data1) VALUES (%s,to_timestamp(%s),%s,%s) ON CONFLICT (siteid,sensorid,dateutc,data1) DO NOTHING'
|
||||
cursor.execute(sql,ob)
|
||||
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
for i in allonerainsites:
|
||||
if i[1] == 'MET':
|
||||
for j in metsensor:
|
||||
sql = 'SELECT data1,dateutc from onerainobs where siteid = %s and sensorid = %s order by dateutc desc limit 1'
|
||||
getob = (i[0],j[0])
|
||||
cursor.execute(sql,getob)
|
||||
#WHERE (Active = True) and ((EXTRACT(EPOCH FROM (current_timestamp - lastob ))/60) > 8 or (lastob ISNULL))"
|
||||
obdata = cursor.fetchall()
|
||||
|
||||
if len(obdata) == 1:
|
||||
for l in obdata:
|
||||
|
||||
|
||||
sensor = str(j[1])
|
||||
sensortime = str(j[1])+'time'
|
||||
sql = 'UPDATE onerainsites set ' + sensor + ' = %s, ' + sensortime + ' = %s where siteid = %s'
|
||||
updateob = (l[0],l[1],i[0])
|
||||
|
||||
|
||||
|
||||
cursor.execute(sql,updateob)
|
||||
conn.commit()
|
||||
|
||||
if i[1] == 'Rain':
|
||||
|
||||
for j in rainsensor:
|
||||
sql = 'SELECT data1,dateutc from onerainobs where siteid = %s and sensorid = %s order by dateutc desc limit 1'
|
||||
getob = (i[0],j[0])
|
||||
cursor.execute(sql,getob)
|
||||
#WHERE (Active = True) and ((EXTRACT(EPOCH FROM (current_timestamp - lastob ))/60) > 8 or (lastob ISNULL))"
|
||||
obdata = cursor.fetchall()
|
||||
|
||||
if len(obdata) == 1:
|
||||
for l in obdata:
|
||||
|
||||
|
||||
sensor = str(j[1])
|
||||
sensortime = str(j[1])+'time'
|
||||
sql = 'UPDATE onerainsites set ' + sensor + ' = %s, ' + sensortime + ' = %s where siteid = %s'
|
||||
updateob = (l[0],l[1],i[0])
|
||||
|
||||
|
||||
|
||||
cursor.execute(sql,updateob)
|
||||
conn.commit()
|
||||
26
oneraindb.php
Normal file
26
oneraindb.php
Normal file
@@ -0,0 +1,26 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
// Performing SQL query
|
||||
$query = "SELECT sitename, lat, lon, tempf, humidity,raintotal,winddir,windspd,windgust,tempftime, humiditytime,raintotaltime,winddirtime,windspdtime,windgusttime FROM onerainsites where sitetype = 'MET';";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in HTML
|
||||
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
|
||||
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
|
||||
|
||||
|
||||
1103
outage.html
Normal file
1103
outage.html
Normal file
File diff suppressed because it is too large
Load Diff
1103
outagemap.html
Normal file
1103
outagemap.html
Normal file
File diff suppressed because it is too large
Load Diff
24
power.php
Normal file
24
power.php
Normal file
@@ -0,0 +1,24 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
// Performing SQL query
|
||||
//$query = "SELECT distinct on (camid) camid, filepath FROM camdb order by camid,dateutc desc";
|
||||
$query = "SELECT lat,lon,outagen FROM power WHERE active = true and cwa = 'RLX'";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in HTML
|
||||
|
||||
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
|
||||
}
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
555
power2.py
Normal file
555
power2.py
Normal file
@@ -0,0 +1,555 @@
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
import pandas as pd
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
proxies = {"http":"http://nws:nws@localhost:9000"}
|
||||
|
||||
aepwvnew = ['0320001','0320003','0320010','0320011','0320012','0320013','0320021','0320030','0320031','0320100','0320102','0320120']
|
||||
aepohnew = ['0320013','0320010','0320011','0320012','0320003','0320001','0302322','0302233','0302232','0302223','0320102','0320100']
|
||||
aepkynew = ['0320031','0320030','0320021','0320013','0320012','0320011','0320010','0320003','0320001']
|
||||
firstenergy = ['030223','030232','032001','032003','032010','032012']
|
||||
dominionva = ['0320121','0320120','0300103','0320102','0320101','0320100','0320031','0320013','0320011']
|
||||
baltimore = ['0320011','0320100','0320101','0320013','0320102','0320103']
|
||||
pepco = ['03201002','03201003','03201020','03201021']
|
||||
|
||||
|
||||
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
aepwvkubra = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
|
||||
aepohkubra = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
|
||||
aepkykubra = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
|
||||
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/pa/interval_generation_data/metadata.json"
|
||||
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
|
||||
domvameta = 'https://outagemap.dominionenergy.com/resources/data/external/interval_generation_data/metadata.json'
|
||||
wvfemeta = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
|
||||
|
||||
aepwvcluster = 'cluster-2'
|
||||
aepohcluster = 'cluster-1'
|
||||
aepkycluster = 'cluster-2'
|
||||
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
aepbasewv = 'https://kubra.io/cluster-data/'
|
||||
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
|
||||
|
||||
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
|
||||
domvabase = 'https://outagemap.dominionenergy.com/resources/data/external/interval_generation_data/'
|
||||
graysonrecc = 'https://outages.graysonrecc.com/data/outages.json'
|
||||
|
||||
kubrabase = 'https://kubra.io/cluster-data/'
|
||||
firstenergybase = 'https://kubra.io/cluster-data/'
|
||||
firstenergycluster = 'cluster-4'
|
||||
firstenergyhex1 = 'f5f94943-5df4-4752-a0a7-8ef4baded880'
|
||||
firstenergyhex2 = 'e2986f8a-5a69-4d2f-821c-e5db03932b68'
|
||||
|
||||
southcentraljson = 'https://outage.southcentralpower.com/data/outages.json'
|
||||
|
||||
allcountyoutages = []
|
||||
allkubraoutages = []
|
||||
allaepkubracoutages = []
|
||||
|
||||
def remove_external_curly_braces(s):
|
||||
try:
|
||||
p = s[0]
|
||||
return p
|
||||
except Exception as e:
|
||||
print('error in curly ' + str(e))
|
||||
return s
|
||||
|
||||
|
||||
|
||||
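# The Kubra currentState endpoint returns a cluster_interval_generation_data
# path whose third and fourth segments are the two hex identifiers needed to
# build per-tile outage URLs; they are split out of the JSON here.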
def get_kubra_hexes(url):
|
||||
outage = S.get(url)
|
||||
try:
|
||||
tempdata = json.loads(outage.text)
|
||||
bothhex = tempdata.get('data').get('cluster_interval_generation_data')
|
||||
hexes = bothhex.split('/')
|
||||
returndata = (hexes[2],hexes[3])
|
||||
return returndata
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
|
||||
|
||||
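# kubra() walks the Kubra quadtree: each tile name is a quadkey, and when a
# record reports desc.cluster == True its four child tiles (name + '0'..'3')
# are queued and fetched recursively until only leaf outage records remain.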
def kubra(baseurl,cluster,namearray,meta):
|
||||
try:
|
||||
data = get_kubra_hexes(meta)
|
||||
#print(data)
|
||||
hex1, hex2 = get_kubra_hexes(meta)
|
||||
|
||||
newnamearray = []
|
||||
for i in namearray:
|
||||
dir = str(i)
|
||||
dir = dir[-3:]
|
||||
dir = str(dir[::-1])
|
||||
url = baseurl + dir + '/' + hex1 + '/' + hex2 + '/public/' + cluster + '/' + i + '.json'
|
||||
outage = S.get(url)
|
||||
if outage.headers.get('Content-Type').startswith('application/json'):
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']:
|
||||
outageinfo = None
|
||||
try:
|
||||
outageinfo = j.get('desc').get('cluster')
|
||||
except:
|
||||
continue
|
||||
if outageinfo == True:
|
||||
for k in range(4):
|
||||
newnamearray.append(str(i)+ str(k))
|
||||
if outageinfo == False:
|
||||
allkubraoutages.append(j)
|
||||
|
||||
|
||||
newnamearray = list(dict.fromkeys(newnamearray))
|
||||
if len(newnamearray) > 0:
|
||||
kubra(baseurl,cluster,newnamearray,meta)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
|
||||
def kubra_aep(baseurl,cluster,namearray,meta):
|
||||
data = get_kubra_hexes(meta)
|
||||
#print(data)
|
||||
hex1, hex2 = get_kubra_hexes(meta)
|
||||
newnamearray = []
|
||||
for i in namearray:
|
||||
dir = str(i)
|
||||
dir = dir[-3:]
|
||||
dir = str(dir[::-1])
|
||||
url = baseurl + dir + '/' + hex1 + '/' + hex2 + '/public/' + cluster + '/' + i + '.json'
|
||||
outage = S.get(url)
|
||||
|
||||
if outage.headers.get('Content-Type').startswith('application/json'):
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']:
|
||||
outageinfo = None
|
||||
try:
|
||||
outageinfo = j.get('desc').get('cluster')
|
||||
except:
|
||||
continue
|
||||
if outageinfo == True:
|
||||
for k in range(4):
|
||||
newnamearray.append(str(i)+ str(k))
|
||||
if outageinfo == False:
|
||||
allaepkubracoutages.append(j)
|
||||
#allkubraoutages.append(j)
|
||||
|
||||
|
||||
newnamearray = list(dict.fromkeys(newnamearray))
|
||||
if len(newnamearray) > 0:
|
||||
kubra_aep(baseurl,cluster,newnamearray,meta)
|
||||
|
||||
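# insertkubra() decodes the polyline point geometry for each outage, normalizes
# the start/ETR timestamps, and upserts into the power table keyed on pointgeom
# so repeated polls update customer counts instead of duplicating rows.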
def insertkubra(data):
|
||||
for j in data:
|
||||
try:
|
||||
custa = j.get('desc').get('cust_a').get('val')
|
||||
except:
|
||||
continue
|
||||
pointgeom = j.get('geom').get('p')
|
||||
if len(pointgeom) == 1:
|
||||
pointlatlon = polyline.decode(pointgeom[0])
|
||||
lat = pointlatlon[0][0]
|
||||
lon = pointlatlon[0][1]
|
||||
else:
|
||||
continue
|
||||
areageom = j.get('geom').get('a')
|
||||
if areageom != None:
|
||||
areageom = remove_external_curly_braces(areageom)
|
||||
else:
|
||||
areageom = None
|
||||
cause = j.get('desc').get('cause').get('EN-US')
|
||||
#cause = j.get('desc').get('cause')
|
||||
start = j.get('desc').get('start_time')
|
||||
if start != None:
|
||||
try:
|
||||
start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%z")
|
||||
except ValueError:
|
||||
start = datetime.strptime(start,"%Y-%m-%dT%H:%M%z")
|
||||
etr = j.get('desc').get('etr')
|
||||
if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
|
||||
if etr != None:
|
||||
try:
|
||||
etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%z")
|
||||
except ValueError:
|
||||
etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M%z")
|
||||
incid = j.get('desc').get('inc_id')
|
||||
crew_status = j.get('desc').get('crew_status').get('EN-US')
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
|
||||
vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
|
||||
val = (sql,vals)
|
||||
# print(val)
|
||||
cursor.execute(sql,vals)
|
||||
conn.commit()
|
||||
|
||||
def insert_kubra_aep(data):
|
||||
for j in data:
|
||||
try:
|
||||
custa = j.get('desc').get('cust_a').get('val')
|
||||
except:
|
||||
continue
|
||||
pointgeom = j.get('geom').get('p')
|
||||
if len(pointgeom) == 1:
|
||||
pointlatlon = polyline.decode(pointgeom[0])
|
||||
lat = pointlatlon[0][0]
|
||||
lon = pointlatlon[0][1]
|
||||
else:
|
||||
continue
|
||||
areageom = j.get('geom').get('a')
|
||||
if areageom != None:
|
||||
areageom = remove_external_curly_braces(areageom)
|
||||
else:
|
||||
areageom = None
|
||||
cause = "Pending Investigation" # Default to Pending if no cause is found
|
||||
cause_dict = j.get('desc').get('cause')
|
||||
if cause_dict:
|
||||
cause = cause_dict.get('EN-US')
|
||||
#cause = j.get('desc').get('cause')
|
||||
start = j.get('desc').get('start_time')
|
||||
if start != None:
|
||||
try:
|
||||
start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%z")
|
||||
except ValueError:
|
||||
start = datetime.strptime(start,"%Y-%m-%dT%H:%M%z")
|
||||
etr = j.get('desc').get('etr')
|
||||
if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
|
||||
if etr != None:
|
||||
try:
|
||||
etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%z")
|
||||
except ValueError:
|
||||
etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M%z")
|
||||
incid = j.get('desc').get('inc_id')
|
||||
crew_status = j.get('desc').get('crew_status').get('EN-US')
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
|
||||
vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
|
||||
val = (sql,vals)
|
||||
#print(val)
|
||||
cursor.execute(sql,vals)
|
||||
conn.commit()
|
||||
|
||||
|
||||
def remove_dupes(l):
|
||||
b = []
|
||||
for i in range(0, len(l)):
|
||||
if l[i] not in l[i+1:]:
|
||||
b.append(l[i])
|
||||
return b
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def newaep(meta,namearray,baseurl):
|
||||
newnamearray = []
|
||||
metainfo = json.loads(S.get(meta).text)
|
||||
metadir = metainfo['directory']
|
||||
for i in namearray:
|
||||
url = baseurl + metadir + '/outages/' + i + '.json'
|
||||
outage = S.get(url)
|
||||
if outage.headers.get('Content-Type').startswith('application/octet-stream'):
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']:
|
||||
outageinfo = None
|
||||
try:
|
||||
outageinfo = j.get('title')
|
||||
except:
|
||||
continue
|
||||
if outageinfo == 'Area Outage':
|
||||
for k in range(4):
|
||||
newnamearray.append(str(i)+ str(k))
|
||||
if outageinfo == 'Outage Information':
|
||||
allkubraoutages.append(j)
|
||||
|
||||
|
||||
newnamearray = list(dict.fromkeys(newnamearray))
|
||||
if len(newnamearray) > 0:
|
||||
newaep(meta,newnamearray,baseurl)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def check_bad_offset(offset):
|
||||
try:
|
||||
if ":" == offset[-3:-2]:
|
||||
offset = offset[:-3]+offset[-2:]
|
||||
return offset
|
||||
except:
|
||||
return offset
|
||||
|
||||
def fix_bad_timestamp(timestamp):
|
||||
parsed_timestamp = pd.to_datetime(timestamp)
|
||||
return parsed_timestamp
|
||||
|
||||
S = requests.Session()
|
||||
S.verify = False
|
||||
|
||||
|
||||
|
||||
def southcentral():
|
||||
Sp = requests.Session()
|
||||
Sp.verify = False
|
||||
#Sp.proxies.update(proxies)
|
||||
temp = Sp.get(southcentraljson).text
|
||||
outageinfo = json.loads(temp)
|
||||
# print(outageinfo)
|
||||
if len(outageinfo) != 0:
|
||||
|
||||
for i in outageinfo:
|
||||
id = i.get('outageRecID')
|
||||
lat = i.get('outagePoint').get('lat')
|
||||
lon = i.get('outagePoint').get('lng')
|
||||
start = i.get('outageStartTime')
|
||||
end = i.get('outageEndTime')
|
||||
cause = i.get('cause')
|
||||
initial = i.get('customersOutInitially')
|
||||
now = i.get('customersOutNow')
|
||||
change = i.get('outageModifiedTime')
|
||||
crew = i.get('outageWorkStatus')
|
||||
# change = check_bad_offset(change)
|
||||
# start = check_bad_offset(start)
|
||||
# end = check_bad_offset(end)
|
||||
|
||||
|
||||
if start != None:
|
||||
start = fix_bad_timestamp(start)
|
||||
if end != None:
|
||||
end = fix_bad_timestamp(end)
|
||||
if change != None:
|
||||
change = fix_bad_timestamp(change)
|
||||
# change = datetime.strptime(change,"%Y-%m-%dT%H:%M:%S%f%z")
|
||||
|
||||
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
sql = "INSERT INTO power (lat,lon,start,derivedstart,cause,outagen,crew_status,peakoutage,pointgeom,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s) on conflict (pointgeom) do update set (outagen, cause, start, crew_status,lastchange) = (%s, %s, %s, %s, %s)"
|
||||
vals = (lat,lon, start, current_timestamp, cause, now, crew, initial,id,change,'True',now,cause,start,crew,change)
|
||||
val = (sql,vals)
|
||||
cursor.execute(sql,vals)
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
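# grayson() (like southcentral() above) reads a simple outages.json feed,
# normalizes its timestamps with pandas, and upserts each record into the power
# table, using outageRecID as the pointgeom key.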
def grayson():
|
||||
outageinfo = json.loads(S.get(graysonrecc).text)
|
||||
if len(outageinfo) != 0:
|
||||
|
||||
for i in outageinfo:
|
||||
id = i.get('outageRecID')
|
||||
lat = i.get('outagePoint').get('lat')
|
||||
lon = i.get('outagePoint').get('lng')
|
||||
start = i.get('outageStartTime')
|
||||
end = i.get('outageEndTime')
|
||||
cause = i.get('cause')
|
||||
initial = i.get('customersOutInitially')
|
||||
now = i.get('customersOutNow')
|
||||
change = i.get('outageModifiedTime')
|
||||
crew = i.get('outageWorkStatus')
|
||||
# change = check_bad_offset(change)
|
||||
# start = check_bad_offset(start)
|
||||
# end = check_bad_offset(end)
|
||||
|
||||
|
||||
if start != None:
|
||||
start = fix_bad_timestamp(start)
|
||||
if end != None:
|
||||
end = fix_bad_timestamp(end)
|
||||
if change != None:
|
||||
change = fix_bad_timestamp(change)
|
||||
# change = datetime.strptime(change,"%Y-%m-%dT%H:%M:%S%f%z")
|
||||
|
||||
|
||||
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
sql = "INSERT INTO power (lat,lon,start,derivedstart,cause,outagen,crew_status,peakoutage,pointgeom,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s) on conflict (pointgeom) do update set (outagen, cause, start, crew_status,lastchange) = (%s, %s, %s, %s, %s)"
|
||||
vals = (lat,lon, start, current_timestamp, cause, now, crew, initial,id,change,'True',now,cause,start,crew,change)
|
||||
val = (sql,vals)
|
||||
cursor.execute(sql,vals)
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def check_outages(meta,namearray,baseurl):
|
||||
metainfo = json.loads(S.get(meta).text)
|
||||
metadir = metainfo['directory']
|
||||
for i in namearray:
|
||||
url = baseurl + metadir + '/outages/' + i + '.json'
|
||||
outage = S.get(url)
|
||||
if outage.headers.get('Content-Type').startswith('application/octet-stream'):
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']:
|
||||
id = j.get('id')
|
||||
try:
|
||||
custa = j.get('desc').get('cust_a').get('val')
|
||||
except:
|
||||
continue
|
||||
pointgeom = j.get('geom').get('p')
|
||||
if len(pointgeom) == 1:
|
||||
pointlatlon = polyline.decode(pointgeom[0])
|
||||
lat = pointlatlon[0][0]
|
||||
lon = pointlatlon[0][1]
|
||||
else:
|
||||
continue
|
||||
areageom = j.get('geom').get('a')
|
||||
if areageom is not None:
areageom = remove_external_curly_braces(areageom)
|
||||
|
||||
cause = j.get('desc').get('cause')
|
||||
start = j.get('desc').get('start')
|
||||
if start != None:
|
||||
start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%f%z")
|
||||
etr = j.get('desc').get('etr')
|
||||
if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
|
||||
if etr != None:
|
||||
etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%f%z")
|
||||
incid = j.get('desc').get('inc_id')
|
||||
crew_status = j.get('desc').get('crew_status')
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,genericid,derivedstart,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
|
||||
vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,id,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
|
||||
val = (sql,vals)
|
||||
cursor.execute(sql,vals)
|
||||
|
||||
|
||||
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
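# check_outages() above (and insertaep() further down) read the point geometry
# as a Google encoded polyline and keep only the first coordinate pair. A
# minimal standalone sketch using the classic example string from the polyline
# package documentation:
def _example_decode_point():
    import polyline
    encoded = "_p~iF~ps|U_ulLnnqC_mqNvxq`@"     # example encoded polyline
    coords = polyline.decode(encoded)            # [(lat, lon), ...]
    lat, lon = coords[0]                         # (38.5, -120.2)
    return lat, lon
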
try:
|
||||
southcentral()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
try:
|
||||
grayson()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
#try:
|
||||
# newaep(aepwvmeta,aepwvnew,aepwvbase)
|
||||
#except Exception as e:
|
||||
# print(e)
|
||||
#try:
|
||||
# newaep(aepohmeta,aepohnew,aepohbase)
|
||||
#except Exception as e:
|
||||
# print(e)
|
||||
#try:
|
||||
# newaep(aepkymeta,aepkynew,aepkybase)
|
||||
#except Exception as e:
|
||||
# print(e)
|
||||
try:
|
||||
kubra_aep(kubrabase,aepwvcluster,aepwvnew,aepwvkubra)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
try:
|
||||
kubra_aep(kubrabase,aepohcluster,aepohnew,aepohkubra)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
try:
|
||||
kubra_aep(kubrabase,aepkycluster,aepkynew,aepkykubra)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
try:
|
||||
#newaep(firstpowerwvmeta,firstpowerwvnew,firstpowerwvbase)
|
||||
kubra(firstenergybase,firstenergycluster,firstenergy,wvfemeta)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
#try:
|
||||
# newaep(domvameta,dominionva,domvabase)
|
||||
#except Exception as e:
|
||||
# print(e)
|
||||
|
||||
#kubra(kubrabase,aepwvcluster,aepwvnew,aepwvmeta)
|
||||
|
||||
|
||||
nodupe = remove_dupes(allcountyoutages)
|
||||
nodupekubra = remove_dupes(allkubraoutages)
|
||||
nodupeaepkubra = remove_dupes(allaepkubracoutages)
|
||||
#print(nodupe)
|
||||
|
||||
def insertaep(data):
|
||||
for j in data:
|
||||
try:
|
||||
custa = j.get('desc').get('cust_a').get('val')
|
||||
except:
|
||||
continue
|
||||
pointgeom = j.get('geom').get('p')
|
||||
if len(pointgeom) == 1:
|
||||
pointlatlon = polyline.decode(pointgeom[0])
|
||||
lat = pointlatlon[0][0]
|
||||
lon = pointlatlon[0][1]
|
||||
else:
|
||||
continue
|
||||
areageom = j.get('geom').get('a')
|
||||
if areageom is not None:
areageom = remove_external_curly_braces(areageom)
|
||||
cause = j.get('desc').get('cause')
|
||||
start = j.get('desc').get('start')
|
||||
if start != None:
|
||||
start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%f%z")
|
||||
etr = j.get('desc').get('etr')
|
||||
if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
|
||||
if etr != None:
|
||||
etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%f%z")
|
||||
incid = j.get('desc').get('inc_id')
|
||||
crew_status = j.get('desc').get('crew_status')
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
#sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
|
||||
vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
|
||||
#val = (sql,vals)
|
||||
print(vals)
|
||||
#cursor.execute(sql,vals)
|
||||
#conn.commit()
|
||||
|
||||
|
||||
if len(nodupe) > 0:
|
||||
insertaep(nodupe)
|
||||
if len(nodupekubra) > 0:
|
||||
insertkubra(nodupekubra)
|
||||
if len(nodupeaepkubra) > 0:
|
||||
insert_kubra_aep(nodupeaepkubra)
|
||||
|
||||
cursor.execute('UPDATE public.power SET realgeom = ST_SetSRID(ST_MakePoint(lon, lat), 4326) where (lat is not null and lon is not null and realgeom is null)')
|
||||
cursor.execute('UPDATE public.power SET peakoutage = outagen where outagen > peakoutage')
|
||||
cursor.execute('update public.power set county = county.countyname from public.county where ST_contains(county.geom,power.realgeom) and power.county is null')
|
||||
cursor.execute('update public.power set cwa = fzone.cwa from public.fzone where ST_contains(fzone.geom,power.realgeom) and power.cwa is null')
|
||||
cursor.execute('update public.power set state = county.state from public.county where ST_contains(county.geom,power.realgeom) and power.state is null')
|
||||
cursor.execute('update public.power set startguess = least(start,derivedstart)')
|
||||
cursor.execute('update public.power set realareageom = st_linefromencodedpolyline(areageom) where areageom is not null and realareageom is null')
|
||||
cursor.execute("update power set pointgeom = NULL where lastchange < now() - interval '2 hours'")
|
||||
conn.commit()
|
||||
|
||||
|
||||
cursor.execute("update power set active = true where lastchange > now() - interval '30 minutes'")
|
||||
cursor.execute("update power set active = false where lastchange < now() - interval '30 minutes'")
|
||||
conn.commit()
|
||||
#cursor.execute("delete from power where cwa != 'RLX'")
|
||||
cursor.execute("delete from power where lastchange < now () - interval '365 days'")
|
||||
conn.commit()
|
||||
|
||||
#print(allkubraoutages)
|
||||
cursor.close()
|
||||
conn.close()
|
||||
447
power3.py
Normal file
@@ -0,0 +1,447 @@
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
import re
|
||||
import logging
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s',
|
||||
handlers=[
|
||||
logging.FileHandler('power3.log'),
|
||||
logging.StreamHandler()
|
||||
]
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
proxies = {"http":"http://nws:nws@localhost:9000"}
|
||||
|
||||
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json"
|
||||
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
|
||||
wvfemeta= 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
|
||||
aepwvkubrameta = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
|
||||
aepohkubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
|
||||
aepkykubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
|
||||
|
||||
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
|
||||
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
|
||||
graysoncounty = 'https://outages.graysonrecc.com/data/boundaries.json'
|
||||
flemingjson = 'https://outage.fme.coop/data/boundaries.json'
|
||||
|
||||
#buckeye rec
|
||||
#https://outage.buckeyerec.coop/maps/OutageWebMap/maps/GWT.rpc
|
||||
#washington
|
||||
#https://weci.ebill.coop/woViewer/MapWiseWeb/GWT.rpc
|
||||
allcountyoutages = []
|
||||
|
||||
S = requests.Session()
|
||||
|
||||
|
||||
|
||||
|
||||
def fleming():
|
||||
logger.info("Starting fleming()")
|
||||
state = 'KY'
|
||||
company = 'FLEM'
|
||||
try:
|
||||
temp = S.get(flemingjson)
|
||||
temp.raise_for_status()
|
||||
tempdata = json.loads(temp.text)
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} boundaries from fleming")
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Request failed for fleming: {e}")
|
||||
except json.JSONDecodeError as e:
|
||||
logger.error(f"JSON decode error for fleming: {e}")
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error in fleming: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
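# The co-op "boundaries.json" feeds read by fleming(), bigsandy(), grayson() and
# southcentralpower() are assumed to look roughly like the sketch below; only
# the keys this script actually reads are shown, everything else is hypothetical.
_EXAMPLE_BOUNDARIES = [
    {
        "boundaries": [
            {"name": "Fleming", "customersOutNow": 12, "customersServed": 3400},
            {"name": "Mason",   "customersOutNow": 0,  "customersServed": 1800},
        ]
    }
]
# Each entry becomes a (customersOutNow, customersServed, name, state, company)
# tuple appended to allcountyoutages.
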
|
||||
def bigsandy():
|
||||
logger.info("Starting bigsandy()")
|
||||
state = 'OH'
|
||||
company = 'BS'
|
||||
try:
|
||||
temp = S.get('https://outagemap.bigsandyrecc.com/data/boundaries.json')
|
||||
temp.raise_for_status()
|
||||
tempdata = json.loads(temp.text)
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} boundaries from bigsandy")
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Request failed for bigsandy: {e}")
|
||||
except json.JSONDecodeError as e:
|
||||
logger.error(f"JSON decode error for bigsandy: {e}")
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error in bigsandy: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
def southcentralpower():
|
||||
logger.info("Starting southcentralpower()")
|
||||
company = 'SCP'
|
||||
url = 'https://outage.southcentralpower.com/data/boundaries.json'
|
||||
Sp = requests.Session()
|
||||
# Sp.proxies.update(proxies)
|
||||
try:
|
||||
response = Sp.get(url)
|
||||
response.raise_for_status()
|
||||
tempdata = json.loads(response.text)
|
||||
state = 'OH'
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} boundaries from southcentralpower")
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Request failed for southcentralpower: {e}")
|
||||
except json.JSONDecodeError as e:
|
||||
logger.error(f"JSON decode error for southcentralpower: {e}")
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error in southcentralpower: {e}")
|
||||
|
||||
#wv https://kubra.io/data/e2ae0326-9912-436a-9355-eb2687e798b1/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json
|
||||
def ku_get_url():
|
||||
logger.info("Starting ku_get_url()")
|
||||
try:
|
||||
url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
|
||||
r = requests.get(url)
|
||||
r.raise_for_status()
|
||||
x = re.search(r"instanceId: '(.*?)',", r.text)
|
||||
if not x:
|
||||
logger.error("Could not find instanceId in ku_get_url")
|
||||
return None
|
||||
urlcom = x.group(1)
|
||||
urlcom = 'https://kubra.io/stormcenter/api/v1/stormcenters/' + urlcom + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false'
|
||||
stuff = S.get(urlcom)
|
||||
stuff.raise_for_status()
|
||||
jsonstuff = json.loads(stuff.text)
|
||||
interval_data = jsonstuff.get('data').get('interval_generation_data')
|
||||
if not interval_data:
|
||||
logger.error("Could not find interval_generation_data in ku_get_url")
|
||||
return None
|
||||
urlcom = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
|
||||
logger.info("Successfully generated URL for ku")
|
||||
return urlcom
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Request failed in ku_get_url: {e}")
|
||||
return None
|
||||
except json.JSONDecodeError as e:
|
||||
logger.error(f"JSON decode error in ku_get_url: {e}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error in ku_get_url: {e}")
|
||||
return None
|
||||
|
||||
|
||||
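# ku_get_url() above scrapes the Kubra instanceId out of the storm-center HTML
# with a regex. A minimal sketch on a hypothetical snippet of that page:
def _example_instance_id():
    import re
    html = "var cfg = { instanceId: '0000aaaa-1111-2222-3333-444455556666', };"
    match = re.search(r"instanceId: '(.*?)',", html)
    return match.group(1) if match else None     # the GUID, or None if absent
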
def county_json(meta,url,jsonname):
|
||||
logger.info(f"Starting county_json for meta: {meta}")
|
||||
try:
|
||||
response = S.get(meta)
|
||||
response.raise_for_status()
|
||||
metainfo = json.loads(response.text)
|
||||
metadir = metainfo['directory']
|
||||
url = url + metadir + jsonname
|
||||
outage = S.get(url)
|
||||
outage.raise_for_status()
|
||||
logger.info(f"Successfully fetched county JSON from {url}")
|
||||
return outage
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Request failed in county_json: {e}")
|
||||
return None
|
||||
except json.JSONDecodeError as e:
|
||||
logger.error(f"JSON decode error in county_json: {e}")
|
||||
return None
|
||||
except KeyError as e:
|
||||
logger.error(f"Key error in county_json (missing 'directory'): {e}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error in county_json: {e}")
|
||||
return None
|
||||
|
||||
|
||||
|
||||
def ku():
    # Collect LG&E/KU county areas; the report URL comes from ku_get_url(),
    # which returns None on failure, so bail out early in that case.
    ku_areas = []
    url = ku_get_url()
    if url is None:
        logger.error("ku(): could not resolve report URL, skipping")
        return
    data = S.get(url).text
    tempdata = json.loads(data)
    # The KU report nests the county lists at fixed positions under 'areas'.
    temp = tempdata['file_data']['areas'][2]['areas'][0]['areas']
    temp1 = tempdata['file_data']['areas'][2]['areas'][1]['areas']
    temp2 = tempdata['file_data']['areas'][1]['areas'][0]['areas']

    for i in temp:
        ku_areas.append(i)
    for i in temp1:
        ku_areas.append(i)
    for i in temp2:
        ku_areas.append(i)
    for o in ku_areas:
        outageinfo = o['cust_a']['val'], o['cust_s'], o['name'].capitalize(), o['state'], o['utility']
        allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
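# The KU report JSON is assumed to nest county entries at fixed positions, e.g.
# file_data -> areas[2] -> areas[0] -> areas; the sketch below shows only the
# nesting ku() indexes, the rest of the structure is hypothetical.
_EXAMPLE_KU_REPORT = {
    "file_data": {
        "areas": [
            {},                                           # index 0: not used here
            {"areas": [{"areas": []}]},                   # index 1 -> areas[0]['areas']
            {"areas": [{"areas": []}, {"areas": []}]},    # index 2 -> two county lists
        ]
    }
}
# Each leaf entry is expected to carry cust_a.val, cust_s, name, state, utility.
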
def grayson():
|
||||
company = 'GRE'
|
||||
outage = S.get(graysoncounty)
|
||||
if outage.headers.get('Content-Type').startswith('application/json'):
|
||||
tempdata = json.loads(outage.text)
|
||||
state = 'KY'
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
def aep_county_vawv(meta,url,jsonname):
|
||||
company = 'AEP'
|
||||
outage = county_json(meta,url,jsonname)
|
||||
if outage.headers.get('Content-Type').startswith('application/octet-stream'):
|
||||
tempdata = json.loads(outage.text)
|
||||
state = 'WV'
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
state = 'VA'
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
def aep_county_oh(meta,url,jsonname):
|
||||
company = 'AEP'
|
||||
state = 'OH'
|
||||
outage = county_json(meta,url,jsonname)
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
def aep_county_ky(meta,url,jsonname):
|
||||
company = 'AEP'
|
||||
state = 'KY'
|
||||
outage = county_json(meta,url,jsonname)
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
def firstenergy_county(meta,url,jsonname):
|
||||
company = 'FE'
|
||||
state = 'WV'
|
||||
outage = county_json(meta,url,jsonname)
|
||||
if outage.headers.get('Content-Type').startswith('application/octet-stream'):
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(),state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
def get_kubra_hexes(url):
|
||||
outage = S.get(url)
|
||||
if outage.headers.get('Content-Type').startswith('application/json'):
|
||||
tempdata = json.loads(outage.text)
|
||||
bothhex = tempdata.get('data').get('cluster_interval_generation_data')
|
||||
hexes = bothhex.split('/')
|
||||
return hexes[2],hexes[3]
|
||||
|
||||
|
||||
|
||||
|
||||
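# get_kubra_hexes() above splits the cluster_interval_generation_data path on
# '/' and returns elements 2 and 3. A minimal sketch on a made-up path of that
# shape (the real segment layout is an assumption beyond the indices used):
def _example_kubra_hexes():
    sample_path = "data/1a2b3c4d-0000-0000-0000-000000000000/aa/bb"  # hypothetical
    parts = sample_path.split('/')
    return parts[2], parts[3]                    # ('aa', 'bb')
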
def kubra_fe(baseurl1,baseurl2,meta):
|
||||
|
||||
hex2 = get_kubra_hexes(meta)
|
||||
url = baseurl1 + hex2[1] + baseurl2
|
||||
company = 'FE'
|
||||
state = 'WV'
|
||||
outage = S.get(url)
|
||||
if outage.headers.get('Content-Type').startswith('application/json'):
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']['areas']:
|
||||
if j.get('key') == "county":
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('name').capitalize(),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
def kubra_aep(baseurl1,baseurl2,meta,company='AEP'):
|
||||
|
||||
hex2 = get_kubra_hexes(meta)
|
||||
url = baseurl1 + hex2[1] + baseurl2
|
||||
outage = S.get(url)
|
||||
if outage.headers.get('Content-Type').startswith('application/json'):
|
||||
tempdata = json.loads(outage.text)
|
||||
process_outage_data(tempdata,company)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def process_outage_data(data,company):
|
||||
"""
|
||||
Identifies the data structure and loops through the county-level data.
|
||||
|
||||
Args:
|
||||
data (dict): The parsed JSON data as a Python dictionary.
|
||||
"""
|
||||
# Navigate to the primary list of areas
|
||||
primary_areas = data.get("file_data", {}).get("areas", [])
|
||||
|
||||
# If the list is empty, there's nothing to process
|
||||
if not primary_areas:
|
||||
print("No 'areas' data found.")
|
||||
return
|
||||
|
||||
# --- This is the key logic to handle both formats ---
|
||||
# Check the key of the first item to determine the format
|
||||
first_item_key = primary_areas[0].get("key")
|
||||
|
||||
if first_item_key == "state":
|
||||
# Format 1: Loop through each state object
|
||||
for state_area in primary_areas:
|
||||
state_name = state_area.get("name", "Unknown State")
|
||||
# Get the nested list of counties for this state
|
||||
county_list = state_area.get("areas", [])
|
||||
for county in county_list:
|
||||
# We are now at the county level
|
||||
if county.get("key") == "county":
|
||||
outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(),county.get('state'),company
|
||||
allcountyoutages.append(outageinfo)
|
||||
elif first_item_key == "county":
|
||||
# Format 2: The primary list is already the county list
|
||||
for county in primary_areas:
|
||||
# We are now at the county level
|
||||
if county.get("key") == "county":
|
||||
outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(),county.get('state'),company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
else:
|
||||
print("Unknown data format. Could not find 'state' or 'county' key.")
|
||||
|
||||
|
||||
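# A minimal sketch of the two report shapes process_outage_data() accepts; the
# payloads are hypothetical beyond the keys the function actually reads, and
# this function is never called by the script.
def _example_process_outage_data():
    county = {"key": "county", "name": "KANAWHA", "state": "WV",
              "cust_a": {"val": 57}, "cust_s": 112000}
    nested = {"file_data": {"areas": [                # Format 1: state -> county
        {"key": "state", "name": "WV", "areas": [county]}]}}
    flat = {"file_data": {"areas": [county]}}         # Format 2: counties at top level
    process_outage_data(nested, "AEP")
    process_outage_data(flat, "AEP")
    # Each call appends (57, 112000, 'Kanawha', 'WV', 'AEP') to allcountyoutages.
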
try:
|
||||
logger.info("Attempting kubra_fe for FE WV")
|
||||
kubra_fe('https://kubra.io/data/','/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json',wvfemeta)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in kubra_fe for FE WV: {e}")
|
||||
try:
|
||||
logger.info("Attempting kubra_aep for AEP WV")
|
||||
kubra_aep('https://kubra.io/data/','/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json',aepwvkubrameta)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in kubra_aep for AEP WV: {e}")
|
||||
try:
|
||||
logger.info("Attempting kubra_aep for AEP OH")
|
||||
kubra_aep('https://kubra.io/data/','/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json',aepohkubrameta)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in kubra_aep for AEP OH: {e}")
|
||||
try:
|
||||
logger.info("Attempting kubra_aep for AEP KY")
|
||||
kubra_aep('https://kubra.io/data/','/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json',aepkykubrameta)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in kubra_aep for AEP KY: {e}")
|
||||
|
||||
|
||||
try:
|
||||
logger.info("Attempting grayson")
|
||||
grayson()
|
||||
except Exception as e:
|
||||
logger.error(f"Error in grayson: {e}")
|
||||
try:
|
||||
logger.info("Attempting ku")
|
||||
ku()
|
||||
except Exception as e:
|
||||
logger.error(f"Error in ku: {e}")
|
||||
try:
|
||||
logger.info("Attempting southcentralpower")
|
||||
southcentralpower()
|
||||
except Exception as e:
|
||||
logger.error(f"Error in southcentralpower: {e}")
|
||||
try:
|
||||
logger.info("Attempting bigsandy")
|
||||
bigsandy()
|
||||
except Exception as e:
|
||||
logger.error(f"Error in bigsandy: {e}")
|
||||
try:
|
||||
logger.info("Attempting fleming")
|
||||
fleming()
|
||||
except Exception as e:
|
||||
logger.error(f"Error in fleming: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
#for i in allcountyoutages:
|
||||
# sql = 'insert into countyoutages (outages, served, county, state, update, company) values (%s, %s, %s, %s, %s, %s)'
|
||||
# val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
|
||||
# cursor.execute(sql,val)
|
||||
#conn.commit()
|
||||
|
||||
all_values = []
|
||||
for i in allcountyoutages:
|
||||
# 1. Build the value tuples; the order must match the SQL placeholders below
|
||||
val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
|
||||
all_values.append(val)
|
||||
|
||||
# 2. Define the SQL statement ONCE
|
||||
sql = 'INSERT INTO countyoutages (outages, served, county, state, update, company) VALUES (%s, %s, %s, %s, %s, %s)'
|
||||
|
||||
# 3. Execute the command ONCE with all the data
|
||||
if all_values: # Only execute if there's data to insert
|
||||
try:
|
||||
cursor.executemany(sql, all_values)
|
||||
conn.commit() # Commit after successful execution
|
||||
logger.info(f"Successfully inserted {len(all_values)} records into the database")
|
||||
except Exception as e:
|
||||
logger.error(f"Database error during insert: {e}")
|
||||
conn.rollback() # Rollback in case of error
|
||||
else:
|
||||
logger.warning("No data to insert into the database")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
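# cursor.executemany() above effectively issues one INSERT per tuple. A sketch
# of a batched alternative, assuming psycopg2's extras module is available (the
# existing executemany() call is functionally fine; this is optional):
def _example_execute_values(cursor, all_values):
    from psycopg2.extras import execute_values
    execute_values(
        cursor,
        "INSERT INTO countyoutages (outages, served, county, state, update, company) VALUES %s",
        all_values,
    )
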
cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')
|
||||
|
||||
conn.commit()
|
||||
|
||||
#cursor.execute("delete from countyoutages where cwa != 'RLX'")
|
||||
cursor.execute("delete from countyoutages where cwa is null")
|
||||
#cursor.execute("delete from countyoutages where update < now () - interval '365 days'")
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#print(allcountyoutages)
|
||||
|
||||
cursor.close()
|
||||
conn.close()
|
||||
209
power3new.py
Normal file
@@ -0,0 +1,209 @@
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
import re
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
proxies = {"http":"http://nws:nws@localhost:9000"}
|
||||
|
||||
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json"
|
||||
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
|
||||
|
||||
|
||||
|
||||
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
|
||||
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
|
||||
graysoncounty = 'https://outages.graysonrecc.com/data/boundaries.json'
|
||||
#flemingjson = 'https://outage.fme.coop/data/boundaries.json'
|
||||
|
||||
#buckeye rec
|
||||
#https://outage.buckeyerec.coop/maps/OutageWebMap/maps/GWT.rpc
|
||||
#washington
|
||||
#https://weci.ebill.coop/woViewer/MapWiseWeb/GWT.rpc
|
||||
allcountyoutages = []
|
||||
|
||||
S = requests.Session()
|
||||
|
||||
|
||||
|
||||
|
||||
def fleming():
|
||||
state = 'KY'
|
||||
company = 'FLEM'
|
||||
temp = S.get(flemingjson)
|
||||
tempdata = json.loads(temp.text)
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def bigsandy():
|
||||
state = 'OH'
|
||||
company = 'BS'
|
||||
temp = S.get('http://outagemap.bigsandyrecc.com/data/boundaries.json')
|
||||
tempdata = json.loads(temp.text)
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
|
||||
|
||||
def southcentralpower():
|
||||
company = 'SCP'
|
||||
url = 'http://outage.southcentralpower.com/data/boundaries.json'
|
||||
Sp = requests.Session()
|
||||
# Sp.proxies.update(proxies)
|
||||
temp = Sp.get(url).text
|
||||
tempdata = json.loads(temp)
|
||||
state = 'OH'
|
||||
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
def ku_get_url():
|
||||
url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
|
||||
r = requests.get(url).text
|
||||
x = re.search(r"instanceId: '(.*?)',",r)
|
||||
urlcom = x.group(1)
|
||||
urlcom = 'https://kubra.io/stormcenter/api/v1/stormcenters/' + urlcom + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false'
|
||||
stuff = S.get(urlcom)
|
||||
jsonstuff = json.loads(stuff.text)
|
||||
interval_data = jsonstuff.get('data').get('interval_generation_data')
|
||||
urlcom = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
|
||||
return urlcom
|
||||
|
||||
|
||||
def county_json(meta,url,jsonname):
|
||||
metainfo = json.loads(S.get(meta).text)
|
||||
metadir = metainfo['directory']
|
||||
url = url + metadir + jsonname
|
||||
outage = S.get(url)
|
||||
return outage
|
||||
|
||||
|
||||
|
||||
def ku():
|
||||
ku = []
|
||||
url = ku_get_url()
|
||||
data = S.get(url).text
|
||||
tempdata = json.loads(data)
|
||||
temp = tempdata['file_data']['areas'][2]['areas'][0]['areas']
|
||||
temp1 = tempdata['file_data']['areas'][2]['areas'][1]['areas']
|
||||
temp2 = tempdata['file_data']['areas'][1]['areas'][0]['areas']
|
||||
|
||||
for i in temp:
|
||||
ku.append(i)
|
||||
for i in temp1:
|
||||
ku.append(i)
|
||||
for i in temp2:
|
||||
ku.append(i)
|
||||
for o in ku:
|
||||
outageinfo = o['cust_a']['val'],o['cust_s'],o['name'].capitalize(),o['state'],o['utility']
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
def grayson():
|
||||
company = 'GRE'
|
||||
outage = S.get(graysoncounty)
|
||||
if outage.headers.get('Content-Type').startswith('application/json'):
|
||||
tempdata = json.loads(outage.text)
|
||||
state = 'KY'
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'),j.get('customersServed'),j.get('name'),state,company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
def aep_county_vawv(meta,url,jsonname):
|
||||
company = 'AEP'
|
||||
outage = county_json(meta,url,jsonname)
|
||||
if outage.headers.get('Content-Type').startswith('application/octet-stream'):
|
||||
tempdata = json.loads(outage.text)
|
||||
state = 'WV'
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
state = 'VA'
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
def aep_county_oh(meta,url,jsonname):
|
||||
company = 'AEP'
|
||||
state = 'OH'
|
||||
outage = county_json(meta,url,jsonname)
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
def aep_county_ky(meta,url,jsonname):
|
||||
company = 'AEP'
|
||||
state = 'KY'
|
||||
outage = county_json(meta,url,jsonname)
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
def firstenergy_county(meta,url,jsonname):
|
||||
company = 'FE'
|
||||
state = 'WV'
|
||||
outage = county_json(meta,url,jsonname)
|
||||
if outage.headers.get('Content-Type').startswith('application/octet-stream'):
|
||||
tempdata = json.loads(outage.text)
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(),state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
aep_county_vawv(aepwvmeta,'https://d2oclp3li76tyy.cloudfront.net/resources/data/external/interval_generation_data/','/report_county.json')
|
||||
firstenergy_county('https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json','https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/','/report_county_ctv_wv.json')
|
||||
aep_county_oh(aepohmeta,'http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/','/report_county.json')
|
||||
aep_county_ky(aepkymeta, aepkybase, '/report_county.json')
|
||||
grayson()
|
||||
ku()
|
||||
southcentralpower()
|
||||
bigsandy()
|
||||
|
||||
|
||||
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
for i in allcountyoutages:
|
||||
sql = 'insert into countyoutages (outages, served, county, state, update, company) values (%s, %s, %s, %s, %s, %s)'
|
||||
val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
|
||||
cursor.execute(sql,val)
|
||||
conn.commit()
|
||||
|
||||
cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')
|
||||
|
||||
conn.commit()
|
||||
|
||||
#cursor.execute("delete from countyoutages where cwa != 'RLX'")
|
||||
cursor.execute("delete from countyoutages where cwa is null")
|
||||
cursor.execute("delete from countyoutages where update < now () - interval '365 days'")
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#print(allcountyoutages)
|
||||
|
||||
82
power_outages.txt
Normal file
@@ -0,0 +1,82 @@
|
||||
Color: 255 0 0
|
||||
Font: 1, 11, 1, "Arial"
|
||||
Threshold: 9999
|
||||
Place: -82.33573 37.99161, icon=2, label="Outage #4
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.61944 38.92038, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.72974 39.15171, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.53436 39.20742, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -80.25851 39.16533, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -80.57541 39.34415, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.248 38.77818, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -80.52457 38.39864, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -80.41218 38.46986, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -80.45523 38.45674, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.46448 38.43202, icon=2, label="Outage #163
|
||||
Cause: Tree Contact
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.48033 38.70811, icon=2, label="Outage #108
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.08233 38.33495, icon=2, label="Outage #14
|
||||
Cause: Tree Contact
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -80.64911 38.42277, icon=2, label="Outage #19
|
||||
Cause: Tree Damage
|
||||
Last Update: 2025-04-06 21:00: UTC"
|
||||
Place: -80.64508 38.42421, icon=2, label="Outage #19
|
||||
Cause: Emergency Equipment Repair
|
||||
Last Update: 2025-04-06 21:00: UTC"
|
||||
Place: -80.64925 38.42389, icon=2, label="Outage #19
|
||||
Cause: Pending Investigation
|
||||
Last Update: 2025-04-06 21:00: UTC"
|
||||
Place: -81.16478 38.32286, icon=2, label="Outage #4
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:10: UTC"
|
||||
Place: -81.54974 38.53972, icon=2, label="Outage #7
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.13474 38.01172, icon=2, label="Outage #23
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -82.47047 37.15274, icon=2, label="Outage #46
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -82.39272 37.04287, icon=2, label="Outage #147
|
||||
Cause: Tree Contact
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -83.059326 38.44866, icon=2, label="Outage #9
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:11: UTC"
|
||||
Place: -81.7956 37.56702, icon=2, label="Outage #64
|
||||
Cause: Tree Contact
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.80097 37.5298, icon=2, label="Outage #4
|
||||
Cause: Equipment Related
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.91268 39.47965, icon=2, label="Outage #4
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
Place: -81.44512 39.42116, icon=2, label="Outage #4
|
||||
Cause: None
|
||||
Last Update: 2025-04-06 21:25: UTC"
|
||||
End:
|
||||
188
power_outages_gr2.txt
Normal file
@@ -0,0 +1,188 @@
|
||||
Refresh: 1
|
||||
Threshold: 999 nautical_miles
|
||||
Title: Power Outages (RLX CWA) - Triangles
|
||||
Font: 1, 11, 0, "Courier New"
|
||||
Color: 255 0 0
|
||||
Triangles:
|
||||
37.15803, -82.2399
|
||||
37.19803, -82.2399
|
||||
37.17803, -82.1799
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 37.17803, -82.2099, 1, "Outage #4", "Outage #4\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
37.13274, -82.50047
|
||||
37.172740000000005, -82.50047
|
||||
37.15274, -82.44047
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 37.15274, -82.47047, 1, "Outage #46", "Outage #46\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
37.02287, -82.42272
|
||||
37.062870000000004, -82.42272
|
||||
37.04287, -82.36272
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 37.04287, -82.39272, 1, "Outage #147", "Outage #147\nCause: Tree Contact\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.31505, -81.11213000000001
|
||||
38.355050000000006, -81.11213000000001
|
||||
38.33505, -81.05213
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.33505, -81.08213, 1, "Outage #13", "Outage #13\nCause: Tree Contact\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.40376, -81.50807
|
||||
38.443760000000005, -81.50807
|
||||
38.42376, -81.44807
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.42376, -81.47807, 1, "Outage #8", "Outage #8\nCause: Tree Contact\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.51972, -81.57974
|
||||
38.559720000000006, -81.57974
|
||||
38.53972, -81.51974
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.53972, -81.54974, 1, "Outage #7", "Outage #7\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.688109999999995, -81.51033
|
||||
38.72811, -81.51033
|
||||
38.70811, -81.45033
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.70811, -81.48033, 1, "Outage #108", "Outage #108\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
39.459649999999996, -81.94268
|
||||
39.49965, -81.94268
|
||||
39.47965, -81.88268
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 39.47965, -81.91268, 1, "Outage #4", "Outage #4\nCause: None\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.90038, -81.64944
|
||||
38.940380000000005, -81.64944
|
||||
38.92038, -81.58944
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.92038, -81.61944, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
39.13171, -81.75974000000001
|
||||
39.171710000000004, -81.75974000000001
|
||||
39.15171, -81.69974
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 39.15171, -81.72974, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
39.37441999999999, -80.6919
|
||||
39.41442, -80.6919
|
||||
39.39442, -80.6319
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 39.39442, -80.6619, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.828689999999995, -81.20764
|
||||
38.86869, -81.20764
|
||||
38.84869, -81.14764
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.84869, -81.17764, 1, "Outage #30", "Outage #30\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
39.187419999999996, -81.56436000000001
|
||||
39.22742, -81.56436000000001
|
||||
39.20742, -81.50436
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 39.20742, -81.53436, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
39.145329999999994, -80.28851
|
||||
39.18533, -80.28851
|
||||
39.16533, -80.22851
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 39.16533, -80.25851, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
39.324149999999996, -80.60541
|
||||
39.36415, -80.60541
|
||||
39.34415, -80.54541
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 39.34415, -80.57541, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.758179999999996, -81.278
|
||||
38.79818, -81.278
|
||||
38.77818, -81.218
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.77818, -81.248, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.449859999999994, -80.44218000000001
|
||||
38.48986, -80.44218000000001
|
||||
38.46986, -80.38218
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.46986, -80.41218, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.43674, -80.48523
|
||||
38.47674000000001, -80.48523
|
||||
38.45674, -80.42523
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.45674, -80.45523, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
37.5098, -81.83097000000001
|
||||
37.549800000000005, -81.83097000000001
|
||||
37.5298, -81.77097
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 37.5298, -81.80097, 1, "Outage #4", "Outage #4\nCause: Equipment Related\nLast Update: 2025-04-06 21:50: UTC"
|
||||
Triangles:
|
||||
38.314949999999996, -81.11233
|
||||
38.35495, -81.11233
|
||||
38.33495, -81.05233
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.33495, -81.08233, 1, "Outage #14", "Outage #14\nCause: Tree Contact\nLast Update: 2025-04-06 21:40: UTC"
|
||||
Triangles:
|
||||
39.40116, -81.47512
|
||||
39.44116, -81.47512
|
||||
39.42116, -81.41512
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 39.42116, -81.44512, 1, "Outage #4", "Outage #4\nCause: None\nLast Update: 2025-04-06 21:40: UTC"
|
||||
Triangles:
|
||||
38.37864, -80.55457
|
||||
38.41864, -80.55457
|
||||
38.39864, -80.49457
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.39864, -80.52457, 1, "Outage #19", "Outage #19\nCause: Pending Investigation\nLast Update: 2025-04-06 21:30: UTC"
|
||||
Triangles:
|
||||
37.547019999999996, -81.8256
|
||||
37.58702, -81.8256
|
||||
37.56702, -81.76559999999999
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 37.56702, -81.7956, 1, "Outage #64", "Outage #64\nCause: Tree Contact\nLast Update: 2025-04-06 21:25: UTC"
|
||||
Triangles:
|
||||
38.41202, -81.49448
|
||||
38.452020000000005, -81.49448
|
||||
38.43202, -81.43448
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.43202, -81.46448, 1, "Outage #163", "Outage #163\nCause: Tree Contact\nLast Update: 2025-04-06 21:25: UTC"
|
||||
Triangles:
|
||||
37.991719999999994, -81.16474
|
||||
38.03172, -81.16474
|
||||
38.01172, -81.10473999999999
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 38.01172, -81.13474, 1, "Outage #23", "Outage #23\nCause: None\nLast Update: 2025-04-06 21:25: UTC"
|
||||
Triangles:
|
||||
37.97161, -82.36573
|
||||
38.011610000000005, -82.36573
|
||||
37.99161, -82.30573
|
||||
End:
|
||||
Color: 0 0 255
|
||||
Text: 37.99161, -82.33573, 1, "Outage #4", "Outage #4\nCause: None\nLast Update: 2025-04-06 21:25: UTC"
|
||||
End:
|
||||
967
powerapi.php
Normal file
@@ -0,0 +1,967 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
//$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
// or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
try {
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws");
|
||||
if ($dbconn === false) {
|
||||
throw new Exception('Could not connect: ' . pg_last_error());
|
||||
}
|
||||
} catch (Exception $e) {
|
||||
http_response_code(500);
|
||||
die('Database connection failed: ' . $e->getMessage());
|
||||
}
|
||||
|
||||
// No GET parameters: return the current point outage info as GeoJSON
|
||||
//if(empty($_GET)) {
|
||||
//$result = pg_query_params($dbconn,
|
||||
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause,'area_geometry', ST_AsGeoJSON(COALESCE(realareageom, realgeom))::json))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
|
||||
//array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
//$resultArray = pg_fetch_all($result);
|
||||
//echo($resultArray[0]['json_build_object']);
|
||||
//pg_free_result($result);
|
||||
//}
|
||||
|
||||
|
||||
if (empty($_GET)) {
|
||||
try {
|
||||
$query = "
|
||||
SELECT json_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', json_agg(
|
||||
json_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(realgeom)::json,
|
||||
'properties', json_build_object(
|
||||
'time', startguess,
|
||||
'county', county,
|
||||
'state', state,
|
||||
'outage', outagen,
|
||||
'lastchange', lastchange,
|
||||
'cause', cause,
|
||||
'area_geometry', ST_AsGeoJSON(COALESCE(realareageom, realgeom))::json
|
||||
)
|
||||
)
|
||||
ORDER BY startguess ASC
|
||||
)
|
||||
)
|
||||
FROM power
|
||||
WHERE cwa = $1 AND active = true
|
||||
";
|
||||
|
||||
$result = pg_query_params($dbconn, $query, array('RLX'));
|
||||
if ($result === false) {
|
||||
throw new Exception('Query failed: ' . pg_last_error());
|
||||
}
|
||||
|
||||
$resultArray = pg_fetch_all($result);
|
||||
|
||||
// Check if we got results
|
||||
if ($resultArray && isset($resultArray[0]['json_build_object'])) {
|
||||
header('Content-Type: application/json');
|
||||
echo $resultArray[0]['json_build_object'];
|
||||
} else {
|
||||
echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
|
||||
}
|
||||
|
||||
pg_free_result($result);
|
||||
} catch (Exception $e) {
|
||||
http_response_code(500);
|
||||
die('Query execution failed: ' . $e->getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
//if (isset($_GET['states'])) {
|
||||
//$result = pg_query($dbconn,
|
||||
//"SELECT jsonb_build_object('type', 'FeatureCollection','features', jsonb_agg(features.feature)) FROM (SELECT jsonb_build_object('type', 'Feature','geometry', ST_AsGeoJSON(ST_Transform(geom, 4326))::jsonb,'properties', to_jsonb(properties) - 'geom') AS feature FROM (SELECT * FROM states where state = 'WV' or state = 'VA' or state = 'KY' or state ='VA' or state = 'MD' or state = 'PA' or state = 'OH') AS properties) AS features") or die('Query failed: ' . pg_last_error());
|
||||
// $resultArray = pg_fetch_all($result);
|
||||
//echo($resultArray[0]['jsonb_build_object']);
|
||||
//pg_free_result($result);
|
||||
//}
|
||||
|
||||
if (isset($_GET['states'])) {
|
||||
try {
|
||||
$query = "
|
||||
SELECT jsonb_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', jsonb_agg(features.feature)
|
||||
)
|
||||
FROM (
|
||||
SELECT jsonb_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(ST_Transform(geom, 4326))::jsonb,
|
||||
'properties', to_jsonb(properties) - 'geom'
|
||||
) AS feature
|
||||
FROM (
|
||||
SELECT *
|
||||
FROM states
|
||||
WHERE state IN ('WV', 'VA', 'KY', 'MD', 'PA', 'OH')
|
||||
) AS properties
|
||||
) AS features
|
||||
";
|
||||
|
||||
$result = pg_query($dbconn, $query);
|
||||
if ($result === false) {
|
||||
throw new Exception('Query failed: ' . pg_last_error());
|
||||
}
|
||||
|
||||
$resultArray = pg_fetch_all($result);
|
||||
|
||||
// Set proper JSON header and handle output
|
||||
header('Content-Type: application/json');
|
||||
if ($resultArray && isset($resultArray[0]['jsonb_build_object'])) {
|
||||
echo $resultArray[0]['jsonb_build_object'];
|
||||
} else {
|
||||
echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
|
||||
}
|
||||
|
||||
pg_free_result($result);
|
||||
} catch (Exception $e) {
|
||||
http_response_code(500);
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
|
||||
exit;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
//county/state max
|
||||
//if($_GET['max'] ?? null) {
|
||||
|
||||
//if($_GET['start'] ?? null) {
|
||||
//$starttime = pg_escape_string($_GET['start']);
|
||||
//if($_GET['end'] ?? null) {
|
||||
//$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
//$result = pg_query_params($dbconn,
|
||||
////select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
|
||||
////"select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
|
||||
//"select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
|
||||
//array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
//
|
||||
//while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
// $array[] = $line;
|
||||
//}
|
||||
//echo json_encode($array);
|
||||
//pg_free_result($result);
|
||||
//}}}
|
||||
|
||||
if (isset($_GET['max'])) {
|
||||
if (isset($_GET['start']) && isset($_GET['end'])) {
|
||||
try {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
$query = "
|
||||
SELECT DISTINCT ON (county, state)
|
||||
max(outage) as max_outage,
|
||||
county,
|
||||
state
|
||||
FROM (
|
||||
SELECT DISTINCT ON (county, state, update)
|
||||
county,
|
||||
state,
|
||||
SUM(outages) as outage,
|
||||
update as time,
|
||||
SUM(served) as served
|
||||
FROM countyoutages
|
||||
WHERE update > $2
|
||||
AND update < $3
|
||||
AND cwa = $1
|
||||
GROUP BY county, state, update
|
||||
) as subquery
|
||||
GROUP BY county, state
|
||||
";
|
||||
|
||||
$result = pg_query_params(
|
||||
$dbconn,
|
||||
$query,
|
||||
['RLX', $starttime, $endtime]
|
||||
);
|
||||
|
||||
if ($result === false) {
|
||||
throw new Exception('Query failed: ' . pg_last_error());
|
||||
}
|
||||
|
||||
$results = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$results[] = $line;
|
||||
}
|
||||
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode($results);
|
||||
|
||||
pg_free_result($result);
|
||||
} catch (Exception $e) {
|
||||
header('Content-Type: application/json');
|
||||
http_response_code(500);
|
||||
echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
|
||||
exit;
|
||||
}
|
||||
} else {
|
||||
header('Content-Type: application/json');
|
||||
http_response_code(400);
|
||||
echo json_encode(['error' => 'Both start and end parameters are required']);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//county current
|
||||
//"SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
|
||||
//if($_GET['county'] ?? null) {
|
||||
//$result = pg_query_params($dbconn,
|
||||
//"SELECT DISTINCT ON (county, state) county, state, SUM(outages) as outage, update as time, SUM(served) as served, round((SUM(outages) / SUM(served))*100,2) as perout FROM countyoutages WHERE update = (SELECT MAX(update) FROM countyoutages) AND cwa = $1 GROUP BY county, state, update",
|
||||
//array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
//while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
//$array[] = $line;
|
||||
//}
|
||||
//echo json_encode($array ?? null);
|
||||
//pg_free_result($result);
|
||||
//}
|
||||
|
||||
if (isset($_GET['county'])) {
|
||||
try {
|
||||
$query = "
|
||||
SELECT DISTINCT ON (county, state)
|
||||
county,
|
||||
state,
|
||||
SUM(outages) as outage,
|
||||
update as time,
|
||||
SUM(served) as served,
|
||||
ROUND(CAST((SUM(outages)::FLOAT / SUM(served)) * 100 AS NUMERIC), 2) as perout
|
||||
FROM countyoutages
|
||||
WHERE update = (SELECT MAX(update) FROM countyoutages)
|
||||
AND cwa = $1
|
||||
GROUP BY county, state, update
|
||||
";
|
||||
|
||||
$result = pg_query_params($dbconn, $query, ['RLX']);
|
||||
if ($result === false) {
|
||||
throw new Exception('Query failed: ' . pg_last_error());
|
||||
}
|
||||
|
||||
$results = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$results[] = $line;
|
||||
}
|
||||
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode($results);
|
||||
|
||||
pg_free_result($result);
|
||||
} catch (Exception $e) {
|
||||
header('Content-Type: application/json');
|
||||
http_response_code(500);
|
||||
echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
|
||||
exit;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//county archive delete after testing
|
||||
if($_GET['countyarchiveold'] ?? null) {
|
||||
|
||||
if($_GET['start'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end'] ?? null) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
|
||||
"select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update",
|
||||
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}}
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
if (isset($_GET['countyarchive'])) {
    if (isset($_GET['start']) && isset($_GET['end'])) {
        try {
            $starttime = pg_escape_string($_GET['start']);
            $endtime = pg_escape_string($_GET['end']);

            $query = "
                SELECT DISTINCT ON (county, state, update)
                    county,
                    state,
                    SUM(outages) as outage,
                    update as time,
                    SUM(served) as served
                FROM countyoutages
                WHERE update > $2
                    AND update < $3
                    AND cwa = $1
                GROUP BY county, state, update
            ";

            $result = pg_query_params($dbconn, $query, ['RLX', $starttime, $endtime]);
            if ($result === false) {
                throw new Exception('Query failed: ' . pg_last_error());
            }

            $results = [];
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $results[] = $line;
            }

            header('Content-Type: application/json');
            echo json_encode($results);

            pg_free_result($result);
        } catch (Exception $e) {
            header('Content-Type: application/json');
            http_response_code(500);
            echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
            if (isset($result)) {
                pg_free_result($result);
            }
            exit;
        }
    } else {
        header('Content-Type: application/json');
        http_response_code(400);
        echo json_encode(['error' => 'Both start and end parameters are required']);
    }
}

//Archive point data
if($_GET['archivepointold'] ?? null) {
    $starttime = pg_escape_string($_GET['start']);
    $endtime = pg_escape_string($_GET['end']);
    $result = pg_query_params($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
        array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
    pg_free_result($result);

}

if (isset($_GET['archivepoint'])) {
    try {
        if (!isset($_GET['start']) || !isset($_GET['end'])) {
            throw new Exception('Both start and end parameters are required');
        }

        $starttime = pg_escape_string($_GET['start']);
        $endtime = pg_escape_string($_GET['end']);

        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'time', startguess,
                            'county', county,
                            'state', state,
                            'outage', outagen,
                            'lastchange', lastchange,
                            'cause', cause
                        )
                    )
                    ORDER BY startguess ASC
                )
            )
            FROM power
            WHERE cwa = $1
                AND startguess > $2
                AND lastchange < $3
        ";

        $result = pg_query_params($dbconn, $query, ['RLX', $starttime, $endtime]);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'required') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        if (isset($result)) {
            pg_free_result($result);
        }
        exit;
    }
}

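// Example call (illustrative parameter values only):
//   ?archivepoint=1&start=2025-11-26%2000:00&end=2025-11-27%2000:00
// Returns a GeoJSON FeatureCollection of archived point outages for CWA RLX whose
// startguess is after "start" and whose lastchange is before "end".
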
//if($_GET['svr']=='current') {
//$result = pg_query_params($dbconn,
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
//$resultArray = pg_fetch_all($result);
//echo($resultArray[0]['json_build_object']);
//}

if(@$_GET['svr'] =='current') {
    $result = pg_query($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
    pg_free_result($result);
}

if(@$_GET['svr'] == 'archiveold') {
    if($_GET['start'] ?? null) {
        $starttime = pg_escape_string($_GET['start']);
        if($_GET['end'] ?? null) {
            $endtime = pg_escape_string($_GET['end']);

            $result = pg_query_params($dbconn,
                "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
                ,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
            $resultArray = pg_fetch_all($result);
            //echo '<pre>'; print_r($resultArray); echo '</pre>';
            echo($resultArray[0]['json_build_object']);
        }

    }

    if(!isset($_GET['start']) && !isset($_GET['end'])) {
        $result = pg_query($dbconn,
            "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
        $resultArray = pg_fetch_all($result);
        echo($resultArray[0]['json_build_object']);

    }
    pg_free_result($result);
}

if (isset($_GET['svr']) && $_GET['svr'] === 'archive') {
    try {
        $result = null;

        if (isset($_GET['start']) && isset($_GET['end'])) {
            $starttime = pg_escape_string($_GET['start']);
            $endtime = pg_escape_string($_GET['end']);

            $query = "
                SELECT json_build_object(
                    'type', 'FeatureCollection',
                    'features', json_agg(
                        json_build_object(
                            'type', 'Feature',
                            'geometry', ST_AsGeoJSON(nwspoly)::json,
                            'properties', json_build_object(
                                'issue', issue,
                                'end', endtime,
                                'vtec', vtec,
                                'type', warntype
                            )
                        )
                    )
                )
                FROM svr
                WHERE issue > $1
                    AND endtime < $2
            ";

            $result = pg_query_params($dbconn, $query, [$starttime, $endtime]);
        } elseif (!isset($_GET['start']) && !isset($_GET['end'])) {
            $query = "
                SELECT json_build_object(
                    'type', 'FeatureCollection',
                    'features', json_agg(
                        json_build_object(
                            'type', 'Feature',
                            'geometry', ST_AsGeoJSON(nwspoly)::json,
                            'properties', json_build_object(
                                'issue', issue,
                                'end', endtime,
                                'vtec', vtec,
                                'type', warntype
                            )
                        )
                    )
                )
                FROM svr
                WHERE issue < NOW() - INTERVAL '24 hours'
                    AND endtime > NOW() - INTERVAL '24 hours'
            ";

            $result = pg_query($dbconn, $query);
        } else {
            throw new Exception('Both start and end parameters are required together');
        }

        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'required') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}

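// Example calls (illustrative values only):
//   ?svr=archive&start=2025-11-26%2000:00&end=2025-11-27%2000:00  -> polygons with issue > start and endtime < end
//   ?svr=archive                                                  -> polygons from the trailing 24-hour window
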
if($_GET['svrpolysold'] ?? null) {
    $query = "select vtec,outagesvalid,polygonpop,outagesbuffer,lsrids from svr where EXTRACT(EPOCH FROM (current_timestamp - endtime ))/60/60/24 < 60";
    $result = pg_query($query) or die('Query failed: ' . pg_last_error());
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;

    }
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}

if (isset($_GET['svrpolys'])) {
    try {
        $query = "
            SELECT
                vtec,
                outagesvalid,
                polygonpop,
                outagesbuffer,
                lsrids
            FROM svr
            WHERE EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - endtime)) / 60 / 60 / 24 < 60
        ";

        $result = pg_query($dbconn, $query);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = [];
        while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $results[] = $line;
        }

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}

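// Note: the EXTRACT(EPOCH ...) / 60 / 60 / 24 < 60 filter keeps polygons whose
// endtime falls within the last 60 days. An equivalent predicate would be
// "WHERE endtime > CURRENT_TIMESTAMP - INTERVAL '60 days'".
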
if (isset($_GET['poweridsold'])) {
    $powerids = $_GET['powerids'];

    // Convert the comma-separated string to an array
    $poweridArray = explode(',', $powerids);

    // Sanitize and prepare array values for SQL query
    $sanitizedIds = array_map('intval', $poweridArray);

    // Prepare placeholders for the query
    $placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($sanitizedIds))));

    // Set up your database connection here

    // Prepare and execute the query with pg_query_params
    $sql = "SELECT lat,lon,lastchange,startguess,peakoutage,cause,lsrtime,lsrref,(lsrtime AT TIME ZONE 'America/New_York')::timestamp as lsrlocal FROM power WHERE id IN ($placeholders)";
    $result = pg_query_params($dbconn, $sql, $sanitizedIds);

    if (!$result) {
        echo 'Query failed: ' . pg_last_error();
        exit;
    }

    // Fetch and output the results
    $results = pg_fetch_all($result);
    echo json_encode($results);

    // Free resultset

    // Close the connection

    pg_free_result($result);
}

if (isset($_GET['powerids'])) {
    try {
        $powerids = $_GET['powerids'];

        // Validate input exists and isn't empty
        if (empty($powerids)) {
            throw new Exception('No power IDs provided');
        }

        // Convert comma-separated string to array and sanitize
        $poweridArray = explode(',', $powerids);
        $sanitizedIds = array_filter(array_map('intval', $poweridArray));

        if (empty($sanitizedIds)) {
            throw new Exception('Invalid power ID format');
        }

        // Prepare placeholders for the query
        $placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($sanitizedIds))));

        $query = "
            SELECT
                lat,
                lon,
                lastchange,
                startguess,
                peakoutage,
                cause,
                lsrtime,
                lsrref,
                (lsrtime AT TIME ZONE 'America/New_York')::timestamp as lsrlocal
            FROM power
            WHERE id IN ($placeholders)
        ";

        $result = pg_query_params($dbconn, $query, $sanitizedIds);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = pg_fetch_all($result) ?: [];

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'Invalid') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}

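// Note: $placeholders is just the string "$1,$2,...,$N" for N sanitized IDs, so a
// request such as ?powerids=101,205,307 (illustrative IDs) executes
// "... WHERE id IN ($1,$2,$3)" via pg_query_params($dbconn, $query, [101, 205, 307]),
// keeping every value parameterized rather than interpolated into the SQL text.
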
if (isset($_GET['poweridsgeojsonold'])) {
    $powerids = $_GET['poweridsgeojson'];

    $poweridArray = explode(',', $powerids);

    $sanitizedIds = array_map('intval', $poweridArray);

    $placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($sanitizedIds))));

    $sql = "
        SELECT json_build_object(
            'type', 'FeatureCollection',
            'features', json_agg(
                json_build_object(
                    'type', 'Feature',
                    'geometry', ST_AsGeoJSON(realgeom)::json,
                    'properties', json_build_object(
                        'id', id,
                        'time', (startguess AT TIME ZONE 'UTC')::timestamp,
                        'county', county,
                        'state', state,
                        'cause', cause,
                        'outage', peakoutage,
                        'lsrtime', (lsrtime AT TIME ZONE 'UTC')::timestamp
                    )
                ) ORDER BY startguess ASC
            )
        )
        FROM power
        WHERE id IN ($placeholders);";

    // $sql = "SELECT lat,lon,lastchange,startguess,peakoutage,cause,lsrtime,lsrref,(lsrtime AT TIME ZONE 'America/New_York')::timestamp as lsrlocal FROM power WHERE id IN ($placeholders)";
    $result = pg_query_params($dbconn, $sql, $sanitizedIds);

    if (!$result) {
        echo 'Query failed: ' . pg_last_error();
        exit;
    }

    $resultArray = pg_fetch_all($result);

    // Output the JSON object
    echo($resultArray[0]['json_build_object']);

    pg_free_result($result);
}

if (isset($_GET['poweridsgeojson'])) {
    try {
        $powerids = $_GET['poweridsgeojson'];

        if (empty($powerids)) {
            throw new Exception('No power IDs provided');
        }

        // Convert and sanitize power IDs
        $poweridArray = explode(',', $powerids);
        $sanitizedIds = array_filter(array_map('intval', $poweridArray));

        if (empty($sanitizedIds)) {
            throw new Exception('Invalid power ID format');
        }

        // Prepare placeholders
        $placeholders = implode(',', array_map(function($i) { return '$' . $i; }, range(1, count($sanitizedIds))));

        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'id', id,
                            'time', (startguess AT TIME ZONE 'UTC')::timestamp,
                            'county', county,
                            'state', state,
                            'cause', cause,
                            'outage', peakoutage,
                            'lsrtime', (lsrtime AT TIME ZONE 'UTC')::timestamp
                        )
                    ) ORDER BY startguess ASC
                )
            )
            FROM power
            WHERE id IN ($placeholders)
        ";

        $result = pg_query_params($dbconn, $query, $sanitizedIds);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'Invalid') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}

// Assume $dbconn is your established PostgreSQL connection handle
// Example: $dbconn = pg_connect("host=localhost dbname=yourdb user=youruser password=yourpass");
// if (!$dbconn) { die("Connection failed"); }

if (isset($_GET['polygongeojson'])) {
    $result = null; // Initialize result to null for catch block safety
    try {
        $polygonGeoJsonString = $_GET['polygongeojson'];

        if (empty($polygonGeoJsonString)) {
            throw new Exception('No GeoJSON polygon provided', 400); // Use exception code for status
        }

        // 1. Validate if the input is valid JSON
        // We decode here primarily to check JSON validity.
        // We'll pass the *original string* to PostGIS's ST_GeomFromGeoJSON for robustness.
        $polygonGeoJson = json_decode($polygonGeoJsonString);
        if (json_last_error() !== JSON_ERROR_NONE) {
            throw new Exception('Invalid JSON format: ' . json_last_error_msg(), 400);
        }

        // 2. Optional: Basic structural validation (can rely on PostGIS for full validation)
        if (!is_object($polygonGeoJson) || !isset($polygonGeoJson->type) || !in_array($polygonGeoJson->type, ['MultiPolygon', 'Polygon'])) {
            // Allow both Polygon and MultiPolygon for flexibility? Or stick to MultiPolygon?
            // Let's allow Polygon too, as ST_Within works with both.
            // If you strictly need *only* MultiPolygon, change the check.
            throw new Exception('Input GeoJSON must be of type Polygon or MultiPolygon.', 400);
        }
        if (!isset($polygonGeoJson->coordinates) || !is_array($polygonGeoJson->coordinates)) {
            throw new Exception('Input GeoJSON must have a coordinates array.', 400);
        }

        // 3. Prepare the PostgreSQL Query using PostGIS functions
        // - ST_GeomFromGeoJSON($1): Parses the input GeoJSON string.
        // - ST_SetSRID(..., 4326): Assigns the WGS84 SRID (standard for GeoJSON). Adjust if your data uses a different SRID.
        // - ST_Within(realgeom, ...): Checks if the power outage geometry is within the provided polygon geometry.
        // - Ensure your 'realgeom' column has a spatial index for performance!
        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'id', id,
                            'time', (startguess AT TIME ZONE 'UTC')::timestamp,
                            'county', county,
                            'state', state,
                            'cause', cause,
                            'outage', peakoutage,
                            'lsrtime', (lsrtime AT TIME ZONE 'UTC')::timestamp
                        )
                    ) ORDER BY startguess ASC -- Optional ordering
                )
            )
            FROM power
            WHERE ST_Within(realgeom, ST_SetSRID(ST_GeomFromGeoJSON($1), 4326))
        ";
        // Note: If 'realgeom' might be NULL, you might add "AND realgeom IS NOT NULL"

        // 4. Execute the query with the GeoJSON string as a parameter
        $params = [$polygonGeoJsonString];
        $result = pg_query_params($dbconn, $query, $params);

        if ($result === false) {
            // Check for specific PostGIS errors related to invalid GeoJSON input
            $pgError = pg_last_error($dbconn);
            if (strpos($pgError, 'invalid GeoJSON representation') !== false || strpos($pgError, 'ParseException') !== false || strpos($pgError, 'Invalid polygon') !== false) {
                throw new Exception('Invalid GeoJSON geometry data provided: ' . $pgError, 400);
            } else {
                // Throw a generic server error for other query failures
                throw new Exception('Query failed: ' . $pgError, 500);
            }
        }

        // 5. Fetch and Output Results
        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            // Ensure null result from json_agg (no features found) returns empty array
            $outputJson = $resultArray[0]['json_build_object'];
            $outputData = json_decode($outputJson, true);
            // array_key_exists() rather than isset(): isset() is false when the decoded
            // value is null, so the "no features" case would otherwise slip through unconverted.
            if (is_array($outputData) && array_key_exists('features', $outputData) && $outputData['features'] === null) {
                $outputData['features'] = [];
                echo json_encode($outputData);
            } else {
                echo $outputJson; // Output the JSON directly from Postgres
            }
        } else {
            // Should ideally be handled by the check above, but as a fallback
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);

    } catch (Exception $e) {
        // 6. Error Handling
        if (isset($result) && is_resource($result)) { // Check if $result is a valid resource before freeing
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        // Use exception code for status if provided (>=400), default to 500
        $statusCode = ($e->getCode() >= 400 && $e->getCode() < 600) ? $e->getCode() : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit; // Stop script execution after error
    }
}

// Add else block if needed for when the parameter is not set
// else {
//     // Handle case where $_GET['polygongeojson'] is not present
//     header('Content-Type: application/json');
//     http_response_code(400); // Bad Request
//     echo json_encode(['error' => 'Required parameter "polygongeojson" is missing.']);
//     exit;
// }

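// Example call (illustrative geometry only):
//   ?polygongeojson={"type":"Polygon","coordinates":[[[-82.0,38.0],[-81.0,38.0],[-81.0,39.0],[-82.0,39.0],[-82.0,38.0]]]}
// The URL-encoded GeoJSON string is passed straight through to ST_GeomFromGeoJSON,
// so the response contains the outages whose realgeom lies inside that polygon
// (WGS84 / SRID 4326 coordinates).
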
pg_close($dbconn);
?>
110
powerapi.py
Normal file
@@ -0,0 +1,110 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
//no gets, curent point outage info
|
||||
if(empty($_GET)) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
|
||||
array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
//county current
|
||||
if($_GET['county']) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
|
||||
array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}
|
||||
|
||||
//county archive
|
||||
if($_GET['countyarchive']) {
|
||||
|
||||
if($_GET['start']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end']) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
|
||||
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}}}
|
||||
|
||||
|
||||
//Archive point data
|
||||
if($_GET['archivepoint']) {
|
||||
if($_GET['start']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end']) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3"
|
||||
,array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}}}
|
||||
|
||||
|
||||
|
||||
//if($_GET['svr']=='current') {
|
||||
//$result = pg_query_params($dbconn,
|
||||
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
|
||||
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
|
||||
//$resultArray = pg_fetch_all($result);
|
||||
//echo($resultArray[0]['json_build_object']);
|
||||
//}
|
||||
|
||||
if($_GET['svr']=='current') {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if($_GET['svr'] == 'archive') {
|
||||
if($_GET['start']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end']) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
|
||||
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
}
|
||||
|
||||
if(!isset($_GET['start']) && !isset($_GET['end'])) {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
109
powerapi2.php
Normal file
@@ -0,0 +1,109 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
//no gets, curent point outage info
|
||||
if(empty($_GET)) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
|
||||
array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
//county current
|
||||
//"SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
|
||||
if($_GET['county']) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"select distinct on (county,state) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update = (select max(update) from countyoutages) and cwa = $1 group by county,state,update",
|
||||
array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}
|
||||
|
||||
//county archive
|
||||
if($_GET['countyarchive']) {
|
||||
|
||||
if($_GET['start']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end']) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
//"SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
|
||||
"select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update",
|
||||
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}}}
|
||||
|
||||
|
||||
//Archive point data
|
||||
if($_GET['archivepoint']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
|
||||
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
//if($_GET['svr']=='current') {
|
||||
//$result = pg_query_params($dbconn,
|
||||
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
|
||||
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
|
||||
//$resultArray = pg_fetch_all($result);
|
||||
//echo($resultArray[0]['json_build_object']);
|
||||
//}
|
||||
|
||||
if($_GET['svr']=='current') {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if($_GET['svr'] == 'archive') {
|
||||
if($_GET['start']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end']) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
|
||||
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
}
|
||||
|
||||
if(!isset($_GET['start']) && !isset($_GET['end'])) {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
141
powerapilwx.php
Normal file
@@ -0,0 +1,141 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
//no gets, curent point outage info
|
||||
if(empty($_GET)) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
|
||||
array('LWX')) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
|
||||
|
||||
|
||||
//county/state max
|
||||
if($_GET['max'] ?? null) {
|
||||
|
||||
if($_GET['start'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end'] ?? null) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
//select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
|
||||
"select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
|
||||
array('LWX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}}}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
//county current
|
||||
//"SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
|
||||
if($_GET['county'] ?? null) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"select distinct on (county,state) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update = (select max(update) from countyoutages) and (cwa = $1 or cwa = $2 or cwa = $3 or cwa = $4 or cwa = $5 or cwa = $6 or cwa = $7) group by county,state,update",
|
||||
array('RLX','JKL','ILN','PBZ','MRX','LWX','RNK')) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}
|
||||
|
||||
//county archive
|
||||
if($_GET['countyarchive'] ?? null) {
|
||||
|
||||
if($_GET['start'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end'] ?? null) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
//"SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
|
||||
"select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $8 and update < $9 and (cwa = $1 or cwa = $2 or cwa = $3 or cwa = $4 or cwa = $5 or cwa = $6 or cwa = $7) group by county,state,update",
|
||||
array('RLX','JKL','ILN','PBZ','MRX','LWX','RNK',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}}}
|
||||
|
||||
|
||||
//Archive point data
|
||||
if($_GET['archivepoint'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
|
||||
array('LWX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
//if($_GET['svr']=='current') {
|
||||
//$result = pg_query_params($dbconn,
|
||||
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
|
||||
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
|
||||
//$resultArray = pg_fetch_all($result);
|
||||
//echo($resultArray[0]['json_build_object']);
|
||||
//}
|
||||
|
||||
if(($_GET['svr'] ?? null) == 'current') {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if(($_GET['svr'] ?? null) == 'archive') {
|
||||
if($_GET['start'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end'] ?? null) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
|
||||
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
}
|
||||
|
||||
if(!isset($_GET['start']) && !isset($_GET['end'])) {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
172
powerapitest.php
Normal file
@@ -0,0 +1,172 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
//no gets, curent point outage info
|
||||
if(empty($_GET)) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
|
||||
array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
|
||||
|
||||
|
||||
//county/state max
|
||||
if($_GET['max'] ?? null) {
|
||||
|
||||
if($_GET['start'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end'] ?? null) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
//select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
|
||||
"select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
|
||||
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}}}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if (isset($_GET['county'])) {
|
||||
try {
|
||||
$query = "
|
||||
SELECT DISTINCT ON (county, state)
|
||||
county,
|
||||
state,
|
||||
SUM(outages) as outage,
|
||||
update as time,
|
||||
SUM(served) as served,
|
||||
ROUND(
|
||||
CAST(
|
||||
CASE
|
||||
WHEN SUM(served) = 0 THEN NULL
|
||||
ELSE (SUM(outages)::FLOAT / SUM(served)) * 100
|
||||
END AS NUMERIC
|
||||
), 2
|
||||
) as perout
|
||||
FROM countyoutages
|
||||
WHERE update = (SELECT MAX(update) FROM countyoutages)
|
||||
AND (cwa = $1 OR cwa = $2 OR cwa = $3 OR cwa = $4 OR cwa = $5 OR cwa = $6 OR cwa = $7)
|
||||
GROUP BY county, state, update
|
||||
";
|
||||
|
||||
$result = pg_query_params($dbconn, $query, ['RLX','JKL','ILN','PBZ','MRX','LWX','RNK']);
|
||||
if ($result === false) {
|
||||
throw new Exception('Query failed: ' . pg_last_error());
|
||||
}
|
||||
|
||||
$results = [];
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$results[] = $line;
|
||||
}
|
||||
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode($results);
|
||||
|
||||
pg_free_result($result);
|
||||
} catch (Exception $e) {
|
||||
header('Content-Type: application/json');
|
||||
http_response_code(500);
|
||||
echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
|
||||
exit;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
//county archive
|
||||
if($_GET['countyarchive'] ?? null) {
|
||||
|
||||
if($_GET['start'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end'] ?? null) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
//"SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
|
||||
"select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $9 and update < $10 and (cwa = $1 or cwa = $2 or cwa = $3 or cwa = $4 or cwa = $5 or cwa = $6 or cwa = $7 or cwa = $8) group by county,state,update",
|
||||
array('RLX','JKL','ILN','PBZ','MRX','LWX','RNK','CTP',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}}}
|
||||
|
||||
|
||||
//Archive point data
|
||||
if($_GET['archivepoint'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
|
||||
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
//if($_GET['svr']=='current') {
|
||||
//$result = pg_query_params($dbconn,
|
||||
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
|
||||
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
|
||||
//$resultArray = pg_fetch_all($result);
|
||||
//echo($resultArray[0]['json_build_object']);
|
||||
//}
|
||||
|
||||
if(($_GET['svr'] ?? null) == 'current') {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if(($_GET['svr'] ?? null) == 'archive') {
|
||||
if($_GET['start'] ?? null) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end'] ?? null) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
|
||||
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
}
|
||||
|
||||
if(!isset($_GET['start']) && !isset($_GET['end'])) {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
593
powercounty.py
Normal file
@@ -0,0 +1,593 @@
|
||||
# powercounty.py
|
||||
|
||||
import logging
|
||||
import requests
|
||||
import json
|
||||
import psycopg2
|
||||
from datetime import datetime
|
||||
import re
|
||||
from collections import defaultdict
|
||||
import threading
|
||||
|
||||
|
||||
# Set up logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s',
|
||||
handlers=[
|
||||
logging.FileHandler('powercounty.log'),
|
||||
logging.StreamHandler()
|
||||
]
|
||||
)
|
||||
|
||||
# Set up a logger for this module
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Database connection parameters
|
||||
DB_PARAMS = {
|
||||
'host': 'localhost',
|
||||
'database': 'nws',
|
||||
'user': 'nws',
|
||||
'password': 'nws'
|
||||
}
|
||||
|
||||
# Set up a requests session
|
||||
S = requests.Session()
|
||||
|
||||
# Power company metadata and URLs (from power3.py)
|
||||
AEP_OH_META = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
AEP_WV_META = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
|
||||
AEP_KY_META = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
|
||||
WV_FE_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
|
||||
AEP_WV_KUBRA_META = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
|
||||
AEP_OH_KUBRA_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
|
||||
AEP_KY_KUBRA_META = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
|
||||
|
||||
AEP_WV_BASE = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
AEP_OH_BASE = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
|
||||
AEP_KY_BASE = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
|
||||
GRAYSON_COUNTY = 'https://outages.graysonrecc.com/data/boundaries.json'
|
||||
|
||||
# Additional URLs from power3.py
|
||||
flemingjson = 'https://outage.fme.coop/data/boundaries.json'
|
||||
bigsandy_url = 'http://outagemap.bigsandyrecc.com/data/boundaries.json'
|
||||
southcentralpower_url = 'https://outage.southcentralpower.com/data/boundaries.json'
|
||||
|
||||
# Global list to collect all outage data
|
||||
allcountyoutages = []
|
||||
|
||||
|
||||
# This function will try to get a URL and log any errors
|
||||
def safe_request(url, description="Fetching data"):
|
||||
try:
|
||||
logger.info(f"{description}: {url}")
|
||||
response = S.get(url)
|
||||
response.raise_for_status() # Raise an exception for bad status codes
|
||||
logger.info(f"Successfully fetched data from {url}")
|
||||
return response
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Failed to {description} from {url}: {e}")
|
||||
return None
|
||||
|
||||
# This function will parse a JSON response and log errors
|
||||
def safe_json_load(response, description="Parsing JSON"):
|
||||
try:
|
||||
logger.info(f"{description}")
|
||||
data = json.loads(response.text)
|
||||
logger.info("Successfully parsed JSON data")
|
||||
return data
|
||||
except (json.JSONDecodeError, AttributeError) as e:
|
||||
logger.error(f"Failed to {description}: {e}")
|
||||
return None
|
||||
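# Illustrative usage (not part of the original module): the two helpers above are
# meant to be chained, e.g.
#   resp = safe_request(AEP_WV_META, "fetching AEP WV metadata")
#   meta = safe_json_load(resp, "parsing AEP WV metadata") if resp else None
# so a network or parse failure is logged and surfaces as None instead of raising.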
|
||||
# Ported functions from power3.py with enhanced logging
|
||||
|
||||
def fleming():
|
||||
"""Fetch outage data for Fleming County, KY"""
|
||||
logger.info("Fetching Fleming County outage data")
|
||||
state = 'KY'
|
||||
company = 'FLEM'
|
||||
temp = safe_request(flemingjson, "fetching Fleming data")
|
||||
if temp is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(temp, "parsing Fleming JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} Fleming County boundaries")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing Fleming County data: {e}")
|
||||
|
||||
def bigsandy():
|
||||
"""Fetch outage data for Big Sandy RECC"""
|
||||
logger.info("Fetching Big Sandy RECC outage data")
|
||||
state = 'OH'
|
||||
company = 'BS'
|
||||
temp = safe_request(bigsandy_url, "fetching Big Sandy data")
|
||||
if temp is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(temp, "parsing Big Sandy JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} Big Sandy boundaries")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing Big Sandy data: {e}")
|
||||
|
||||
def southcentralpower():
|
||||
"""Fetch outage data for South Central Power"""
|
||||
logger.info("Fetching South Central Power outage data")
|
||||
company = 'SCP'
|
||||
url = southcentralpower_url
|
||||
temp = safe_request(url, "fetching South Central Power data")
|
||||
if temp is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(temp, "parsing South Central Power JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
state = 'OH'
|
||||
try:
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} South Central Power boundaries")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing South Central Power data: {e}")
|
||||
|
||||
def ku_get_url():
|
||||
"""Get KU outage data URL"""
|
||||
logger.info("Getting KU outage data URL")
|
||||
url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
|
||||
r = safe_request(url, "fetching KU report page")
|
||||
if r is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
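        # The report page embeds a kubra instanceId in inline JS; scrape it and use it
        # to query the stormcenter API for the current interval-data path.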
x = re.search(r"instanceId: '(.*?)',", r.text)
|
||||
if not x:
|
||||
logger.error("Could not extract instanceId from KU report page")
|
||||
return None
|
||||
|
||||
urlcom = x.group(1)
|
||||
urlcom = 'https://kubra.io/stormcenter/api/v1/stormcenters/' + urlcom + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false'
|
||||
stuff = safe_request(urlcom, "fetching KU stormcenter data")
|
||||
if stuff is None:
|
||||
return None
|
||||
|
||||
jsonstuff = safe_json_load(stuff, "parsing KU stormcenter JSON")
|
||||
if jsonstuff is None:
|
||||
return None
|
||||
|
||||
interval_data = jsonstuff.get('data').get('interval_generation_data')
|
||||
urlcom = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
|
||||
logger.info(f"Successfully constructed KU data URL: {urlcom}")
|
||||
return urlcom
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting KU URL: {e}")
|
||||
return None
|
||||
|
||||
def county_json(meta, url, jsonname):
|
||||
"""Generic function to get county JSON data"""
|
||||
metainfo_response = safe_request(meta, "fetching metadata for county JSON")
|
||||
if metainfo_response is None:
|
||||
return None
|
||||
|
||||
metainfo = safe_json_load(metainfo_response, "parsing metadata for county JSON")
|
||||
if metainfo is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
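        # metadata.json names the directory currently holding the interval data;
        # splice it between the base URL and the report filename.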
metadir = metainfo['directory']
|
||||
url = url + metadir + jsonname
|
||||
outage_response = safe_request(url, "fetching county JSON data")
|
||||
return outage_response
|
||||
except KeyError as e:
|
||||
logger.error(f"Error accessing metadata directory: {e}")
|
||||
return None
|
||||
|
||||
def ku():
|
||||
"""Fetch KU outage data"""
|
||||
logger.info("Fetching KU outage data")
|
||||
ku_list = []
|
||||
url = ku_get_url()
|
||||
if url is None:
|
||||
return
|
||||
|
||||
data_response = safe_request(url, "fetching KU data")
|
||||
if data_response is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(data_response, "parsing KU data JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
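        # The KU report nests outages by region; these fixed indexes appear to pull the
        # county-level lists out of three regional groupings, which are then flattened
        # into ku_list below.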
temp = tempdata['file_data']['areas'][2]['areas'][0]['areas']
|
||||
temp1 = tempdata['file_data']['areas'][2]['areas'][1]['areas']
|
||||
temp2 = tempdata['file_data']['areas'][1]['areas'][0]['areas']
|
||||
|
||||
for i in temp:
|
||||
ku_list.append(i)
|
||||
for i in temp1:
|
||||
ku_list.append(i)
|
||||
for i in temp2:
|
||||
ku_list.append(i)
|
||||
|
||||
for o in ku_list:
|
||||
outageinfo = o['cust_a']['val'], o['cust_s'], o['name'].capitalize(), o['state'], o['utility']
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
logger.info(f"Successfully processed {len(ku_list)} KU outage records")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing KU data: {e}")
|
||||
|
||||
def grayson():
|
||||
"""Fetch Grayson County outage data"""
|
||||
logger.info("Fetching Grayson County outage data")
|
||||
company = 'GRE'
|
||||
outage_response = safe_request(GRAYSON_COUNTY, "fetching Grayson County data")
|
||||
if outage_response is None:
|
||||
return
|
||||
|
||||
if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
|
||||
logger.error(f"Unexpected content type from Grayson County: {outage_response.headers.get('Content-Type')}")
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(outage_response, "parsing Grayson County JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
state = 'KY'
|
||||
try:
|
||||
for j in tempdata[0]['boundaries']:
|
||||
outageinfo = j.get('customersOutNow'), j.get('customersServed'), j.get('name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info(f"Successfully processed {len(tempdata[0]['boundaries'])} Grayson County boundaries")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing Grayson County data: {e}")
|
||||
|
||||
def aep_county_vawv(meta, url, jsonname):
|
||||
"""Fetch AEP county data for VA and WV"""
|
||||
logger.info("Fetching AEP county data for VA and WV")
|
||||
company = 'AEP'
|
||||
outage_response = county_json(meta, url, jsonname)
|
||||
if outage_response is None:
|
||||
return
|
||||
|
||||
if not outage_response.headers.get('Content-Type', '').startswith('application/octet-stream'):
|
||||
logger.error(f"Unexpected content type from AEP VA/WV: {outage_response.headers.get('Content-Type')}")
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(outage_response, "parsing AEP VA/WV JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
# WV data
|
||||
state = 'WV'
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
# VA data
|
||||
state = 'VA'
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
|
||||
logger.info("Successfully processed AEP VA/WV county data")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing AEP VA/WV data: {e}")
|
||||
|
||||
def aep_county_oh(meta, url, jsonname):
|
||||
"""Fetch AEP county data for Ohio"""
|
||||
logger.info("Fetching AEP county data for Ohio")
|
||||
company = 'AEP'
|
||||
state = 'OH'
|
||||
outage_response = county_json(meta, url, jsonname)
|
||||
if outage_response is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(outage_response, "parsing AEP OH JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info("Successfully processed AEP OH county data")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing AEP OH data: {e}")
|
||||
|
||||
def aep_county_ky(meta, url, jsonname):
|
||||
"""Fetch AEP county data for Kentucky"""
|
||||
logger.info("Fetching AEP county data for Kentucky")
|
||||
company = 'AEP'
|
||||
state = 'KY'
|
||||
outage_response = county_json(meta, url, jsonname)
|
||||
if outage_response is None:
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(outage_response, "parsing AEP KY JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info("Successfully processed AEP KY county data")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing AEP KY data: {e}")
|
||||
|
||||
def firstenergy_county(meta, url, jsonname):
|
||||
"""Fetch First Energy county data"""
|
||||
logger.info("Fetching First Energy county data")
|
||||
company = 'FE'
|
||||
state = 'WV'
|
||||
outage_response = county_json(meta, url, jsonname)
|
||||
if outage_response is None:
|
||||
return
|
||||
|
||||
if not outage_response.headers.get('Content-Type', '').startswith('application/octet-stream'):
|
||||
logger.error(f"Unexpected content type from First Energy: {outage_response.headers.get('Content-Type')}")
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(outage_response, "parsing First Energy JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info("Successfully processed First Energy county data")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing First Energy data: {e}")
|
||||
|
||||
def get_kubra_hexes(url):
|
||||
"""Get Kubra hex data"""
|
||||
outage_response = safe_request(url, "fetching Kubra hex data")
|
||||
if outage_response is None:
|
||||
return None, None
|
||||
|
||||
if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
|
||||
logger.error(f"Unexpected content type from Kubra: {outage_response.headers.get('Content-Type')}")
|
||||
return None, None
|
||||
|
||||
tempdata = safe_json_load(outage_response, "parsing Kubra hex JSON")
|
||||
if tempdata is None:
|
||||
return None, None
|
||||
|
||||
try:
|
||||
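        # cluster_interval_generation_data is a slash-delimited path; the third and
        # fourth segments appear to be the hash values the kubra report URLs are keyed on.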
bothhex = tempdata.get('data').get('cluster_interval_generation_data')
|
||||
hexes = bothhex.split('/')
|
||||
logger.info(f"Successfully extracted Kubra hexes: {hexes}")
|
||||
return hexes[2], hexes[3]
|
||||
except (KeyError, AttributeError) as e:
|
||||
logger.error(f"Error extracting Kubra hexes: {e}")
|
||||
return None, None
|
||||
|
||||
def kubra_fe(baseurl1, baseurl2, meta):
|
||||
"""Fetch Kubra First Energy data"""
|
||||
logger.info("Fetching Kubra First Energy data")
|
||||
hex2 = get_kubra_hexes(meta)
|
||||
if hex2[0] is None:
|
||||
return
|
||||
|
||||
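    # Build the county report URL from the second hash segment returned by get_kubra_hexes().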
url = baseurl1 + hex2[1] + baseurl2
|
||||
company = 'FE'
|
||||
state = 'WV'
|
||||
outage_response = safe_request(url, "fetching Kubra FE data")
|
||||
if outage_response is None:
|
||||
return
|
||||
|
||||
if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
|
||||
logger.error(f"Unexpected content type from Kubra FE: {outage_response.headers.get('Content-Type')}")
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(outage_response, "parsing Kubra FE JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
try:
|
||||
for j in tempdata['file_data']['areas']:
|
||||
if j.get('key') == "county":
|
||||
outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('name').capitalize(), state, company
|
||||
allcountyoutages.append(outageinfo)
|
||||
logger.info("Successfully processed Kubra FE county data")
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing Kubra FE data: {e}")
|
||||
|
||||
def kubra_aep(baseurl1, baseurl2, meta, company='AEP'):
|
||||
"""Fetch Kubra AEP data"""
|
||||
logger.info(f"Fetching Kubra AEP data for company: {company}")
|
||||
hex2 = get_kubra_hexes(meta)
|
||||
if hex2[0] is None:
|
||||
return
|
||||
|
||||
url = baseurl1 + hex2[1] + baseurl2
|
||||
outage_response = safe_request(url, "fetching Kubra AEP data")
|
||||
if outage_response is None:
|
||||
return
|
||||
|
||||
if not outage_response.headers.get('Content-Type', '').startswith('application/json'):
|
||||
logger.error(f"Unexpected content type from Kubra AEP: {outage_response.headers.get('Content-Type')}")
|
||||
return
|
||||
|
||||
tempdata = safe_json_load(outage_response, "parsing Kubra AEP JSON")
|
||||
if tempdata is None:
|
||||
return
|
||||
|
||||
process_outage_data(tempdata, company)
|
||||
|
||||
def process_outage_data(data, company):
|
||||
"""Process outage data with enhanced error handling"""
|
||||
try:
|
||||
# Navigate to the primary list of areas
|
||||
primary_areas = data.get("file_data", {}).get("areas", [])
|
||||
|
||||
# If the list is empty, there's nothing to process
|
||||
if not primary_areas:
|
||||
logger.warning("No 'areas' data found in outage data.")
|
||||
return
|
||||
|
||||
# Check the key of the first item to determine the format
|
||||
first_item_key = primary_areas[0].get("key")
|
||||
|
||||
if first_item_key == "state":
|
||||
# Format 1: Loop through each state object
|
||||
for state_area in primary_areas:
|
||||
state_name = state_area.get("name", "Unknown State")
|
||||
# Get the nested list of counties for this state
|
||||
county_list = state_area.get("areas", [])
|
||||
for county in county_list:
|
||||
# We are now at the county level
|
||||
if county.get("key") == "county":
|
||||
outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(), county.get('state'), company
|
||||
allcountyoutages.append(outageinfo)
|
||||
elif first_item_key == "county":
|
||||
# Format 2: The primary list is already the county list
|
||||
for county in primary_areas:
|
||||
# We are now at the county level
|
||||
if county.get("key") == "county":
|
||||
outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(), county.get('state'), company
|
||||
allcountyoutages.append(outageinfo)
|
||||
else:
|
||||
logger.warning(f"Unknown data format. Could not find 'state' or 'county' key. Found: {first_item_key}")
|
||||
|
||||
except (KeyError, IndexError) as e:
|
||||
logger.error(f"Error processing outage data: {e}")
|
||||
|
||||
def insert_outage_data(cursor, outage_data, current_timestamp):
|
||||
"""Insert outage data into the new table"""
|
||||
if not outage_data:
|
||||
logger.info("No outage data to insert into the database.")
|
||||
return
|
||||
|
||||
    sql = 'INSERT INTO newcountyoutages (outages, served, county, state, update, company) VALUES (%s, %s, %s, %s, %s, %s)'
    # Each collected tuple is (outages, served, county, state, company); reorder it to the
    # column order above and attach the shared run timestamp before the bulk insert.
    rows = [(o[0], o[1], o[2], o[3], current_timestamp, o[4]) for o in outage_data]
    try:
        logger.info(f"Inserting {len(rows)} rows into the database.")
        cursor.executemany(sql, rows)
        logger.info("Successfully inserted data into the database.")
    except Exception as e:
        logger.error(f"Failed to insert data into the database: {e}")
        raise
|
||||
|
||||
def main():
|
||||
"""Main function to collect and insert outage data"""
|
||||
conn = None
|
||||
try:
|
||||
conn = psycopg2.connect(**DB_PARAMS)
|
||||
cursor = conn.cursor()
|
||||
logger.info("Successfully connected to the database.")
|
||||
|
||||
# Clear the global list at the start
|
||||
global allcountyoutages
|
||||
allcountyoutages = []
|
||||
|
||||
# Collect outage data for each provider
|
||||
logger.info("Starting data collection.")
|
||||
|
||||
# --- Kubra First Energy ---
|
||||
try:
|
||||
kubra_fe('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', WV_FE_META)
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting Kubra FE data: {e}")
|
||||
|
||||
# --- Kubra AEP WV ---
|
||||
try:
|
||||
kubra_aep('https://kubra.io/data/', '/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json', AEP_WV_KUBRA_META)
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting Kubra AEP WV data: {e}")
|
||||
|
||||
# --- Kubra AEP OH ---
|
||||
try:
|
||||
kubra_aep('https://kubra.io/data/', '/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json', AEP_OH_KUBRA_META)
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting Kubra AEP OH data: {e}")
|
||||
|
||||
# --- Kubra AEP KY ---
|
||||
try:
|
||||
kubra_aep('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', AEP_KY_KUBRA_META)
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting Kubra AEP KY data: {e}")
|
||||
|
||||
# --- Grayson County ---
|
||||
try:
|
||||
grayson()
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting Grayson County data: {e}")
|
||||
|
||||
# --- KU ---
|
||||
try:
|
||||
ku()
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting KU data: {e}")
|
||||
|
||||
# --- South Central Power ---
|
||||
try:
|
||||
southcentralpower()
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting South Central Power data: {e}")
|
||||
|
||||
# --- Big Sandy ---
|
||||
try:
|
||||
bigsandy()
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting Big Sandy data: {e}")
|
||||
|
||||
# --- AEP Direct (OH, WV, KY) ---
|
||||
try:
|
||||
aep_county_oh(AEP_OH_META, AEP_OH_BASE, "metadata.json")
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting AEP OH data: {e}")
|
||||
|
||||
try:
|
||||
aep_county_vawv(AEP_WV_META, AEP_WV_BASE, "metadata.json")
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting AEP WV/VA data: {e}")
|
||||
|
||||
try:
|
||||
aep_county_ky(AEP_KY_META, AEP_KY_BASE, "metadata.json")
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting AEP KY data: {e}")
|
||||
|
||||
# --- First Energy Direct ---
|
||||
try:
|
||||
firstenergy_county(WV_FE_META, 'https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/', "metadata.json")
|
||||
except Exception as e:
|
||||
logger.error(f"Error collecting First Energy data: {e}")
|
||||
|
||||
# Insert collected data into the new table
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
insert_outage_data(cursor, allcountyoutages, current_timestamp)
|
||||
conn.commit()
|
||||
logger.info("Data collection and database insert completed successfully.")
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("An error occurred during the main execution.")
|
||||
if conn:
|
||||
conn.rollback()
|
||||
finally:
|
||||
if conn:
|
||||
cursor.close()
|
||||
conn.close()
|
||||
logger.info("Database connection closed.")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
50
powersum.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
|
||||
|
||||
allcountyoutages = []
|
||||
|
||||
S = requests.Session()
|
||||
|
||||
|
||||
#select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update
|
||||
#select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > '2023-04-01' and update < '2023-04-02' and cwa = 'RLX' group by county,state,update) as potato group by county,state;
|
||||
cursor.execute("select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update")
|
||||
|
||||
|
||||
current_timestamp = str(datetime.utcnow())
|
||||
for i in allcountyoutages:
|
||||
sql = 'insert into countyoutages (outages, served, county, state, update, company) values (%s, %s, %s, %s, %s, %s)'
|
||||
val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
|
||||
cursor.execute(sql,val)
|
||||
conn.commit()
|
||||
|
||||
cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')
|
||||
|
||||
conn.commit()
|
||||
|
||||
cursor.execute("delete from countyoutages where cwa != 'RLX'")
|
||||
cursor.execute("delete from countyoutages where cwa is null")
|
||||
cursor.execute("delete from countyoutages where update < now () - interval '30 days'")
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
cursor.close()
|
||||
conn.close()
|
||||
51
powersummary.py
Normal file
@@ -0,0 +1,51 @@
|
||||
from tabulate import tabulate
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
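# Tree- and weather-caused outages in the RLX CWA that started within the last 120 hours,
# newest first; results feed the plain-text summary table written below.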
cursor.execute("""
|
||||
SELECT
|
||||
startguess::timestamp(0),
|
||||
lastchange::timestamp(0),
|
||||
(lastchange-startguess)::interval(0),
|
||||
peakoutage,
|
||||
cause,
|
||||
lat,
|
||||
lon,
|
||||
county,
|
||||
state
|
||||
FROM
|
||||
power
|
||||
WHERE
|
||||
(cause ILIKE '%%tree%%' OR cause ILIKE '%%weather%%')
|
||||
AND cwa = 'RLX'
|
||||
AND startguess BETWEEN now() - interval '120 hours' AND now()
|
||||
ORDER BY
|
||||
startguess DESC
|
||||
""")
|
||||
allweather = cursor.fetchall()
|
||||
cleanprint = []
|
||||
#print(allweather)
|
||||
|
||||
|
||||
if len(allweather) == 0:
|
||||
outage = ("No Tree Damage or Weather Reports In The Last 24 Hours")
|
||||
else:
|
||||
outage = tabulate(allweather,headers=['Start Time UTC', 'End Time UTC','Duration','Max Out','Cause','Lat','Lon','County','State'])
|
||||
|
||||
|
||||
with open("/var/www/html/work/24hrpower.txt", "w") as outfile:
|
||||
outfile.write(outage)
|
||||
|
||||
|
||||
|
||||
|
||||
cursor.close()
|
||||
conn.close()
|
||||
57339
rlxtest.json
Normal file
File diff suppressed because it is too large
2
robots.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
User-agent: *
|
||||
Disallow: /
|
||||
71
runallgeom.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import time
|
||||
import requests
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from psycopg2.extras import Json
|
||||
|
||||
|
||||
conn = psycopg2.connect(host='127.0.0.1', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
|
||||
|
||||
|
||||
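# Point-in-polygon joins: stamp each camera with its county, public/fire zone, CWA,
# zone name, and state from the reference geometry tables.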
updates = ['UPDATE public.cams SET geom = ST_SetSRID(ST_MakePoint(lon, lat), 4326)',
|
||||
'UPDATE public.cams SET county = county.countyname from public.county WHERE ST_Contains(county.geom,cams.geom)',
|
||||
'UPDATE public.cams SET pzone = pzone.state_zone from public.pzone WHERE ST_Contains(pzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET fzone = fzone.state_zone from public.fzone WHERE ST_Contains(fzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET cwa = fzone.cwa from public.fzone WHERE ST_Contains(fzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET zonename = pzone.shortname from public.pzone WHERE ST_Contains(pzone.geom,cams.geom)',
|
||||
'UPDATE public.cams SET state = county.state from public.county WHERE ST_Contains(county.geom,cams.geom)']
|
||||
|
||||
for i in updates:
|
||||
cursor.execute(i)
|
||||
|
||||
|
||||
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
headers = {
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
|
||||
}
|
||||
|
||||
cursor.execute("SELECT camid, lon, lat from cams WHERE elevation ISNULL")
|
||||
allcams = cursor.fetchall()
|
||||
|
||||
|
||||
|
||||
def get_elev(lon,lat):
|
||||
sql = "SELECT round(ST_Value(rast, ST_SetSRID(ST_MakePoint(%s,%s),4326)) * 3.28084) AS val FROM dem WHERE ST_Intersects(rast, ST_SetSRID(ST_MakePoint(%s,%s),4326))"
|
||||
vars = (lon,lat,lon,lat)
|
||||
cursor.execute(sql,vars)
|
||||
elev = cursor.fetchone()
|
||||
print(elev)
|
||||
    # ST_Value can return a row whose value is NULL when the point misses raster coverage;
    # fall back to the USGS elevation point query service in that case.
    if elev is not None and elev[0] is not None:
        return int(elev[0])
    elev = get_elev_backup(lon, lat)
    print('backup: ' + str(elev))
    return int(elev)
|
||||
|
||||
def get_elev_backup(lon,lat):
|
||||
baseurl = 'https://nationalmap.gov/epqs/pqs.php?x={}&y={}&units=Feet&output=json'.format(lon,lat)
|
||||
r = requests.get(baseurl, timeout=3,headers=headers)
|
||||
if r.status_code == 200:
|
||||
returns = r.json()
|
||||
elev = returns.get('USGS_Elevation_Point_Query_Service').get('Elevation_Query').get('Elevation')
|
||||
return elev
|
||||
|
||||
|
||||
|
||||
for cam in allcams:
|
||||
camid, lon, lat = cam
|
||||
elev = get_elev(lon,lat)
|
||||
val = (elev,camid)
|
||||
sql = "UPDATE cams SET elevation= %s WHERE camid = %s"
|
||||
cursor.execute(sql,val)
|
||||
conn.commit()
|
||||
|
||||
110
searchapi.php
Normal file
@@ -0,0 +1,110 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
//no GET parameters: current point outage info
|
||||
if(empty($_GET)) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
|
||||
array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
//county current
|
||||
if(!empty($_GET['county'])) {
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
|
||||
array('RLX')) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}
|
||||
|
||||
//county archive
|
||||
if(!empty($_GET['countyarchive'])) {
|
||||
|
||||
if($_GET['start']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end']) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
|
||||
array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
$array[] = $line;
|
||||
}
|
||||
echo json_encode($array);
|
||||
}}}
|
||||
|
||||
|
||||
//Archive point data
|
||||
if(!empty($_GET['archivepoint'])) {
|
||||
if($_GET['start']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end']) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3"
|
||||
,array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}}}
|
||||
|
||||
|
||||
|
||||
//if($_GET['svr']=='current') {
|
||||
//$result = pg_query_params($dbconn,
|
||||
//"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()"
|
||||
//,array('2023-01-01 01:00','2023-02-12 10:00')) or die('Query failed: ' . pg_last_error());
|
||||
//$resultArray = pg_fetch_all($result);
|
||||
//echo($resultArray[0]['json_build_object']);
|
||||
//}
|
||||
|
||||
if(isset($_GET['svr']) && $_GET['svr']=='current') {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
if(isset($_GET['svr']) && $_GET['svr'] == 'archive') {
|
||||
if($_GET['start']) {
|
||||
$starttime = pg_escape_string($_GET['start']);
|
||||
if($_GET['end']) {
|
||||
$endtime = pg_escape_string($_GET['end']);
|
||||
|
||||
|
||||
$result = pg_query_params($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
|
||||
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
}
|
||||
}
|
||||
|
||||
if(!isset($_GET['start']) && !isset($_GET['end'])) {
|
||||
$result = pg_query($dbconn,
|
||||
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
|
||||
$resultArray = pg_fetch_all($result);
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
pg_free_result($result);
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
34
single.php
Normal file
@@ -0,0 +1,34 @@
|
||||
<?php
|
||||
// Connecting, selecting database
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws")
|
||||
or die('Could not connect: ' . pg_last_error());
|
||||
|
||||
$camid=$_GET['camid'];
|
||||
|
||||
// Performing SQL query
|
||||
// Always treat hydro and airport as booleans - convert to true/false
|
||||
$query = "SELECT *, COALESCE(hydro, false) as hydro, COALESCE(airport, false) as airport FROM cams WHERE camid = '{$camid}'";
|
||||
$result = pg_query($query) or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Printing results in HTML
|
||||
$array = array();
|
||||
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
|
||||
// Ensure hydro is a proper boolean
|
||||
$line['hydro'] = ($line['hydro'] === 't' || $line['hydro'] === true);
|
||||
// Ensure airport is a proper boolean
|
||||
$line['airport'] = ($line['airport'] === 't' || $line['airport'] === true);
|
||||
$array[] = $line;
|
||||
}
|
||||
|
||||
// Debug: log the data being sent
|
||||
error_log("Single.php response for camid $camid: " . json_encode($array));
|
||||
|
||||
echo json_encode($array);
|
||||
|
||||
// Free resultset
|
||||
pg_free_result($result);
|
||||
|
||||
// Closing connection
|
||||
pg_close($dbconn);
|
||||
?>
|
||||
|
||||
279
staff.html
Normal file
@@ -0,0 +1,279 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<script src="https://code.jquery.com/ui/1.13.1/jquery-ui.js" integrity="sha256-6XMVI0zB8cRzfZjqKcD01PBsAy3FlDASrlC8SxCpInY=" crossorigin="anonymous"></script>
|
||||
<link rel="stylesheet" href="https://code.jquery.com/ui/1.13.1/themes/smoothness/jquery-ui.css">
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Table Chart - Office Stats</title>
|
||||
<style>
|
||||
body {
|
||||
margin: 0;
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: #f9f9f9;
|
||||
}
|
||||
#main-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
h1 {
|
||||
text-align: center;
|
||||
margin: 5px 0;
|
||||
}
|
||||
#chart-container {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
overflow: auto; /* Allows scrolling for the whole chart if it's too big */
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
background-color: #fff;
|
||||
border: 1px solid #ddd;
|
||||
}
|
||||
|
||||
/* --- MODIFIED/ADDED CSS FOR CELL SIZING --- */
|
||||
th, td {
|
||||
border: 1px solid #ddd;
|
||||
padding: 2px; /* Reduced padding for smaller cells */
|
||||
text-align: center;
|
||||
font-size: 10px; /* Reduced font size for compactness */
|
||||
width: 35px; /* Set a fixed width */
|
||||
height: 22px; /* Set a fixed height */
|
||||
box-sizing: border-box; /* Ensures padding & border are included in width/height */
|
||||
overflow: hidden; /* Hide content that overflows cell boundaries */
|
||||
text-overflow: ellipsis; /* Show '...' for truncated text (horizontally) */
|
||||
white-space: nowrap; /* Prevent text from wrapping to new lines */
|
||||
}
|
||||
|
||||
th {
|
||||
background-color: #f2f2f2;
|
||||
/* Header cells (dates) will also adhere to the width/height above. */
|
||||
/* Ellipsis will show if date strings are too long for 35px width. */
|
||||
}
|
||||
|
||||
/* Special styling for the first column header cells (office names in tbody) */
|
||||
tbody tr th:first-child {
|
||||
width: auto; /* Allow this column to be wider based on content */
|
||||
min-width: 70px; /* Set a minimum width for office names */
|
||||
text-align: left; /* Align office names to the left for readability */
|
||||
white-space: normal; /* Allow office names to wrap if they are too long */
|
||||
text-overflow: clip; /* Default behavior for overflow when wrapping is normal */
|
||||
padding-left: 5px; /* Add some left padding */
|
||||
/* Height will still be 22px from the th,td rule. If more height needed, override here. */
|
||||
}
|
||||
|
||||
td {
|
||||
/* max-width: 50px; */ /* Superseded by width in th,td */
|
||||
/* max-height: 10px; */ /* Typo corrected and superseded by height in th,td */
|
||||
transition: opacity 0.2s;
|
||||
}
|
||||
/* --- END OF MODIFIED/ADDED CSS --- */
|
||||
|
||||
td:hover {
|
||||
opacity: 0.7;
|
||||
}
|
||||
.tooltip {
|
||||
position: absolute;
|
||||
background-color: #333;
|
||||
color: #fff;
|
||||
padding: 5px 10px;
|
||||
border-radius: 3px;
|
||||
font-size: 12px; /* Tooltip font size can remain larger for readability */
|
||||
pointer-events: none;
|
||||
visibility: hidden;
|
||||
max-width: 300px;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="main-container">
|
||||
<h1>WFO Staffing Based on ICAM Account Status</h1>
|
||||
<div id="chart-container"></div>
|
||||
</div>
|
||||
<div class="tooltip" id="tooltip"></div>
|
||||
|
||||
<script>
|
||||
// Process data
|
||||
function processData(data) {
|
||||
const dates = data.map(item => {
|
||||
const dt = new Date(item.provided_datetime);
|
||||
return `${dt.getMonth() + 1}/${dt.getDate()}/${dt.getFullYear()}`;
|
||||
});
|
||||
const offices = [...new Set(data.flatMap(item => item.data.map(d => d.office)))];
|
||||
|
||||
const officeDateMap = {};
|
||||
const titleBreakdownMap = {};
|
||||
|
||||
offices.forEach(office => {
|
||||
officeDateMap[office] = {};
|
||||
titleBreakdownMap[office] = {};
|
||||
dates.forEach((date, i) => {
|
||||
const item = data[i];
|
||||
const officeData = item.data.find(d => d.office === office);
|
||||
officeDateMap[office][date] = officeData ? parseInt(officeData.unique_person_count, 10) : null;
|
||||
|
||||
if (officeData && officeData.title_counts_array) {
|
||||
try {
|
||||
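                        // title_counts_array arrives as a Postgres array literal of escaped
                        // JSON strings; strip the outer braces and unescape the quotes before
                        // splitting it into individual JSON objects.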
const titlesString = officeData.title_counts_array
|
||||
.replace(/^{|}$/g, '')
|
||||
.replace(/\\"/g, '"');
|
||||
|
||||
const titlesArray = titlesString
|
||||
.split('","')
|
||||
.map(str => str.replace(/^"|"$/g, ''))
|
||||
.map(str => {
|
||||
try {
|
||||
return JSON.parse(str);
|
||||
} catch (e) {
|
||||
console.error(`Failed to parse individual title: ${str}`, e);
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter(t => t !== null);
|
||||
|
||||
titleBreakdownMap[office][date] = titlesArray;
|
||||
} catch (e) {
|
||||
console.error(`Failed to parse title_counts_array for ${office} on ${date}:`, officeData.title_counts_array, e);
|
||||
titleBreakdownMap[office][date] = null;
|
||||
}
|
||||
} else {
|
||||
titleBreakdownMap[office][date] = null;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return { dates, offices, officeDateMap, titleBreakdownMap };
|
||||
}
|
||||
|
||||
|
||||
// Color calculation (Modified for better change visualization)
|
||||
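        // Cells are shaded relative to the first date's count for that office:
        // red for a decrease, green for an increase, with intensity capped at a 50% change.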
function getColor(count, firstCount) {
|
||||
if (count === null) {
|
||||
return '#e0e0e0';
|
||||
}
|
||||
if (firstCount === undefined || firstCount === null || (firstCount === 0 && count === 0)) {
|
||||
return 'hsl(0, 0%, 92%)';
|
||||
}
|
||||
if (firstCount === 0) {
|
||||
if (count > 0) {
|
||||
return 'hsl(120, 70%, 50%)';
|
||||
}
|
||||
else {
|
||||
return 'hsl(0, 0%, 92%)';
|
||||
}
|
||||
}
|
||||
const diff = (count - firstCount) / firstCount;
|
||||
const maxExpectedChange = 0.5;
|
||||
const intensity = Math.min(Math.abs(diff) / maxExpectedChange, 1.0);
|
||||
let hue, saturation, lightness;
|
||||
|
||||
if (count < firstCount) {
|
||||
hue = 0;
|
||||
saturation = 70;
|
||||
lightness = 92 - (42 * intensity);
|
||||
return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
|
||||
|
||||
} else if (count > firstCount) {
|
||||
hue = 120;
|
||||
saturation = 70;
|
||||
lightness = 92 - (42 * intensity);
|
||||
return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
|
||||
|
||||
} else {
|
||||
return 'hsl(0, 0%, 92%)';
|
||||
}
|
||||
}
|
||||
|
||||
// Generate table chart
|
||||
function generateChart(dates, offices, officeDateMap, titleBreakdownMap) {
|
||||
const table = document.createElement('table');
|
||||
const thead = document.createElement('thead');
|
||||
const tbody = document.createElement('tbody');
|
||||
|
||||
const headerRow = document.createElement('tr');
|
||||
headerRow.appendChild(document.createElement('th'));
|
||||
dates.forEach(date => {
|
||||
const th = document.createElement('th');
|
||||
th.textContent = date;
|
||||
headerRow.appendChild(th);
|
||||
});
|
||||
thead.appendChild(headerRow);
|
||||
table.appendChild(thead);
|
||||
|
||||
offices.forEach(office => {
|
||||
const row = document.createElement('tr');
|
||||
const officeCell = document.createElement('th'); // This will be styled by 'tbody tr th:first-child'
|
||||
officeCell.textContent = office.split('/').pop();
|
||||
row.appendChild(officeCell);
|
||||
|
||||
const firstCount = officeDateMap[office][dates[0]];
|
||||
dates.forEach(date => {
|
||||
const count = officeDateMap[office][date];
|
||||
const titles = titleBreakdownMap[office][date];
|
||||
const td = document.createElement('td');
|
||||
td.textContent = count !== null ? count : '';
|
||||
td.style.backgroundColor = getColor(count, firstCount);
|
||||
|
||||
td.addEventListener('mouseover', (e) => {
|
||||
const tooltip = document.getElementById('tooltip');
|
||||
tooltip.style.visibility = 'visible';
|
||||
if (titles && titles.length > 0) {
|
||||
const breakdown = titles.map(t => `${t.otitle}: ${t.count}`).join('\n');
|
||||
tooltip.textContent = `${office} on ${date}\n${breakdown}`;
|
||||
} else {
|
||||
tooltip.textContent = `${office} on ${date}: No data`;
|
||||
}
|
||||
tooltip.style.left = `${e.pageX + 10}px`;
|
||||
tooltip.style.top = `${e.pageY - 10}px`;
|
||||
});
|
||||
td.addEventListener('mouseout', () => {
|
||||
document.getElementById('tooltip').style.visibility = 'hidden';
|
||||
});
|
||||
|
||||
row.appendChild(td);
|
||||
});
|
||||
tbody.appendChild(row);
|
||||
});
|
||||
table.appendChild(tbody);
|
||||
|
||||
const container = document.getElementById('chart-container');
|
||||
container.innerHTML = '';
|
||||
container.appendChild(table);
|
||||
}
|
||||
|
||||
function fetchWeatherData(url, callback) {
|
||||
$.getJSON(url, function(weatherdata) {
|
||||
callback(weatherdata);
|
||||
}).fail(function(jqXHR, textStatus, errorThrown) {
|
||||
console.error('Error fetching weather data:', textStatus, errorThrown);
|
||||
callback(null);
|
||||
});
|
||||
}
|
||||
|
||||
function main() {
|
||||
fetchWeatherData('https://wx.stoat.org/nws.php?officestats11', function(weatherdata) {
|
||||
if (weatherdata) {
|
||||
const jsonData = weatherdata;
|
||||
const { dates, offices, officeDateMap, titleBreakdownMap } = processData(jsonData);
|
||||
generateChart(dates, offices, officeDateMap, titleBreakdownMap);
|
||||
|
||||
window.addEventListener('resize', () => {
|
||||
// Note: Re-processing data on resize is fine for this dataset size,
|
||||
// but for very large datasets, you might only regenerate the chart.
|
||||
const { dates, offices, officeDateMap, titleBreakdownMap } = processData(jsonData);
|
||||
generateChart(dates, offices, officeDateMap, titleBreakdownMap);
|
||||
});
|
||||
} else {
|
||||
console.log('Failed to retrieve weather data.');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
main();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
642
staff.py
Normal file
@@ -0,0 +1,642 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
import pandas as pd
|
||||
from datetime import datetime
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
import psycopg2
|
||||
import traceback
|
||||
import os
|
||||
import time
|
||||
from google.oauth2 import service_account
|
||||
from googleapiclient.discovery import build
|
||||
from googleapiclient.http import MediaIoBaseDownload
|
||||
import io
|
||||
|
||||
# Configuration
|
||||
SCOPES = ['https://www.googleapis.com/auth/drive'] # Full access for sync and delete
|
||||
SERVICE_ACCOUNT_FILE = '/var/www/html/work/noaa_staff.json' # Path to your service account JSON key
|
||||
DRIVE_FOLDER_ID = '1xCPU7Lhy-2cTg2Ul6tSQt6iRZeBGH3AW' # Replace with your Google Drive folder ID
|
||||
LOCAL_DIR = os.path.expanduser('/var/www/html/work/NOAA')
|
||||
USER_EMAIL = 'stoat@stoat.org'
|
||||
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
def get_drive_service():
|
||||
credentials = service_account.Credentials.from_service_account_file(
|
||||
SERVICE_ACCOUNT_FILE, scopes=SCOPES)
|
||||
credentials = credentials.with_subject(USER_EMAIL) # Impersonate your account
|
||||
return build('drive', 'v3', credentials=credentials)
|
||||
|
||||
def get_folder_files(service, folder_id):
|
||||
query = f"'{folder_id}' in parents and trashed=false"
|
||||
results = service.files().list(q=query, fields="files(id, name, mimeType, modifiedTime, size)").execute()
|
||||
return results.get('files', [])
|
||||
|
||||
def download_file(service, file_id, file_name, local_path, modified_time):
|
||||
request = service.files().get_media(fileId=file_id)
|
||||
fh = io.FileIO(local_path, 'wb')
|
||||
downloader = MediaIoBaseDownload(fh, request)
|
||||
done = False
|
||||
while not done:
|
||||
status, done = downloader.next_chunk()
|
||||
fh.close()
|
||||
mod_time = time.mktime(time.strptime(modified_time, "%Y-%m-%dT%H:%M:%S.%fZ"))
|
||||
os.utime(local_path, times=(mod_time, mod_time))
|
||||
|
||||
def sync_folder():
|
||||
if not os.path.exists(LOCAL_DIR):
|
||||
os.makedirs(LOCAL_DIR)
|
||||
|
||||
service = get_drive_service()
|
||||
drive_files = get_folder_files(service, DRIVE_FOLDER_ID)
|
||||
|
||||
local_files = {f: os.path.getmtime(os.path.join(LOCAL_DIR, f)) for f in os.listdir(LOCAL_DIR) if os.path.isfile(os.path.join(LOCAL_DIR, f))}
|
||||
|
||||
for file in drive_files:
|
||||
if file['mimeType'] == 'application/vnd.google-apps.folder':
|
||||
continue
|
||||
file_name = file['name']
|
||||
file_id = file['id']
|
||||
modified_time = file['modifiedTime']
|
||||
local_path = os.path.join(LOCAL_DIR, file_name)
|
||||
|
||||
drive_mod_time = time.mktime(time.strptime(modified_time, "%Y-%m-%dT%H:%M:%S.%fZ"))
|
||||
|
||||
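        # Re-download when the file is missing locally or the Drive timestamp differs
        # from the local mtime (set at download time) by more than one second.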
if file_name not in local_files or abs(local_files[file_name] - drive_mod_time) > 1:
|
||||
print(f"Syncing {file_name}...")
|
||||
download_file(service, file_id, file_name, local_path, modified_time)
|
||||
else:
|
||||
print(f"{file_name} is up-to-date.")
|
||||
|
||||
for local_file in local_files:
|
||||
if local_file not in [f['name'] for f in drive_files]:
|
||||
print(f"Removing {local_file} from local directory...")
|
||||
os.remove(os.path.join(LOCAL_DIR, local_file))
|
||||
|
||||
def remove_files(service, filenames):
|
||||
"""
|
||||
Remove specified files from both local sync folder and the Google Drive folder.
|
||||
With Editor permissions, files are moved to Trash and unlinked from the folder.
|
||||
Args:
|
||||
service: Google Drive API service instance.
|
||||
filenames (list): List of filenames to remove.
|
||||
"""
|
||||
drive_files = get_folder_files(service, DRIVE_FOLDER_ID)
|
||||
drive_file_map = {f['name']: f['id'] for f in drive_files}
|
||||
|
||||
for filename in filenames:
|
||||
# Remove from local folder
|
||||
local_path = os.path.join(LOCAL_DIR, filename)
|
||||
if os.path.exists(local_path):
|
||||
try:
|
||||
os.remove(local_path)
|
||||
print(f"Removed {filename} from local directory.")
|
||||
except Exception as e:
|
||||
print(f"Error removing {filename} locally: {e}")
|
||||
else:
|
||||
print(f"{filename} not found in local directory.")
|
||||
|
||||
# Remove from Google Drive folder (move to Trash and unlink)
|
||||
if filename in drive_file_map:
|
||||
file_id = drive_file_map[filename]
|
||||
try:
|
||||
# Move to Trash and remove from the folder
|
||||
service.files().update(
|
||||
fileId=file_id,
|
||||
body={'trashed': True}, # Move to Trash
|
||||
removeParents=DRIVE_FOLDER_ID # Unlink from the original folder
|
||||
).execute()
|
||||
print(f"Moved {filename} to Trash and removed from folder in Google Drive.")
|
||||
except Exception as e:
|
||||
print(f"Error processing {filename} in Google Drive: {e}")
|
||||
else:
|
||||
print(f"{filename} not found in Google Drive folder.")
|
||||
|
||||
|
||||
def excel_to_dict(file_path, sheet_name=0):
|
||||
# Read the Excel file
|
||||
with warnings.catch_warnings():
|
||||
warnings.filterwarnings("ignore", category=UserWarning, module=re.escape('openpyxl.styles.stylesheet'))
|
||||
df = pd.read_excel(file_path, sheet_name=sheet_name)
|
||||
|
||||
    # Convert the DataFrame to a dict of dicts keyed by row index
    # ('index' orientation: {row_index: {column: value, ...}})
    result = df.to_dict(orient='index')
|
||||
return result
|
||||
|
||||
|
||||
|
||||
def filter_dict_by_wfo(data, active="active"):
|
||||
|
||||
    # Keep rows that carry a NOAA_ORG_TITLE and a NOAA employee code.
    # (An additional "'WFO' in NOAA_ORG_TITLE" filter is currently disabled.)
    return {key: inner_dict for key, inner_dict in data.items()
            if 'NOAA_ORG_TITLE' in inner_dict and "NOAA" in inner_dict['EMPL_CODE']}
|
||||
|
||||
|
||||
def collect_and_organize_by_org(data, fields_to_collect, position_title_lookup):
|
||||
"""
|
||||
Collect specific fields, normalize NOAA_POSITION_TITLE, and organize by NOAA_ORG_TITLE with counts.
|
||||
|
||||
:param data: Dictionary with nested personnel data
|
||||
:param fields_to_collect: List of fields to extract
|
||||
:param position_title_lookup: Dict mapping NOAA_POSITION_TITLE variations to standardized titles
|
||||
    :return: Dict of collected entries keyed by the original row index
|
||||
"""
|
||||
collected_data = {}
|
||||
org_title_counts = {} # NOAA_ORG_TITLE -> NOAA_POSITION_TITLE -> count
|
||||
overall_position_counts = {} # Overall NOAA_POSITION_TITLE -> count
|
||||
|
||||
# Loop through the data
|
||||
for outer_key, inner_dict in data.items():
|
||||
entry = {}
|
||||
|
||||
# Collect specified fields
|
||||
for field in fields_to_collect:
|
||||
if field in inner_dict:
|
||||
if field == 'NOAA_POSITION_TITLE':
|
||||
raw_title = inner_dict[field].strip()
|
||||
normalized_title = position_title_lookup.get(raw_title, raw_title)
|
||||
entry['ORIG_TITLE'] = raw_title
|
||||
entry[field] = normalized_title
|
||||
                else:
                    entry[field] = inner_dict[field]
                    # Keep the raw title too; use .get() so rows missing a title don't raise.
                    entry['ORIG_TITLE'] = inner_dict.get('NOAA_POSITION_TITLE', '')
|
||||
else:
|
||||
entry[field] = ''
|
||||
|
||||
# Store the entry
|
||||
collected_data[outer_key] = entry
|
||||
|
||||
return collected_data
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def collect_data(file):
|
||||
#data = csv_dict(import_file=r"C:\Users\john.peck\Downloads\NSD.xlsx")
|
||||
data = excel_to_dict(file, sheet_name=0)
|
||||
data = filter_dict_by_wfo(data)
|
||||
|
||||
fields_to_collect = [
|
||||
'NOAA_POSITION_TITLE', 'ACCT_STATUS', 'OFFICE', 'NOAA_ORG_TITLE', 'PERSON_ID', 'EMPL_CODE',
|
||||
'LAST_NAME', 'FIRST_NAME', 'MIDDLE_NAME', 'MGR_NAME', 'LAST_UPDATED'
|
||||
]
|
||||
|
||||
|
||||
|
||||
|
||||
# Lookup table for NOAA_POSITION_TITLE normalization
|
||||
position_title_lookup = {
|
||||
'Electronic Technician': 'Electronics Technician',
|
||||
'Electronics Tech': 'Electronics Technician',
|
||||
'Electronics Technician': 'Electronics Technician',
|
||||
'El Tech': 'Electronics Technician',
|
||||
'ET': 'Electronics Technician',
|
||||
'EKA - Electronics Technician': 'Electronics Technician',
|
||||
'Electronics Tecnician': 'Electronics Technician',
|
||||
'FGZ - Electronics Technician': 'Electronics Technician',
|
||||
'EL TECH': 'Electronics Technician',
|
||||
'PQR - Electronics Technician': 'Electronics Technician',
|
||||
'MSO - El Tech': 'Electronics Technician',
|
||||
'TFX - Electronics Technician': 'Electronics Technician',
|
||||
'Eltech': 'Electronics Technician',
|
||||
'GGW - Electronics Technician': 'Electronics Technician',
|
||||
'SEW - El Tech': 'Electronics Technician',
|
||||
'Electrical Technician': 'Electronics Technician',
|
||||
'Electronic Techncian': 'Electronics Technician',
|
||||
'Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'General Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist Intern': 'Meteorologist (Could Include Leads)',
|
||||
'General Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'NOAA Federal Employee': 'Meteorologist (Could Include Leads)',
|
||||
'Met Intern': 'Meteorologist (Could Include Leads)',
|
||||
'Journey Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist - IMET': 'Meteorologist (Could Include Leads)',
|
||||
'METEOROLOGIST': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorlogist': 'Meteorologist (Could Include Leads)',
|
||||
'PDT - Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'MTR - General Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'LKN - Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorolgist': 'Meteorologist (Could Include Leads)',
|
||||
'PIH - Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'Meterologist': 'Meteorologist (Could Include Leads)',
|
||||
'Journeyman Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorological Intern': 'Meteorologist (Could Include Leads)',
|
||||
'OTX - Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'NWS Intern': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist - General Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'MET Intern': 'Meteorologist (Could Include Leads)',
|
||||
'MIT': 'Meteorologist (Could Include Leads)',
|
||||
'Forecaster/Incident Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'Entry Level Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist and IMET': 'Meteorologist (Could Include Leads)',
|
||||
'Fire Weather Program Manager': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist Intern WSFO JAN': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist ASA': 'Meteorologist (Could Include Leads)',
|
||||
'Lead Meteorologist and IMET': 'Meteorologist (Could Include Leads)',
|
||||
'meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'PIH - General Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'TFX - Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'SEW Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'Metorologist': 'Meteorologist (Could Include Leads)',
|
||||
'MET': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist General': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorogist': 'Meteorologist (Could Include Leads)',
|
||||
'LKN - General Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'EKA - Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist - Journey': 'Meteorologist (Could Include Leads)',
|
||||
'REV General Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'VEF - General Forecaster': 'Meteorologist (Could Include Leads)',
|
||||
'MTR - Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'Metorologist - National NWSChat Admin': 'Meteorologist (Could Include Leads)',
|
||||
'MSO-Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'VEF - Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'GGW - Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'EKA - Meteorologist': 'Meteorologist (Could Include Leads)',
|
||||
'Meteorologist Senior Forecaster': 'Lead Meteorologist',
|
||||
'Senior Forecaster - LIX': 'Lead Meteorologist',
|
||||
'TWC - Lead Forecaster': 'Lead Meteorologist',
|
||||
'Meteorologist - Lead': 'Lead Meteorologist',
|
||||
'Senior Forecaster-Fire Weather Program Manager': 'Lead Meteorologist',
|
||||
'Lead Forecasters': 'Lead Meteorologist',
|
||||
'Meteorologist - Senior': 'Lead Meteorologist',
|
||||
'lead Meteorologist': 'Lead Meteorologist',
|
||||
'Senior Forecaster Lead Meteorologist': 'Lead Meteorologist',
|
||||
'Lead Meteorologist': 'Lead Meteorologist',
|
||||
'Senior Meteorologist': 'Lead Meteorologist',
|
||||
'Senior Forecaster': 'Lead Meteorologist',
|
||||
'Lead Forecaster': 'Lead Meteorologist',
|
||||
'Meteorologist - Lead Forecaster': 'Lead Meteorologist',
|
||||
'Meteorologist - Senior Forecaster': 'Lead Meteorologist',
|
||||
'Meteorologist Lead Forecaster': 'Lead Meteorologist',
|
||||
'Information Technology Officer': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'IT Officer': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'ITO': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'Information Technology Specialist': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'IT Specialist': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'FGZ ITO': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'Information Technology Specialist ITO': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'Information Technology Officer(ITO)/Meteorologist': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'VEF - Information Technology Officer': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'Information Technolgy Officer': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'Information Technology Officer -ITO': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'Supervisory IT Specialist': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'IT Specialist - Systems Administrator': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'Information Technology Specialist ITO (May Include non ITO IT at WFOs)': 'ITO (May Include non ITO IT at WFOs)',
|
||||
'Electronics Systems Analyst': 'ESA',
|
||||
'Electronic System Analyst': 'ESA',
|
||||
'Electronic Systems Analyst': 'ESA',
|
||||
'Electronics System Analyst': 'ESA',
|
||||
'Electronic Systems Analyst - ESA': 'ESA',
|
||||
'AESA': 'ESA',
|
||||
'IT Specialist - Electronics System Analyst': 'ESA',
|
||||
'OTX ESA': 'ESA',
|
||||
'HNX - Electronic Systems Analyst': 'ESA',
|
||||
'Supervisory Information Technology Specialist - ESA': 'ESA',
|
||||
'Electronic Systems Analyst - ESA IT Specialist': 'ESA',
|
||||
'IT Specialist A-ESA': 'ESA',
|
||||
'Electronics Systems Analyst ESA': 'ESA',
|
||||
'STO ESA': 'ESA',
|
||||
'Electronics Systems Analyst -ESA': 'ESA',
|
||||
'Assistant ESA': 'ESA',
|
||||
'PQR - Assistant ESA': 'ESA',
|
||||
'Electronic Systems Analyst -ESA': 'ESA',
|
||||
'Meteorologist - Science Operations Officer': 'SOO',
|
||||
'SOO': 'SOO',
|
||||
'Science and Operations Officer': 'SOO',
|
||||
'Science Operations Officer': 'SOO',
|
||||
'Meteorologist - SOO': 'SOO',
|
||||
'Meteorologist - Science and Operations Officer': 'SOO',
|
||||
'Science and Operations Officer - AMIC': 'SOO',
|
||||
'Meteorologist -SOO': 'SOO',
|
||||
'Science Operations Officer SOO': 'SOO',
|
||||
'Science and Operations Officer - SOO': 'SOO',
|
||||
'Science amp; Operations Officer': 'SOO',
|
||||
'Meteorologist SOO': 'SOO',
|
||||
'Science and Operations Officer DOC NOAA NWS Taunton MA': 'SOO',
|
||||
'Science and Operations Officer - NWS New York - NY': 'SOO',
|
||||
'Warning Coordination Meteorologist': 'WCM',
|
||||
'WCM': 'WCM',
|
||||
'Meteorologist - Warning Coordination Meteorologist': 'WCM',
|
||||
'Warning Coordination Meteorololgist': 'WCM',
|
||||
'Warning Coordination Meteorologist - WCM': 'WCM',
|
||||
'Meteorologist WCM': 'WCM',
|
||||
'WCM - Meteorologist': 'WCM',
|
||||
'Warning and Coordination Meteorologist': 'WCM',
|
||||
'HNX WCM': 'WCM',
|
||||
'Warning Coordination Meeorologist': 'WCM',
|
||||
'Warning Coordination Meteorlogist': 'WCM',
|
||||
'Meteorologist In Charge - MIC': 'MIC',
|
||||
'MIC': 'MIC',
|
||||
'Meteorologist-In-Charge': 'MIC',
|
||||
'Meteorologist In Charge': 'MIC',
|
||||
'Meteorologist in Charge': 'MIC',
|
||||
'Meteorologist-in-Charge': 'MIC',
|
||||
'Meterorologist in Charge MIC': 'MIC',
|
||||
'Meteorologist-in-Charge MIC': 'MIC',
|
||||
'Meteorologist In Charge MIC': 'MIC',
|
||||
'HNX MIC': 'MIC',
|
||||
'Observations Program Leader': 'OPL',
|
||||
'OPL': 'OPL',
|
||||
'Observation Program Leader': 'OPL',
|
||||
'Observing Program Leader': 'OPL',
|
||||
'Observation Program Leader -OPL': 'OPL',
|
||||
'Observation Program Leader - OPL': 'OPL',
|
||||
'Observing Progam Leader': 'OPL',
|
||||
'Observer Program Leader': 'OPL',
|
||||
'Observer Program Leader -OPL': 'OPL',
|
||||
'Observing Program Leader - OPL': 'OPL',
|
||||
'PIH - Observation Program Lead': 'OPL',
|
||||
'Observing Program Lead': 'OPL',
|
||||
'Observations Program Leader - OPL': 'OPL',
|
||||
'Observation Programs Lead': 'OPL',
|
||||
'Meteorological Technician - OPL': 'OPL',
|
||||
'Meteorologist - Observing Progam Leader': 'OPL',
|
||||
'Lead Meteorological Technician': 'OPL',
|
||||
'Observation Program Manager': 'OPL',
|
||||
'Cooperative Program Manager': 'OPL',
|
||||
'Data Acquisition Program Manager': 'OPL',
|
||||
'Senior Hydrologist': 'Service Hydrologist',
|
||||
'Senior Service Hydrologist': 'Service Hydrologist',
|
||||
'Hydrologist': 'Service Hydrologist',
|
||||
'Service Hydrologist': 'Service Hydrologist',
|
||||
'Hydrologic Forecaster': 'Service Hydrologist',
|
||||
'Service Hydrologist Meteorologist': 'Service Hydrologist',
|
||||
'Lead Hydrologist': 'Service Hydrologist',
|
||||
'Sr. Service Hydrologist': 'Service Hydrologist',
|
||||
'Senior Service Hydrologist/Meteorologist': 'Service Hydrologist',
|
||||
'EKA Hydrologist': 'Service Hydrologist',
|
||||
'Meteorological Technician': 'HMT',
|
||||
'HMT': 'HMT',
|
||||
'Port Meteorological Officer': 'HMT',
|
||||
'Hydro-Meteorological Technician': 'HMT',
|
||||
'Hydrometeorological Technician': 'HMT',
|
||||
'PMO': 'HMT',
|
||||
'AROS Site Operator': 'HMT',
|
||||
'Upper Air Weather Observer': 'HMT',
|
||||
'Meteorologist Technician': 'HMT',
|
||||
'Ice-SST Specialist': 'HMT',
|
||||
'Great Lakes PMO': 'HMT',
|
||||
'Ice SST Specialist': 'HMT',
|
||||
'ASA': 'ASA',
|
||||
'Administrative Support Asst.': 'ASA',
|
||||
'Administrative Support Assistant': 'ASA',
|
||||
'Administrative Support Assistant ASA': 'ASA',
|
||||
'Administative Support Assistant': 'ASA',
|
||||
'ASa': 'ASA',
|
||||
'FGZ - Administrative Support': 'ASA',
|
||||
'Administrative Support Assitant': 'ASA',
|
||||
'Administrative Support Assistant - ASA - COTR': 'ASA',
|
||||
'Administrative Assistant': 'ASA',
|
||||
'Admin Suppt Asst': 'ASA',
|
||||
'Supervisory Meteorologist': 'Unspecified',
|
||||
'Operations Manager': 'Unspecified',
|
||||
'Director of Operations': 'Unspecified',
|
||||
'Assistant Meteorologist Ice-SST': 'Unspecified',
|
||||
'Skillbridge Electronics Technician': 'Unspecified',
|
||||
'Regional Equipment Specialist ER NWR Focal Point': 'Unspecified',
|
||||
'Virtual Volunteer': 'Unspecified',
|
||||
'WRH Service Evolution Program Leader': 'Unspecified',
|
||||
'Applications Integration Meteorologist': 'Unspecified',
|
||||
'Skillbridge Volunteer': 'Unspecified',
|
||||
'Contrator': 'Contractor',
|
||||
'Contracto': 'Contractor',
|
||||
'Contractor': 'Contractor',
|
||||
'FET': 'FET',
|
||||
'VEF - Engineering Technician': 'FET',
|
||||
'Facilities Technician': 'FET',
|
||||
'Engineering Technician': 'FET',
|
||||
'Facilities Engineering Technician': 'FET',
|
||||
'Field Engineering Tech': 'FET',
|
||||
'Facilities Engineering Tech': 'FET',
|
||||
'Field Engineering Technician': 'FET',
|
||||
'Regional Maintenance Specialist': 'RMS',
|
||||
'RMS': 'RMS',
|
||||
'ASOS RMS': 'RMS',
|
||||
'Pathways Student': 'Pathways',
|
||||
'Pathway Student Trainee': 'Pathways',
|
||||
'Pathways Intern': 'Pathways',
|
||||
'Pathways': 'Pathways',
|
||||
'Pathway': 'Pathways',
|
||||
'MTR - Student Intern': 'Pathways',
|
||||
'Student Fellow': 'Pathways',
|
||||
'Hollings Scholar': 'Pathways',
|
||||
'Student Trainee Meteorology': 'Pathways',
|
||||
'Pathway Intern': 'Pathways',
|
||||
'Emergency Response Specialist': 'ERS',
|
||||
'ERS': 'ERS',
|
||||
|
||||
|
||||
|
||||
}
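# The lookup above normalizes raw, free-text position titles (including common
# misspellings and office-prefixed variants) to a small set of canonical role names.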
|
||||
|
||||
|
||||
collected_data = collect_and_organize_by_org(data, fields_to_collect, position_title_lookup)
|
||||
return collected_data
|
||||
|
||||
|
||||
|
||||
def loop_through_xls():
|
||||
"""
|
||||
Loops through Excel files in a directory and returns a dictionary containing
|
||||
file information sorted by modification time.
|
||||
|
||||
Returns:
|
||||
dict: Dictionary with file names, paths, and modification times
|
||||
"""
|
||||
directory = "/var/www/html/work/NOAA"
|
||||
result = {}
|
||||
|
||||
# Get list of .xlsx files
|
||||
xlsx_files = [f for f in os.listdir(directory) if f.endswith('.xlsx')]
|
||||
|
||||
# Sort files by modification time
|
||||
xlsx_files.sort(key=lambda f: os.path.getmtime(os.path.join(directory, f)))
|
||||
|
||||
# Populate result dictionary
|
||||
|
||||
for file in xlsx_files:
|
||||
full_path = os.path.join(directory, file)
|
||||
# Get modification time and convert to datetime
|
||||
mod_time = datetime.fromtimestamp(os.path.getmtime(full_path))
|
||||
|
||||
# Add file info to result dictionary using filename as key
|
||||
result[file] = {
|
||||
'path': full_path,
|
||||
'last_updated': mod_time,
|
||||
'file_name': file
|
||||
}
|
||||
|
||||
return result
|
||||
|
||||
|
||||
|
||||
|
||||
def get_inner_dict(big_dict, j):
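# Return the first inner dict in big_dict whose 'PERSON_ID' equals j, or None if no match is found.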
|
||||
return next((inner for inner in big_dict.values() if inner['PERSON_ID'] == j), None)
|
||||
|
||||
|
||||
def compare_personids(personids, person_dict):
|
||||
try:
|
||||
# Extract person IDs from the dictionary into a set, only if 'PERSON_ID' exists
|
||||
dict_person_ids = {inner_dict['PERSON_ID'] for inner_dict in person_dict.values() if 'PERSON_ID' in inner_dict}
|
||||
|
||||
# Convert the list of person IDs to a set
|
||||
list_person_ids = set(personids)
|
||||
|
||||
# Compute the three sets
|
||||
in_both = list_person_ids & dict_person_ids # Intersection: IDs in both
|
||||
only_in_list = list_person_ids - dict_person_ids # Difference: IDs only in list
|
||||
only_in_dict = dict_person_ids - list_person_ids # Difference: IDs only in dict
|
||||
|
||||
# Return results in a dictionary
|
||||
return {
|
||||
'in_both': in_both,
|
||||
'only_in_list': only_in_list,
|
||||
'only_in_dict': only_in_dict
|
||||
}
|
||||
except Exception as e:
|
||||
# Output the error and traceback
|
||||
print("Content-Type: text/plain\n")
|
||||
print("An error occurred:\n")
|
||||
traceback.print_exc(file=sys.stdout)
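# Illustrative example (hypothetical IDs), assuming each inner dict carries a 'PERSON_ID':
#   compare_personids([1, 2, 3], {'a': {'PERSON_ID': 2}, 'b': {'PERSON_ID': 4}})
#   returns {'in_both': {2}, 'only_in_list': {1, 3}, 'only_in_dict': {4}}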
|
||||
|
||||
|
||||
|
||||
def insert_data(data):
|
||||
try:
|
||||
#replace this timestamp with the latest value in the data
|
||||
|
||||
#now = datetime.now()
|
||||
#formatted_time = now.strftime("%m/%d/%Y %I:%M:%S %p")
|
||||
sql = "SELECT DISTINCT personid FROM nws ORDER BY personid"
|
||||
cursor.execute(sql)
|
||||
personids_tuple = cursor.fetchall()
|
||||
personids = [row[0] for row in personids_tuple]
|
||||
for i in data:
|
||||
post_data = data[i]
|
||||
formatted_time = i
|
||||
result = compare_personids(personids, post_data)
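# both: IDs already in the nws table and present in this file's data
# nowgone: IDs in the table but absent from this file (re-inserted below with status 'inactive')
# onlynew: IDs appearing in this file for the first time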
|
||||
both = result['in_both']
|
||||
nowgone = result['only_in_list']
|
||||
onlynew = result['only_in_dict']
|
||||
|
||||
# Process 'both'
|
||||
for j in both:
|
||||
record = get_inner_dict(post_data, j)
|
||||
sql = """
|
||||
INSERT INTO nws (personid, first, middle, last, title, otitle, status, lastupdate, office, mgrname, orgtitle, recordtime)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
||||
ON CONFLICT DO NOTHING
|
||||
"""
|
||||
parms = (
|
||||
record['PERSON_ID'], record['FIRST_NAME'], record['MIDDLE_NAME'], record['LAST_NAME'],
|
||||
record['NOAA_POSITION_TITLE'], record['ORIG_TITLE'], record['ACCT_STATUS'],
|
||||
formatted_time, record['OFFICE'], record['MGR_NAME'], record['NOAA_ORG_TITLE'], record['LAST_UPDATED']
|
||||
)
|
||||
cursor.execute(sql, parms)
|
||||
|
||||
# Process 'nowgone'
|
||||
for j in nowgone:
|
||||
cursor.execute("SELECT * FROM nws WHERE personid = %s ORDER BY lastupdate DESC LIMIT 1", (j,))
|
||||
row = cursor.fetchone()
|
||||
if row:
|
||||
column_names = [desc[0] for desc in cursor.description]
|
||||
result = dict(zip(column_names, row))
|
||||
if result['status'] != "gone":
|
||||
sql = """
|
||||
INSERT INTO nws (personid, first, middle, last, title, otitle, status, lastupdate, office, mgrname, orgtitle, recordtime)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
||||
ON CONFLICT DO NOTHING
|
||||
"""
|
||||
parms = (
|
||||
j, result['first'], result['middle'], result['last'], result['title'],
|
||||
result['otitle'], 'inactive', formatted_time, result['office'],
|
||||
result['mgrname'], result['orgtitle'], result['lastupdate']
|
||||
)
|
||||
cursor.execute(sql, parms)
|
||||
|
||||
# Process 'onlynew'
|
||||
for j in onlynew:
|
||||
record = get_inner_dict(post_data, j)
|
||||
sql = """
|
||||
INSERT INTO nws (personid, first, middle, last, title, otitle, status, lastupdate, office, mgrname, orgtitle, recordtime)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
||||
ON CONFLICT DO NOTHING
|
||||
"""
|
||||
parms = (
|
||||
record['PERSON_ID'], record['FIRST_NAME'], record['MIDDLE_NAME'], record['LAST_NAME'],
|
||||
record['NOAA_POSITION_TITLE'], record['ORIG_TITLE'], record['ACCT_STATUS'],
|
||||
formatted_time, record['OFFICE'], record['MGR_NAME'], record['NOAA_ORG_TITLE'], record['LAST_UPDATED']
|
||||
)
|
||||
cursor.execute(sql, parms)
|
||||
|
||||
conn.commit() # Single commit at the end
|
||||
cursor.execute("update nws set status = 'gone' where status = '' or status = 'NaN'")
|
||||
conn.commit()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
alldata = {}
|
||||
sync_folder()
|
||||
deletable = []
|
||||
xlsx_files = loop_through_xls()
|
||||
print(xlsx_files)
|
||||
for p in xlsx_files:
|
||||
full_path = xlsx_files[p]['path']
|
||||
update = xlsx_files[p]['last_updated']
|
||||
file = xlsx_files[p]['file_name']
|
||||
# Get the formatted update time
|
||||
formatted_time = update.strftime('%m/%d/%Y %I:%M:%S %p')
|
||||
# Collect data from the file
|
||||
datedata = collect_data(full_path)
|
||||
deletable.append(file)
|
||||
# Add additional file info if desired (optional)
|
||||
#datedata['path'] = xlsx_files[p]['path']
|
||||
|
||||
# Use the formatted time as the key, with datedata as the value
|
||||
alldata[formatted_time] = datedata
|
||||
#print(post_json_to_cgi(alldata))
|
||||
#call database insert here
|
||||
insert_data(alldata)
|
||||
#print(alldata)
|
||||
#newalldata = remove_duplicate_records(alldata)
|
||||
#with open("nws.json", "w") as file:
|
||||
# json.dump(newalldata, file, indent=4)
|
||||
#print(post_json_to_cgi(newalldata))
|
||||
service = get_drive_service()
|
||||
|
||||
|
||||
|
||||
# Example: Remove specific files from both local and Drive
|
||||
files_to_remove = deletable # Replace with your filenames
|
||||
remove_files(service, files_to_remove)
|
||||
conn.close()
|
||||
1
states.geojson
Normal file
1
states.geojson
Normal file
File diff suppressed because one or more lines are too long
109
states.json
Normal file
109
states.json
Normal file
File diff suppressed because one or more lines are too long
121
stoat.py
Normal file
121
stoat.py
Normal file
@@ -0,0 +1,121 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
from socket import *
|
||||
import datetime
|
||||
import math
|
||||
import sqlite3
|
||||
import pytz
|
||||
|
||||
def roundTime(dt=None, roundTo=300):
|
||||
if dt is None: dt = datetime.datetime.now()
|
||||
seconds = (dt - dt.min).seconds
|
||||
rounding = (seconds+roundTo/2) // roundTo * roundTo
|
||||
return dt + datetime.timedelta(0,rounding-seconds,-dt.microsecond)
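# Example: roundTime(datetime.datetime(2025, 1, 1, 12, 3, 40), 300) -> 2025-01-01 12:05:00
# (43,420 seconds into the day rounds up to the nearest 300-second boundary, 43,500)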
|
||||
|
||||
conn = sqlite3.connect('/home/stoat/abewx/stoat.db')
|
||||
c = conn.cursor()
|
||||
c.execute('CREATE TABLE IF NOT EXISTS precipitation (dtg TEXT PRIMARY KEY, precip real)')
|
||||
|
||||
os.environ["AMBIENT_ENDPOINT"] = 'https://api.ambientweather.net/v1'
|
||||
os.environ["AMBIENT_API_KEY"] = '5eeaadda004744a284ecbfdc22dcb852ca86b8a488d2456bb638a346ee5adf7f'
|
||||
os.environ["AMBIENT_APPLICATION_KEY"] = '7eb31955d49c41f08e447b23a9dfc8be23992e8ffe3b41f98d9c9c68319e0e2e'
|
||||
|
||||
from ambient_api.ambientapi import AmbientAPI
|
||||
|
||||
callsign = 'FW6696'
|
||||
latitude = 38.344299
|
||||
longitude = -81.6789
|
||||
devicename = 'WS2902A' #This identifies your equipment/software. You can put anything you want. I use 'WS2902A', which is the model of weather station I have
|
||||
#IMPORTANT: lat/long must be listed in DECIMAL DEGREES (DD.DDDD). Number of digits doesn't really matter. Use positive values for N/E, negative for S/W. The program then converts to degrees decimal minutes (DD MM.MMMM), which is the format APRS requires.
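#Example: 38.344299 -> 38 degrees, 0.344299*60 = 20.66 minutes -> '3820.66N'; -81.6789 -> '08140.73W'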
|
||||
|
||||
api = AmbientAPI()
|
||||
|
||||
devices = api.get_devices()
|
||||
home = devices[0] #this assumes you have only one station. Increase number accordingly if you want to get data from others
|
||||
weather= home.last_data
|
||||
|
||||
#convert coordinates to degrees decimal minutes
|
||||
if latitude < 0:
|
||||
latitude = abs(latitude)
|
||||
latitude = str(int(latitude)).zfill(2) + ('%05.2f' % (60*(latitude - int(latitude)))) + 'S'
|
||||
else:
|
||||
latitude = str(int(latitude)).zfill(2) + ('%05.2f' % (60*(latitude - int(latitude)))) + 'N'
|
||||
latitude = '3820.66N'
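#NOTE: this hard-coded string overrides the latitude computed above from the decimal-degree value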
|
||||
|
||||
if longitude < 0:
|
||||
longitude = abs(longitude)
|
||||
longitude = str(int(longitude)).zfill(3) + ('%05.2f' % (60*(longitude - int(longitude)))) + 'W'
|
||||
else:
|
||||
longitude = str(int(longitude)).zfill(3) + ('%05.2f' % (60*(longitude - int(longitude)))) + 'E'
|
||||
|
||||
winddir = str(weather.get('winddir')).zfill(3)
|
||||
windspeed = str(int(math.ceil(weather.get('windspeedmph')))).zfill(3)
|
||||
windgust = str(int(math.ceil(weather.get('windgustmph')))).zfill(3)
|
||||
if weather.get('tempf') < 0:
|
||||
temp = '-' + str(int(round(weather.get('tempf')))).zfill(2)
|
||||
else:
|
||||
temp = str(int(round(weather.get('tempf')))).zfill(3)
|
||||
dailyrain = str(int(round(weather.get('dailyrainin')*100))).zfill(3) #this value IS supposed to be "rain since local midnight," so it is always reported
|
||||
rainhour = str(int(weather.get('hourlyrainin')*100)).zfill(3)
|
||||
pressure = str(int(weather.get('baromrelin')/0.0029529983071445)).zfill(5) #pressure is supposed to be reported to APRS in "altimiter" (QNH) format, that is, relative. The system itself corrects the pressure to sea level based on your station's listed elevation, so make sure that's accurate
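#Example: 29.92 inHg / 0.0029529983071445 = ~10132 -> 'b10132' (tenths of millibars, about 1013.2 hPa)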
|
||||
humidity = str(int(weather.get('humidity')%100)).zfill(2) #uses modulus operator % so that 100% is given as '00'
|
||||
|
||||
dtg = roundTime(datetime.datetime.utcnow())
|
||||
olddtg = dtg - datetime.timedelta(days=3)
|
||||
local_tz = pytz.timezone('US/Eastern')
|
||||
utc = pytz.utc
|
||||
localdate = datetime.datetime.now(local_tz).date()
|
||||
localmidnight = local_tz.localize(datetime.datetime.combine(localdate, datetime.time(0, 0)), is_dst=None)
|
||||
utc_midnighttoday = localmidnight.astimezone(utc).replace(tzinfo=None)
|
||||
utc_midnightyesterday = utc_midnighttoday - datetime.timedelta(hours=24)
|
||||
prev24 = dtg - datetime.timedelta(hours=24)
|
||||
|
||||
c.execute("INSERT OR REPLACE INTO precipitation (dtg, precip) values (?, ?)",(dtg, dailyrain))
|
||||
conn.commit()
|
||||
|
||||
try:
|
||||
c.execute("SELECT precip FROM precipitation WHERE dtg = (?)",(prev24,))
|
||||
prev24value = c.fetchone()[0]
|
||||
print(prev24value)
|
||||
c.execute("SELECT precip FROM precipitation WHERE dtg = (?)",(utc_midnightyesterday,))
|
||||
prevmidnightvalue = c.fetchone()[0]
|
||||
print(prevmidnightvalue)
|
||||
c.execute("SELECT precip FROM precipitation WHERE dtg = (?)",(utc_midnighttoday,))
|
||||
currentmidnightvalue = c.fetchone()[0]
|
||||
print(currentmidnightvalue)
|
||||
rain24 = currentmidnightvalue - prev24value + int(dailyrain)
|
||||
print(rain24)
|
||||
past24hoursrain = str(int(rain24)).zfill(3)
|
||||
print(past24hoursrain)
|
||||
except Exception:
|
||||
past24hoursrain = '...'
|
||||
print(past24hoursrain)
|
||||
|
||||
|
||||
|
||||
# If luminosity is above 999 W/m^2, APRS wants a lowercase L
|
||||
if weather.get('solarradiation') >= 1000:
|
||||
luminosity = 'l' + str(int(round(weather.get('solarradiation'))) % 1000).zfill(3)
|
||||
else:
|
||||
luminosity = 'L' + str(int(round(weather.get('solarradiation')))).zfill(3)
|
||||
|
||||
# Time reported in Zulu (UTC). 24-hour rain workaround still has to be local time, though
|
||||
#packet = callsign + '>APRS,TCPIP*:@' + datetime.utcnow().strftime("%d%H%M") + 'z' + latitude + '/' + longitude + '_' + winddir + '/' + windspeed + 'g' + windgust + 't' + temp + 'r' + rainhour + 'p' + (past24hoursrain if datetime.now().time() >= time(23,45) else '...') + 'P' + dailyrain + 'h' + humidity + 'b' + pressure + luminosity + devicename'r''r'
|
||||
#'r' + past24hoursrain + 'p'
|
||||
#past24hoursrain = '...'
|
||||
#packet = callsign + '>APRS,TCPIP*:@' + datetime.datetime.utcnow().strftime("%d%H%M") + 'z' + latitude + '/' + longitude + '_' + winddir + '/' + windspeed + 'g' + windgust + 't' + temp + 'r' + rainhour + 'p' + past24hoursrain.zfill(3) + 'P' + dailyrain + 'h' + humidity + 'b' + pressure + luminosity + devicename
|
||||
packet = callsign + '>APRS,TCPIP*:@' + datetime.datetime.utcnow().strftime("%d%H%M") + 'z' + latitude + '/' + longitude + '_' + winddir + '/' + windspeed + 'g' + windgust + 't' + temp + 'r' + rainhour + 'P' + dailyrain + 'h' + humidity + 'b' + pressure + luminosity + devicename
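#Example of an assembled packet (illustrative values only):
#FW6696>APRS,TCPIP*:@271530z3820.66N/08140.73W_180/005g012t072r000P000h55b10132L456WS2902A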
|
||||
print(packet) #prints the assembled packet for debugging purposes
|
||||
|
||||
#send the packet
|
||||
s = socket(AF_INET, SOCK_STREAM)
|
||||
s.connect(('cwop.aprs.net', 14580))
|
||||
s.send(('user ' + callsign + ' pass -1 vers Python\n').encode())
|
||||
s.send((packet+'\n').encode())
|
||||
s.shutdown(0)
|
||||
s.close()
|
||||
|
||||
|
||||
c.close()
|
||||
conn.close()
|
||||
203
stormdata.html
Normal file
203
stormdata.html
Normal file
@@ -0,0 +1,203 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>NWS VTEC Browser</title>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.6.0/jquery.min.js"></script>
|
||||
<script src="https://code.jquery.com/ui/1.13.2/jquery-ui.js"></script>
|
||||
<link rel="stylesheet" href="https://code.jquery.com/ui/1.13.2/themes/smoothness/jquery-ui.css">
|
||||
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.8.0/dist/leaflet.css" integrity="sha512-hoalWLoI8r4UszCkZ5kL8vayOGVae1oxXe/2A4AO6J9+580uKHDO3JdHb7NzwwzK5xr/Fs0W40kiNHxM9vyTtQ==" crossorigin="" />
|
||||
<script src="https://unpkg.com/leaflet@1.8.0/dist/leaflet.js" integrity="sha512-BB3hKbKWOc9Ez/TAwyWxNXeoV9c1v6FIeYiBieIWkpLjauysF18NzgR1MBNBXf8/KABdlkX68nAhlwcDFLGPCQ==" crossorigin=""></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/chart.js@3.9.1/dist/chart.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/chartjs-adapter-date-fns@^2.0.0/dist/chartjs-adapter-date-fns.bundle.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/@turf/turf@6.5.0/turf.min.js"></script>
|
||||
<!-- CSS -->
|
||||
<style type="text/css">
|
||||
body, html {
|
||||
height: 100%; width: 100%; margin: 0; padding: 0; font-family: sans-serif;
|
||||
}
|
||||
#container {
|
||||
display: flex; height: 100%; width: 100%;
|
||||
}
|
||||
#map-container {
|
||||
flex-grow: 1; height: 100%; display: flex; flex-direction: column;
|
||||
}
|
||||
#mapid {
|
||||
flex-grow: 1; width: 100%; z-index: 1;
|
||||
}
|
||||
#summary-container {
|
||||
height: 60%; /* Adjust map/summary ratio (higher % = smaller map) */
|
||||
overflow-y: auto; padding: 15px; background: #f9f9f9;
|
||||
border-top: 1px solid #ddd; line-height: 1.4; box-sizing: border-box;
|
||||
}
|
||||
#summary-container img {
|
||||
max-width: calc(100% - 450px); height: auto; border: 1px solid #ccc; margin-top: 10px;
|
||||
margin-bottom: 10px; display: block;
|
||||
}
|
||||
#summary-container h2 {
|
||||
margin-top: 0; margin-bottom: 10px; padding-bottom: 5px; border-bottom: 2px solid #ccc;
|
||||
}
|
||||
#summary-container h3 {
|
||||
margin-top: 18px; margin-bottom: 8px; padding-bottom: 3px; border-bottom: 1px solid #eee;
|
||||
}
|
||||
#summary-container h4 { margin-top: 10px; margin-bottom: 5px; }
|
||||
#summary-container p { margin-top: 0.5em; margin-bottom: 0.5em; }
|
||||
#summary-container ul { margin-top: 0.5em; padding-left: 25px; }
|
||||
#summary-container li { margin-bottom: 0.4em; }
|
||||
#summary-container .gauge-summary { margin-bottom: 20px; border-top: 1px solid #eee; padding-top: 10px; }
|
||||
#summary-container .event-summary-item { border-left-width: 5px; border-left-style: solid; padding-left: 10px; margin-bottom: 15px; }
|
||||
#copySummaryBtn {
|
||||
float: right; margin-bottom: 10px; padding: 5px 10px; cursor: pointer; border: 1px solid #ccc;
|
||||
background-color: #f0f0f0; border-radius: 3px; font-size: 0.9em;
|
||||
}
|
||||
#copySummaryBtn:hover { background-color: #e0e0e0; }
|
||||
#controls {
|
||||
width: 400px; /* Set the desired width */
|
||||
flex-shrink: 0; /* Prevent the item from shrinking */
|
||||
padding: 10px;
|
||||
background: #f0f0f0;
|
||||
overflow-y: auto;
|
||||
border-right: 1px solid #ccc;
|
||||
z-index: 1000;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
#controls label, #controls select, #controls input[type=number], #controls input[type=text], #controls input[type=button] {
|
||||
display: block;
|
||||
margin-bottom: 8px;
|
||||
width: 100%; /* Make controls fill width */
|
||||
box-sizing: border-box;
|
||||
}
|
||||
#controls select[multiple] {
|
||||
height: 250px;
|
||||
width: 100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
/* Leaflet Layer Styling */
|
||||
.my-label { /* County Labels */
|
||||
font-size: 8px; font-weight: bold; color: #333; background-color: rgba(255, 255, 255, 0.7);
|
||||
border: 0px; border-radius: 3px; padding: 1px 3px; white-space: nowrap; pointer-events: none;
|
||||
}
|
||||
.leaflet-tooltip { /* Optional overrides for base tooltip */ }
|
||||
.lsr-label-tooltip {
|
||||
font-size: 9px !important; font-weight: bold !important; color: black !important; white-space: nowrap;
|
||||
background: transparent !important; border: none !important; box-shadow: none !important;
|
||||
padding: 0 !important; margin: 0 !important; display: inline-block; text-align: center;
|
||||
vertical-align: middle; pointer-events: none;
|
||||
}
|
||||
/* Hide the template canvas */
|
||||
#hydrograph-canvas-template { display: none; }
|
||||
#summary-container ul.analysis-list {
|
||||
margin-top: 0; /* Remove extra space above list */
|
||||
padding-left: 25px; /* Keep indentation */
|
||||
list-style-type: disc; /* Or desired bullet style */
|
||||
}
|
||||
/* Style for the per-zone verification list */
|
||||
#summary-container .zone-verification-list {
|
||||
margin-top: 0; /* Remove extra space above list */
|
||||
padding-left: 25px; /* Keep indentation */
|
||||
list-style-type: disc; /* Or desired bullet style */
|
||||
font-size: 0.9em; /* Match other sub-details */
|
||||
}
|
||||
#summary-container .zone-verification-list li {
|
||||
margin-bottom: 0.2em; /* Smaller spacing between zones */
|
||||
}
|
||||
/* Style for export buttons */
|
||||
#export-controls {
|
||||
margin-top: 15px;
|
||||
border-top: 1px solid #ccc;
|
||||
padding-top: 10px;
|
||||
}
|
||||
#export-controls h3 {
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
#export-controls input[type=button] {
|
||||
background-color: #e7e7e7;
|
||||
border: 1px solid #ccc;
|
||||
padding: 6px 12px;
|
||||
cursor: pointer;
|
||||
margin-top: 5px; /* Space between buttons */
|
||||
}
|
||||
#export-controls input[type=button]:hover {
|
||||
background-color: #d7d7d7;
|
||||
}
|
||||
#export-controls input[type=button]:disabled {
|
||||
background-color: #f5f5f5;
|
||||
color: #aaa;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="container">
|
||||
<div id="controls">
|
||||
<h3>Select Products</h3>
|
||||
<label for="yeartowork">Choose a year:</label>
|
||||
<select name="yeartowork" id="yeartowork" onchange="getwwas()">
|
||||
<option value="2025">2025</option>
|
||||
<option value="2024">2024</option>
|
||||
<option value="2023">2023</option>
|
||||
<option value="2022">2022</option>
|
||||
</select>
|
||||
<!-- Event selector added by JS -->
|
||||
<label for="eventFilterInput" style="margin-top: 15px;">Filter Events:</label>
|
||||
<input type="text" id="eventFilterInput" placeholder="Type to filter list..." />
|
||||
<h3>Options</h3>
|
||||
<label for="lsrbuffer">Time Buffer for Reports (hrs):</label>
|
||||
<input id="lsrbuffer" name="lsrbuffer" type="number" value="1">
|
||||
<label for="powerthresh">Power Outage Threshold for Reporting:</label>
|
||||
<input id="powerthresh" name="powerthresh" type="number" value="50">
|
||||
<input id="generateSummaryBtn" type="button" value="Generate Summary" onclick="generateSummaryForSelectedEvents();" style="margin-top: 15px; font-weight: bold; background-color: #add8e6;" />
|
||||
<!-- Export Controls Section -->
|
||||
<div id="export-controls">
|
||||
<h3>Export Options</h3>
|
||||
<input type="button" id="exportLsrCsvBtn" value="Export All Reports (CSV)" onclick="generateLsrCsv();" disabled>
|
||||
<input type="button" id="exportKmlBtn" value="Export Products/Reports (KML)" onclick="generateKml();" disabled>
|
||||
Note: Reports not from LSRs (power outage, DOT, 911, etc) will be filtered if an LSR has already been issued within 1 mile of them.
|
||||
</div>
|
||||
</div>
|
||||
<div id="map-container">
|
||||
<div id="mapid"></div>
|
||||
<div id="summary-container">
|
||||
<!-- Copy button will be added here by JS in buildHtmlSummary -->
|
||||
<p>Select one or more warnings and click "Generate Summary".</p>
|
||||
<!-- Summary content will be generated here -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
let mymap;
|
||||
let geoJSONwwas; // Layer for selected warning polygons
|
||||
let geoJSONcounties; // Layer for county/zone outlines & zone lookup
|
||||
let markersLayer; // Layer for LSR markers (Filtered by selected product polygons)
|
||||
let gaugeMarkersLayer; // Layer for NWS Gauge markers
|
||||
let currentSelectedEvents = []; // Raw event objects selected by the user
|
||||
let currentSelectedEventDetails = []; // Detailed info { event, data, fullDetails } for selected events
|
||||
let unfilteredLSRs = { type: "FeatureCollection", features: [] }; // Stores ALL LSRs fetched for the time range + buffer
|
||||
let hydrographCharts = {}; // Keep track of Chart.js instances if needed (though now generating images directly)
|
||||
let prePlotLayer; // Layer for pre-plotting selected warnings
|
||||
let eventGeometryCache = {}; // Simple cache for event geometry { 'phen|sig|etn|year': geoJsonData }
|
||||
</script>
|
||||
<!-- Core helper functions (constants, lookups, LSR verification helpers, contrast colors, status calculations) -->
|
||||
<script src="stormdata/core_helpers.js"></script>
|
||||
<!-- Utility functions (date formatting, formatting helpers, utility functions) -->
|
||||
<script src="stormdata/utility_functions.js"></script>
|
||||
<!-- Data fetching functions (fetching from APIs, data retrieval helpers) -->
|
||||
<script src="stormdata/data_fetching.js"></script>
|
||||
<!-- Parsing functions (NWS product parsing, time string parsing, text parsing) -->
|
||||
<script src="stormdata/parsing_functions.js"></script>
|
||||
<!-- Analysis functions (hydrograph analysis, LSR processing, data analysis) -->
|
||||
<script src="stormdata/analysis_functions.js"></script>
|
||||
<!-- Hydro functions (gauge handling, hydrograph generation, water data functions) -->
|
||||
<script src="stormdata/hydro_functions.js"></script>
|
||||
<!-- Map functions (map setup, styling, Leaflet helpers, marker creation) -->
|
||||
<script src="stormdata/map_functions.js"></script>
|
||||
<!-- Event handling functions (VTEC event handling, selection, summary generation triggers) -->
|
||||
<script src="stormdata/event_handling_functions.js"></script>
|
||||
<!-- UI/Summary functions (HTML generation, summary building, export functions) -->
|
||||
<script src="stormdata/ui_summary_functions.js"></script>
|
||||
<canvas id="hydrograph-canvas-template" width="800" height="450" style="display: none;"></canvas>
|
||||
</body>
|
||||
</html>
|
||||
572
stormdata.php
Normal file
572
stormdata.php
Normal file
@@ -0,0 +1,572 @@
|
||||
<?php
|
||||
|
||||
// --- Error Reporting (Recommended for Development) ---
|
||||
ini_set('display_errors', 1);
|
||||
ini_set('display_startup_errors', 1);
|
||||
error_reporting(E_ALL);
|
||||
// In production, turn display_errors off and configure log_errors.
|
||||
// ini_set('display_errors', 0);
|
||||
// ini_set('log_errors', 1);
|
||||
// ini_set('error_log', '/path/to/your/php_error.log');
|
||||
|
||||
// --- Database Connection ---
|
||||
$dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws");
|
||||
|
||||
if (!$dbconn) {
|
||||
error_log('Database connection failed: ' . pg_last_error());
|
||||
http_response_code(503);
|
||||
header('Content-Type: application/json; charset=utf-8');
|
||||
echo json_encode(['error' => 'Service temporarily unavailable due to database connection issue.']);
|
||||
exit;
|
||||
}
|
||||
|
||||
// --- Helper Functions ---
|
||||
|
||||
/**
|
||||
* Sends a JSON error response and terminates the script.
|
||||
* @param int $http_code The HTTP status code.
|
||||
* @param string $message The error message for the client.
|
||||
* @param ?string $log_message Optional detailed message for the server error log.
|
||||
*/
|
||||
function send_error(int $http_code, string $message, ?string $log_message = null): void {
|
||||
if ($log_message) { error_log($log_message); }
|
||||
elseif ($http_code >= 500) { error_log("Server Error (" . $http_code . "): " . $message); }
|
||||
http_response_code($http_code);
|
||||
header('Content-Type: application/json; charset=utf-8');
|
||||
echo json_encode(['error' => $message]);
|
||||
exit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a GeoJSON FeatureCollection response and terminates the script.
|
||||
* @param array $features An array of GeoJSON Feature objects.
|
||||
*/
|
||||
function send_geojson(array $features): void {
|
||||
$geojson_output = ['type' => 'FeatureCollection', 'features' => $features];
|
||||
header('Content-Type: application/geo+json; charset=utf-8');
|
||||
echo json_encode($geojson_output);
|
||||
exit;
|
||||
}
|
||||
|
||||
// --- Main Request Handling ---
|
||||
|
||||
if ($_SERVER['REQUEST_METHOD'] === 'POST') {
|
||||
|
||||
error_log("--- New POST Request Received ---");
|
||||
$input_data = null;
|
||||
$request_type = null;
|
||||
$contentType = trim(strtolower($_SERVER['HTTP_CONTENT_TYPE'] ?? $_SERVER['CONTENT_TYPE'] ?? ''));
|
||||
error_log("Received Content-Type: " . $contentType);
|
||||
|
||||
// ***********************************************************
|
||||
// ***** START: CRUCIAL JSON INPUT HANDLING BLOCK *****
|
||||
// ***********************************************************
|
||||
if (strpos($contentType, 'application/json') === 0) {
|
||||
error_log("Content-Type identified as JSON.");
|
||||
$raw_post_data = file_get_contents('php://input');
|
||||
error_log("Raw php://input length: " . strlen($raw_post_data));
|
||||
|
||||
if ($raw_post_data === false || $raw_post_data === '') {
|
||||
send_error(400, 'Received empty request body or could not read input.', "Error: Could not read php://input or it was empty.");
|
||||
}
|
||||
|
||||
// Decode JSON into an associative array
|
||||
$input_data = json_decode($raw_post_data, true); // Use 'true' for array
|
||||
|
||||
if (json_last_error() !== JSON_ERROR_NONE) {
|
||||
send_error(400, 'Invalid JSON payload received.', 'JSON Decode Error: ' . json_last_error_msg() . " | Raw data snippet: " . substr($raw_post_data, 0, 100));
|
||||
} elseif (!is_array($input_data)) {
|
||||
send_error(400, 'Invalid JSON payload: Expected a JSON object.', "JSON Decode Warning: Result is not an array. Data: " . print_r($input_data, true));
|
||||
} else {
|
||||
error_log("JSON Decode Successful.");
|
||||
// ** GET request_type FROM THE DECODED ARRAY **
|
||||
$request_type = $input_data['request_type'] ?? null;
|
||||
error_log("Extracted request_type from JSON: " . ($request_type ?? 'null'));
|
||||
}
|
||||
} else {
|
||||
// If JSON is strictly required, reject other types
|
||||
send_error(415, 'Unsupported Media Type. This endpoint requires application/json.', "Unsupported Media Type Received: " . $contentType);
|
||||
}
|
||||
// ***********************************************************
|
||||
// ***** END: CRUCIAL JSON INPUT HANDLING BLOCK *****
|
||||
// ***********************************************************
|
||||
|
||||
|
||||
// --- Final Check and Routing ---
|
||||
if ($request_type === null) {
|
||||
if (is_array($input_data) && !isset($input_data['request_type'])) {
|
||||
send_error(400, 'Missing "request_type" field within the request payload.');
|
||||
} else {
|
||||
error_log("Routing check reached but request_type is null without prior exit.");
|
||||
send_error(400, 'Missing required parameter: request_type (or processing error).');
|
||||
}
|
||||
}
|
||||
|
||||
error_log("Routing request for type: " . $request_type);
|
||||
switch ($request_type) {
|
||||
case 'ohgo':
|
||||
// ** Pass the $input_data array **
|
||||
handle_ohgo_request($dbconn, $input_data);
|
||||
break;
|
||||
case 'ohgonopoly':
|
||||
// ** Pass the $input_data array **
|
||||
handle_ohgo_request_no_poly($dbconn, $input_data);
|
||||
break;
|
||||
case 'power':
|
||||
// ** Pass the $input_data array **
|
||||
handle_power_request($dbconn, $input_data);
|
||||
break;
|
||||
case 'powernopoly':
|
||||
// ** Pass the $input_data array **
|
||||
handle_power_request_no_poly($dbconn, $input_data);
|
||||
break;
|
||||
case 'wupoly':
|
||||
// ** Pass the $input_data array **
|
||||
handle_wu_request_poly($dbconn, $input_data);
|
||||
break;
|
||||
case 'campoly':
|
||||
// ** Pass the $input_data array **
|
||||
handle_cam_request($dbconn, $input_data);
|
||||
break;
|
||||
|
||||
|
||||
|
||||
default:
|
||||
send_error(400, 'Invalid request_type specified: ' . htmlspecialchars($request_type));
|
||||
break;
|
||||
}
|
||||
|
||||
} else {
|
||||
http_response_code(405);
|
||||
header('Allow: POST');
|
||||
header('Content-Type: application/json; charset=utf-8');
|
||||
echo json_encode(['error' => 'Invalid request method. Only POST is allowed.']);
|
||||
exit;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// --- Request Handler Functions ---
|
||||
|
||||
|
||||
|
||||
function handle_cam_request($dbconn, array $data): void {
|
||||
error_log("Handling 'camera image' request.");
|
||||
|
||||
// --- 1. Get Data from the $data array ---
|
||||
$start_time_str = $data['start_time'] ?? null;
|
||||
$end_time_str = $data['end_time'] ?? null;
|
||||
$geojson_str = $data['area_geojson'] ?? null;
|
||||
|
||||
// --- 2. Validation ---
|
||||
if ($start_time_str === null || $end_time_str === null || $geojson_str === null) {
|
||||
send_error(400, 'Missing required parameters for camera request: start_time, end_time, area_geojson');
|
||||
}
|
||||
|
||||
// Validate Timestamps (basic check, can be more robust)
|
||||
// Consider using DateTime objects for more rigorous validation if needed
|
||||
if (strtotime($start_time_str) === false) {
|
||||
send_error(400, 'Invalid start_time format.');
|
||||
}
|
||||
if (strtotime($end_time_str) === false) {
|
||||
send_error(400, 'Invalid end_time format.');
|
||||
}
|
||||
// Ensure start is before end? Optional, depends on requirements.
|
||||
// if (strtotime($start_time_str) >= strtotime($end_time_str)) {
|
||||
// send_error(400, 'start_time must be before end_time.');
|
||||
// }
|
||||
|
||||
|
||||
// Validate GeoJSON
|
||||
$geojson_obj = json_decode($geojson_str);
|
||||
if (json_last_error() !== JSON_ERROR_NONE) {
|
||||
send_error(400, 'Invalid area_geojson provided: Contains invalid JSON string.', 'GeoJSON Decode Error: ' . json_last_error_msg());
|
||||
}
|
||||
if (!is_object($geojson_obj) || !isset($geojson_obj->type) || !in_array($geojson_obj->type, ['Polygon', 'MultiPolygon'])) {
|
||||
send_error(400, 'Invalid area_geojson provided: Decoded JSON must be a Polygon or MultiPolygon object.');
|
||||
}
|
||||
|
||||
// --- 3. Prepare and Execute Query ---
|
||||
// This query finds active cameras within the GeoJSON area,
|
||||
// then LEFT JOINs aggregated image data from camdb within the time range.
|
||||
// We use jsonb_agg for efficiency and COALESCE to return an empty array []
|
||||
// for cameras with no images in the range, instead of NULL.
|
||||
// NOTE: Selecting c.* assumes 'geom' is not excessively large or problematic
|
||||
// when fetched directly. If it is, list all columns except 'geom'.
|
||||
// We explicitly fetch ST_AsGeoJSON for the geometry representation.
|
||||
$query = "
|
||||
SELECT
|
||||
c.*, -- Select all columns from cams
|
||||
ST_AsGeoJSON(c.geom) as geometry_geojson, -- Get geometry as GeoJSON string
|
||||
COALESCE(img_agg.images, '[]'::jsonb) AS images -- Get aggregated images or empty JSON array
|
||||
FROM
|
||||
cams c
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
camid,
|
||||
jsonb_agg(
|
||||
jsonb_build_object(
|
||||
'timestamp', dateutc,
|
||||
'url', filepath -- Assuming filepath is the relative URL path
|
||||
) ORDER BY dateutc ASC -- Order images chronologically
|
||||
) AS images
|
||||
FROM
|
||||
camdb
|
||||
WHERE
|
||||
dateutc >= $1::timestamp -- start_time
|
||||
AND dateutc <= $2::timestamp -- end_time
|
||||
GROUP BY
|
||||
camid
|
||||
) AS img_agg ON c.camid = img_agg.camid
|
||||
WHERE
|
||||
c.active = TRUE -- Only active cameras
|
||||
AND ST_Within(c.geom, ST_GeomFromGeoJSON($3)) -- Camera location within area
|
||||
ORDER BY
|
||||
c.camid; -- Optional: Order cameras by ID
|
||||
";
|
||||
|
||||
$params = array(
|
||||
$start_time_str, // $1: start_time
|
||||
$end_time_str, // $2: end_time
|
||||
$geojson_str // $3: area_geojson string
|
||||
);
|
||||
|
||||
$result = pg_query_params($dbconn, $query, $params);
|
||||
|
||||
if (!$result) {
|
||||
send_error(500, 'Database query failed for camera data.', 'Camera Query Failed: ' . pg_last_error($dbconn) . " | Query: " . $query . " | Params: " . print_r($params, true));
|
||||
}
|
||||
|
||||
// --- 4. Process Results ---
|
||||
$cameras_output = [];
|
||||
while ($row = pg_fetch_assoc($result)) {
|
||||
// Decode the geometry GeoJSON string into a PHP object/array
|
||||
$geometry = json_decode($row['geometry_geojson']);
|
||||
if (json_last_error() !== JSON_ERROR_NONE) {
|
||||
error_log('Failed to decode geometry for camid ' . ($row['camid'] ?? 'N/A') . ': ' . json_last_error_msg());
|
||||
// Decide how to handle: skip camera, set geometry to null, etc.
|
||||
$geometry = null; // Example: Set to null on error
|
||||
}
|
||||
|
||||
// Decode the images JSON string (from jsonb_agg) into a PHP array
|
||||
$images = json_decode($row['images']);
|
||||
if (json_last_error() !== JSON_ERROR_NONE) {
|
||||
error_log('Failed to decode images JSON for camid ' . ($row['camid'] ?? 'N/A') . ': ' . json_last_error_msg());
|
||||
// Decide how to handle: skip camera, set images to empty array, etc.
|
||||
$images = []; // Example: Set to empty array on error
|
||||
}
|
||||
|
||||
// Prepare the output structure for this camera
|
||||
$camera_data = $row; // Start with all columns fetched via c.*
|
||||
|
||||
// Replace/remove raw JSON strings and potentially the original binary geom
|
||||
unset($camera_data['geometry_geojson']); // Remove the raw GeoJSON string
|
||||
unset($camera_data['geom']); // Remove the raw binary geometry if it was fetched by c.*
|
||||
$camera_data['geometry'] = $geometry; // Add the decoded geometry object/array
|
||||
$camera_data['images'] = $images; // Add the decoded images array
|
||||
|
||||
$cameras_output[] = $camera_data;
|
||||
}
|
||||
pg_free_result($result);
|
||||
error_log("Found " . count($cameras_output) . " cameras matching criteria.");
|
||||
|
||||
// --- 5. Send Response ---
|
||||
// Use a function like send_json defined above, or inline the logic:
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode($cameras_output, JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES);
|
||||
exit; // Important to stop script execution here
|
||||
// Alternatively, if you have the helper:
|
||||
// send_json($cameras_output);
|
||||
}
|
||||
|
||||
function handle_wu_request_poly($dbconn, array $data): void { // Takes $data array
|
||||
|
||||
$polygons = $data['polygons'] ?? []; // Array of WKT polygons, e.g., ['POLYGON((...))', 'POLYGON((...))']
|
||||
$start_time = $data['start_time'] ?? '2025-01-01 00:00:00'; // e.g., '2025-01-01 00:00:00'
|
||||
$end_time = $data['end_time'] ?? '2025-01-02 00:00:00'; // e.g., '2025-01-02 00:00:00'
|
||||
|
||||
if (empty($polygons)) {
|
||||
http_response_code(400);
|
||||
echo json_encode(['error' => 'No polygons provided']);
|
||||
pg_close($dbconn);
|
||||
exit;
|
||||
}
|
||||
|
||||
$polygon_placeholders = [];
|
||||
$params = [];
|
||||
$param_index = 1;
|
||||
|
||||
foreach ($polygons as $polygon) {
|
||||
$polygon_placeholders[] = "ST_GeomFromText(\$$param_index, 4326)";
|
||||
$params[] = $polygon;
|
||||
$param_index++;
|
||||
}
|
||||
|
||||
$params[] = $start_time;
|
||||
$params[] = $end_time;
|
||||
$start_time_placeholder = "\$$param_index";
|
||||
$param_index++;
|
||||
$end_time_placeholder = "\$$param_index";
|
||||
|
||||
$polygon_sql = implode(', ', $polygon_placeholders);
|
||||
|
||||
$sql = "
|
||||
SELECT wo.*
|
||||
FROM wuobs wo
|
||||
JOIN wusites ws ON wo.stationid = ws.stationid
|
||||
WHERE ws.geom && ST_Union(ARRAY[$polygon_sql])::geometry
|
||||
AND ST_Within(ws.geom, ST_Union(ARRAY[$polygon_sql])::geometry)
|
||||
AND wo.observation_time BETWEEN $start_time_placeholder AND $end_time_placeholder
|
||||
";
|
||||
|
||||
$result = pg_query_params($dbconn, $sql, $params);
|
||||
|
||||
if ($result === false) {
|
||||
http_response_code(500);
|
||||
echo json_encode(['error' => pg_last_error($dbconn)]);
|
||||
pg_close($dbconn);
|
||||
exit;
|
||||
}
|
||||
|
||||
// Fetch results
|
||||
$results = [];
|
||||
while ($row = pg_fetch_assoc($result)) {
|
||||
$results[] = $row;
|
||||
}
|
||||
|
||||
// Free result (the shared connection is closed at the end of the script)
|
||||
pg_free_result($result);
|
||||
|
||||
// Output results as JSON
|
||||
header('Content-Type: application/json');
|
||||
echo json_encode($results);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles the 'ohgo' data request.
|
||||
* @param resource $dbconn The active database connection resource.
|
||||
* @param array $data The associative array of input parameters (from JSON).
|
||||
*/
|
||||
function handle_ohgo_request($dbconn, array $data): void { // Takes $data array
|
||||
error_log("Handling 'ohgo' request.");
|
||||
// --- 1. Get Data from the $data array ---
|
||||
$start = $data['start_time'] ?? null; // Use $data, use correct key
|
||||
$geojson_str = $data['area_geojson'] ?? null; // Use $data, not $_POST
|
||||
$end = $data['end_time'] ?? null; // Use $data, use correct key
|
||||
|
||||
// --- 2. Validation ---
|
||||
if ($start === null || $geojson_str === null || $end === null) { send_error(400, 'Missing required parameters for ohgo request: start_time, area_geojson, end_time'); }
|
||||
$geojson_obj = json_decode($geojson_str);
|
||||
if (json_last_error() !== JSON_ERROR_NONE) { send_error(400, 'Invalid GeoJSON provided: Not valid JSON.', 'GeoJSON Decode Error: ' . json_last_error_msg()); }
|
||||
if (!isset($geojson_obj->type) || !in_array($geojson_obj->type, ['Polygon', 'MultiPolygon'])) { send_error(400, 'Invalid GeoJSON provided: Type must be Polygon or MultiPolygon.'); }
|
||||
|
||||
// --- 3. Prepare and Execute Query ---
|
||||
$query = "SELECT ST_AsGeoJSON(geom) AS geometry, category, roadstatus, county, state, location, routename, description, start AS start_timestamp, endtime AS end_timestamp, lastupdate FROM ohgo WHERE start > $1::timestamp AND start < $3::timestamp AND ST_Within(geom, ST_GeomFromGeoJSON($2)) ORDER BY start ASC";
|
||||
$params = array($start, $geojson_str, $end);
|
||||
$result = pg_query_params($dbconn, $query, $params);
|
||||
if (!$result) { send_error(500, 'Database query failed for ohgo data.', 'OHGO Query Failed: ' . pg_last_error($dbconn)); }
|
||||
|
||||
// --- 4. Process Results ---
|
||||
$features = [];
|
||||
while ($line = pg_fetch_assoc($result)) {
|
||||
$geometry = json_decode($line['geometry']);
|
||||
if (json_last_error() !== JSON_ERROR_NONE) { error_log('Failed to decode geometry for ohgo row: ' . json_last_error_msg()); continue; }
|
||||
$properties = $line; unset($properties['geometry']);
|
||||
$features[] = ['type' => 'Feature', 'geometry' => $geometry, 'properties' => $properties];
|
||||
}
|
||||
pg_free_result($result);
|
||||
error_log("Found " . count($features) . " features for ohgo request.");
|
||||
|
||||
// --- 5. Send Response ---
|
||||
send_geojson($features);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Handles the 'power' data request.
|
||||
* @param resource $dbconn The active database connection resource.
|
||||
* @param array $data The associative array of input parameters (from JSON).
|
||||
*/
|
||||
function handle_power_request($dbconn, array $data): void { // Takes $data array
|
||||
error_log("Handling 'power' request.");
|
||||
// --- 1. Get Data from the $data array ---
|
||||
// ** Match keys from your fetch request body: start_time, area_geojson, etc. **
|
||||
$start = $data['start_time'] ?? null; // Use $data, use correct key
|
||||
$geojson_str = $data['area_geojson'] ?? null; // Use $data, use correct key
|
||||
$end = $data['end_time'] ?? null; // Use $data, use correct key
|
||||
$buffer_hours = $data['buffer'] ?? 0;// Use $data, use correct key
|
||||
|
||||
// --- 2. Validation ---
|
||||
if ($start === null || $geojson_str === null || $end === null || $buffer_hours === null) {
|
||||
// Update error message to reflect the actual keys expected from JSON
|
||||
send_error(400, 'Missing required parameters for power request: start_time, area_geojson, end_time, buffer');
|
||||
}
|
||||
if (!is_numeric($buffer_hours) || ($buffer_hours_float = floatval($buffer_hours)) < 0) { send_error(400, 'Invalid buffer_hours provided: Must be a non-negative number.'); }
|
||||
$buffer_hours_int = (int)$buffer_hours_float;
|
||||
$geojson_obj = json_decode($geojson_str); // Decode the *string* value from the JSON input
|
||||
if (json_last_error() !== JSON_ERROR_NONE) { send_error(400, 'Invalid area_geojson provided: Contains invalid JSON string.', 'GeoJSON Decode Error: ' . json_last_error_msg()); }
|
||||
if (!is_object($geojson_obj) || !isset($geojson_obj->type) || !in_array($geojson_obj->type, ['Polygon', 'MultiPolygon'])) { send_error(400, 'Invalid area_geojson provided: Decoded JSON must be a Polygon or MultiPolygon object.'); }
|
||||
// ** Crucial Fix: Use the decoded $geojson_str for the query parameter, not $geojson_obj **
|
||||
|
||||
// --- 3. Prepare and Execute Query ---
|
||||
// ** VERIFY TABLE/COLUMN NAMES FOR POWER TABLE **
|
||||
$query = "SELECT ST_AsGeoJSON(realgeom) AS geometry, derivedstart AS start_timestamp, cause, peakoutage, lastchange AS end_timestamp FROM power WHERE derivedstart >= $1::timestamp AND derivedstart < ($3::timestamp + make_interval(hours => $4::integer)) AND ST_Within(realgeom, ST_GeomFromGeoJSON($2)) ORDER BY derivedstart ASC";
|
||||
$params = array(
|
||||
$start, // $1: start_time from JSON
|
||||
$geojson_str, // $2: area_geojson STRING from JSON
|
||||
$end, // $3: end_time from JSON
|
||||
$buffer_hours_int // $4: buffer_hours from JSON (as integer)
|
||||
);
|
||||
$result = pg_query_params($dbconn, $query, $params);
|
||||
if (!$result) { send_error(500, 'Database query failed for power data.', 'Power Query Failed: ' . pg_last_error($dbconn) . " | Query: " . $query . " | Params: " . print_r($params, true)); }
|
||||
|
||||
// --- 4. Process Results ---
|
||||
$features = [];
|
||||
while ($line = pg_fetch_assoc($result)) {
|
||||
$geometry = json_decode($line['geometry']);
|
||||
if (json_last_error() !== JSON_ERROR_NONE) { error_log('Failed to decode geometry for power row: ' . json_last_error_msg()); continue; }
|
||||
$properties = $line; unset($properties['geometry']);
|
||||
$features[] = ['type' => 'Feature', 'geometry' => $geometry, 'properties' => $properties];
|
||||
}
|
||||
pg_free_result($result);
|
||||
error_log("Found " . count($features) . " features for power request.");
|
||||
|
||||
// --- 5. Send Response ---
|
||||
send_geojson($features);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Handles the 'ohgo' data request.
|
||||
* @param resource $dbconn The active database connection resource.
|
||||
* @param array $data The associative array of input parameters (from JSON).
|
||||
*/
|
||||
function handle_ohgo_request_no_poly($dbconn, array $data): void { // Takes $data array
|
||||
error_log("Handling 'ohgo' request.");
|
||||
// --- 1. Get Data from the $data array ---
|
||||
$start = $data['start_time'] ?? null; // Use $data, use correct key
|
||||
|
||||
$end = $data['end_time'] ?? null; // Use $data, use correct key
|
||||
|
||||
// --- 2. Validation ---
|
||||
if ($start === null || $end === null) { send_error(400, 'Missing required parameters for ohgo request: start_time, end_time'); }
|
||||
|
||||
// --- 3. Prepare and Execute Query ---
|
||||
$query = "SELECT ST_AsGeoJSON(geom) AS geometry, county, state AS st, location, routename AS city, upper(cwa) AS wfo, 'FLOOD' AS typetext, 'Department of Highways' AS source, description AS remark,
|
||||
TO_CHAR(start, 'YYYY-MM-DD\"T\"HH24:MI:SS\"Z\"') AS valid
|
||||
FROM ohgo
|
||||
WHERE start > $1::timestamp
|
||||
AND start < $2::timestamp
|
||||
AND cwa = 'RLX'
|
||||
ORDER BY start ASC";
|
||||
$params = array($start, $end);
|
||||
$result = pg_query_params($dbconn, $query, $params);
|
||||
if (!$result) { send_error(500, 'Database query failed for ohgo data.', 'OHGO Query Failed: ' . pg_last_error($dbconn)); }
|
||||
|
||||
// --- 4. Process Results ---
|
||||
$features = [];
|
||||
while ($line = pg_fetch_assoc($result)) {
|
||||
$geometry = json_decode($line['geometry']);
|
||||
if (json_last_error() !== JSON_ERROR_NONE) { error_log('Failed to decode geometry for ohgo row: ' . json_last_error_msg()); continue; }
|
||||
$properties = $line; unset($properties['geometry']);
|
||||
$features[] = ['type' => 'Feature', 'geometry' => $geometry, 'properties' => $properties];
|
||||
}
|
||||
pg_free_result($result);
|
||||
error_log("Found " . count($features) . " features for ohgo request.");
|
||||
|
||||
// --- 5. Send Response ---
|
||||
send_geojson($features);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Handles the 'power' data request.
|
||||
* @param resource $dbconn The active database connection resource.
|
||||
* @param array $data The associative array of input parameters (from JSON).
|
||||
*/
|
||||
function handle_power_request_no_poly($dbconn, array $data): void { // Takes $data array
|
||||
error_log("Handling 'power' request.");
|
||||
// --- 1. Get Data from the $data array ---
|
||||
// ** Match keys from your fetch request body: start_time, area_geojson, etc. **
|
||||
$start = $data['start_time'] ?? null; // Use $data, use correct key
|
||||
$end = $data['end_time'] ?? null; // Use $data, use correct key
|
||||
$outage_threshold = $data['outage_threshold'] ?? 9;
|
||||
$buffer_hours = $data['buffer'] ?? 0;// Use $data, use correct key
|
||||
|
||||
// --- 2. Validation ---
|
||||
if ($start === null || $end === null || $buffer_hours === null) {
|
||||
// Update error message to reflect the actual keys expected from JSON
|
||||
send_error(400, 'Missing required parameters for power request: start_time, end_time, buffer');
|
||||
}
|
||||
if (!is_numeric($buffer_hours) || ($buffer_hours_float = floatval($buffer_hours)) < 0) { send_error(400, 'Invalid buffer_hours provided: Must be a non-negative number.'); }
|
||||
$buffer_hours_int = (int)$buffer_hours_float;
|
||||
$outage_thresh = (float)$outage_threshold;
|
||||
|
||||
|
||||
|
||||
|
||||
// --- 3. Prepare and Execute Query ---
|
||||
// ** VERIFY TABLE/COLUMN NAMES FOR POWER TABLE **
|
||||
$query = "SELECT ST_AsGeoJSON(realgeom) AS geometry,
|
||||
TO_CHAR(derivedstart, 'YYYY-MM-DD\"T\"HH24:MI:SS\"Z\"') AS valid,
|
||||
('Power Outage affecting ' || peakoutage || ' customers caused by ' || COALESCE(cause, 'unknown')) AS remark,
|
||||
'Utility Company' as source,
|
||||
'POWER OUTAGE' as typetext,
|
||||
'U' as type,
|
||||
(ROUND(ST_Y(realgeom)::numeric, 3) || ', ' || ROUND(ST_X(realgeom)::numeric, 3)) AS city,
|
||||
county as county,
|
||||
state as state,
|
||||
state as st
|
||||
FROM power
|
||||
WHERE derivedstart >= $1::timestamp
|
||||
AND derivedstart < ($2::timestamp + make_interval(hours => $3::integer))
|
||||
and peakoutage > $4
|
||||
AND ST_Within(realgeom, (SELECT geom FROM public.cwa WHERE cwa = 'RLX'))
|
||||
ORDER BY derivedstart ASC";
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
$params = array(
|
||||
$start, // $1: start_time from JSON
|
||||
$end, // $2: end_time from JSON
|
||||
$buffer_hours_int, // $3: buffer_hours from JSON (as integer)
|
||||
$outage_thresh // $4
|
||||
);
|
||||
$result = pg_query_params($dbconn, $query, $params);
|
||||
if (!$result) { send_error(500, 'Database query failed for power data.', 'Power Query Failed: ' . pg_last_error($dbconn) . " | Query: " . $query . " | Params: " . print_r($params, true)); }
|
||||
|
||||
// --- 4. Process Results ---
|
||||
$features = [];
|
||||
while ($line = pg_fetch_assoc($result)) {
|
||||
$geometry = json_decode($line['geometry']);
|
||||
if (json_last_error() !== JSON_ERROR_NONE) { error_log('Failed to decode geometry for power row: ' . json_last_error_msg()); continue; }
|
||||
$properties = $line; unset($properties['geometry']);
|
||||
$features[] = ['type' => 'Feature', 'geometry' => $geometry, 'properties' => $properties];
|
||||
}
|
||||
pg_free_result($result);
|
||||
error_log("Found " . count($features) . " features for power request.");
|
||||
|
||||
// --- 5. Send Response ---
|
||||
send_geojson($features);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// --- Close Database Connection ---
|
||||
if ($dbconn) {
|
||||
pg_close($dbconn);
|
||||
error_log("Database connection closed.");
|
||||
}
|
||||
|
||||
?>
|
||||
1058
stormdata_SERVERMAP.html
Normal file
1058
stormdata_SERVERMAP.html
Normal file
File diff suppressed because it is too large
Load Diff
3081
stormdata_backup.html
Normal file
3081
stormdata_backup.html
Normal file
File diff suppressed because it is too large
Load Diff
173
summary.html
Normal file
173
summary.html
Normal file
@@ -0,0 +1,173 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX Power Outage Map</title>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
|
||||
<script src="https://code.jquery.com/ui/1.13.1/jquery-ui.js" integrity="sha256-6XMVI0zB8cRzfZjqKcD01PBsAy3FlDASrlC8SxCpInY=" crossorigin="anonymous"></script>
|
||||
<link rel="stylesheet" href="https://code.jquery.com/ui/1.13.1/themes/smoothness/jquery-ui.css">
|
||||
<link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
|
||||
<link href="https://unpkg.com/tabulator-tables@5.4.4/dist/css/tabulator.min.css" rel="stylesheet">
|
||||
<script type="text/javascript" src="https://unpkg.com/tabulator-tables@5.4.4/dist/js/tabulator.min.js"></script>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div id="dialog">
|
||||
Enter Start/End Times in UTC
|
||||
<input type="datetime-local" id="start" name="start">
|
||||
<input type="datetime-local" id="end" name="end">
|
||||
|
||||
<button id="archive" onclick="rackandstack()">Load Data From Selected Time Period</button>
|
||||
|
||||
</div>
|
||||
<div id="countysum"></div>
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
function googleMap(lat,lon){
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + lat + "+" + lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
|
||||
|
||||
var countysum;
|
||||
function rackandstack() {
|
||||
var start = document.getElementById("start").value;
|
||||
var end = document.getElementById("end").value;
|
||||
|
||||
$.getJSON(`powerapi.php?max=potato&start=${start}&end=${end}`, function(data) {
|
||||
|
||||
$.getJSON('powerapi.php?county=r', function(data1) {
|
||||
countylist(data,data1);
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
var wvstateout,kystateout,ohstateout,vastateout;
|
||||
var wvstateserved,kystateserved,ohstateserved,vastateserved;
|
||||
|
||||
|
||||
function countylist(data,data1) {
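    // Joins per-county outage rows (data) with customers-served rows (data1) on county+state,
    // computes each county's max percent out, then accumulates per-state outage/served totals
    // so every row also carries its state's rollup before the table is built.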
|
||||
for (i=0; i< data.length; i++) {
|
||||
for (j=0; j<data1.length;j++){
|
||||
if (data[i].county == data1[j].county && data[i].state == data1[j].state) {
|
||||
data[i]['served'] = data1[j]['served'];
|
||||
data[i]['percent'] = Math.round(data[i]['max']/data[i]['served']*1000)/10;
|
||||
}
|
||||
}
|
||||
}
|
||||
states = ['WV','KY','OH','VA'];
|
||||
for (l in states) {
|
||||
stateout = 0;
|
||||
stateserved = 0;
|
||||
statepercent = 0;
|
||||
|
||||
for (k=0; k<data.length; k++) {
|
||||
if (states[l] == data[k]['state']) {
|
||||
stateout = stateout + parseFloat(data[k]['max']);
|
||||
stateserved = stateserved + parseFloat(data[k]['served']);
|
||||
statepercent = Math.round(stateout/stateserved*1000)/10;
|
||||
if (states[l] == 'WV') {
|
||||
wvstateout = stateout;
|
||||
wvstateserved = stateserved;
|
||||
}
|
||||
if (states[l] == 'VA') {
|
||||
vastateout = stateout;
|
||||
vastateserved = stateserved;
|
||||
}
|
||||
if (states[l] == 'KY') {
|
||||
kystateout = stateout;
|
||||
kystateserved = stateserved;
|
||||
}
|
||||
if (states[l] == 'OH') {
|
||||
ohstateout = stateout;
|
||||
ohstateserved = stateserved;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
for (m in data) {
|
||||
if (data[m]['state'] == 'WV') {
|
||||
data[m]['stateout'] = wvstateout;
|
||||
data[m]['stateserved'] = wvstateserved;
|
||||
data[m]['statepercent'] = Math.floor(wvstateout/wvstateserved*1000)/10;
|
||||
}
|
||||
|
||||
if (data[m]['state'] == 'VA') {
|
||||
data[m]['stateout'] = vastateout;
|
||||
data[m]['stateserved'] = vastateserved;
|
||||
data[m]['statepercent'] = Math.floor(vastateout/vastateserved*1000)/10;
|
||||
}
|
||||
|
||||
if (data[m]['state'] == 'KY') {
|
||||
data[m]['stateout'] = kystateout;
|
||||
data[m]['stateserved'] = kystateserved;
|
||||
data[m]['statepercent'] = Math.floor(kystateout/kystateserved*1000)/10;
|
||||
}
|
||||
if (data[m]['state'] == 'OH') {
|
||||
data[m]['stateout'] = ohstateout;
|
||||
data[m]['stateserved'] = ohstateserved;
|
||||
data[m]['statepercent'] = Math.floor(ohstateout/ohstateserved*1000)/10;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
buildtable(data);
|
||||
}
|
||||
|
||||
function buildtable(data) {
|
||||
|
||||
|
||||
|
||||
table.setData(data);
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
var table = new Tabulator("#countysum", {
|
||||
responsiveLayout:true,
|
||||
tooltipsHeader:true,
|
||||
columns:[
|
||||
{title:"County", field:"county"},
|
||||
{title:"State", field:"state"},
|
||||
{title:"Max Outages", field:"max"},
|
||||
{title:"Served", field:"served"},
|
||||
{title:"Max % Out", field:"percent"},
|
||||
{title:"State Total Out", field:"stateout"},
|
||||
{title:"State Total Served", field:"stateserved"},
|
||||
{title:"State Total % Out", field:"statepercent"}
|
||||
],
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
175
svr.html
Normal file
175
svr.html
Normal file
@@ -0,0 +1,175 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX Severe Helper</title>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.js" integrity="sha512-WNLxfP/8cVYL9sj8Jnp6et0BkubLP31jhTG9vhL/F5uEZmg5wEzKoXp1kJslzPQWwPT1eyMiSxlKCgzHLOTOTQ==" crossorigin="anonymous"></script>
|
||||
<link href="https://unpkg.com/tabulator-tables@4.7.2/dist/css/tabulator.min.css" rel="stylesheet">
|
||||
<script type="text/javascript" src="https://unpkg.com/tabulator-tables@4.7.2/dist/js/tabulator.min.js"></script>
|
||||
|
||||
<div id="disclaimer">
|
||||
A factor of 2.5 is applied to raw outages to account for household size<br>
|
||||
|
||||
Click on the product id to go directly to the IAState page for the polygon<br>
|
||||
Click on the number under potentially verifying outages to evaluate the individual polygon/radar/outages<br>
|
||||
Click on Ver LSRs to see the LSRs that have been submitted for the polygon<br>
|
||||
Note: To prevent continuous calls to IAState for LSRs, you will need to refresh the page to update verifying LSRs<br>
|
||||
Note: The 5-mile buffer helps account for my outage points being the centroid of each outage polygon. If this number is large compared to the other outage number, you may need to take a look at the archived power map to see where those outages are. (I'm going to add showing all outage points for the valid time of the polygon plus a selectable buffer, along with the outage polygons, to my todo list!)
|
||||
|
||||
</div>
|
||||
<div id="outages"></div>
|
||||
|
||||
<script>
|
||||
lsrdata = {}
|
||||
function googleMap(cell, formatterParams){
|
||||
return "http://maps.google.com/maps?t=k&q=loc:" + cell.getData().lat + "+" + cell.getData().lon + "&basemap=satellite";
|
||||
}
|
||||
|
||||
function datestrings() {
|
||||
const now = new Date();
|
||||
const currentDateTimeISO = now.toISOString().slice(0, -8) + 'Z';
|
||||
const oneMonthAgo = new Date();
|
||||
oneMonthAgo.setMonth(oneMonthAgo.getMonth() - 1);
|
||||
const oneMonthAgoISO = oneMonthAgo.toISOString().slice(0, -8) + 'Z';
|
||||
return {currentDateTimeISO, oneMonthAgoISO}
|
||||
}
|
||||
|
||||
dates = datestrings()
|
||||
start = dates['oneMonthAgoISO']
|
||||
end = dates['currentDateTimeISO']
|
||||
url = `https://mesonet.agron.iastate.edu/api/1/cow.json?wfo=RLX&begints=${dates.oneMonthAgoISO}&endts=${dates.currentDateTimeISO}&phenomena=SV&phenomena=TO&lsrtype=SV&lsrtype=TO`
|
||||
$.getJSON(url, function(json) {
|
||||
lsrdata = json;
|
||||
console.log("LSR Data Loaded:", lsrdata.events.features.length, "events");
|
||||
}).done(function() {
|
||||
table.setData("powerapi.php?svrpolys=potato");
|
||||
table.setSort([
|
||||
{column:"vtec", dir: "desc"},
|
||||
]);
|
||||
var timeout = setInterval(reloadData, 300000);
|
||||
});
|
||||
|
||||
function lsrfortable(cell, formatterParams) {
|
||||
vtectext = cell.getData().vtec;
|
||||
vtectext = vtectext.slice(1,-1);
|
||||
potato = vtectext.split('.');
|
||||
vtecstring = "#20" + potato[6].substring(0,2) + "-";
|
||||
}
|
||||
|
||||
function vtecget(cell, formatterParams){
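    // Builds the IEM VTEC browser link from the stored VTEC string; for example, a
    // hypothetical /O.NEW.KRLX.SV.W.0014.250627T0112Z-250627T0215Z/ becomes
    // https://mesonet.agron.iastate.edu/vtec/#2025-O-NEW-KRLX-SV-W-0014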
|
||||
vtectext = cell.getData().vtec;
|
||||
vtectext = vtectext.slice(1,-1);
|
||||
potato = vtectext.split('.');
|
||||
vtecstring = "#20" + potato[6].substring(0,2) + "-";
|
||||
for (let i = 0; i < 6; i++) {
|
||||
vtecstring = vtecstring.concat(potato[i]);
|
||||
if (i < 5) {
|
||||
vtecstring = vtecstring.concat("-");
|
||||
}
|
||||
}
|
||||
return "https://mesonet.agron.iastate.edu/vtec/" + vtecstring
|
||||
}
|
||||
|
||||
function poppcnt(cell, formatterParams) {
|
||||
out = cell.getData().outagesvalid / cell.getData().polygonpop;
|
||||
return out
|
||||
}
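    // The disclaimer above mentions a 2.5 household factor; poppcnt returns the raw
    // outages/population ratio and is not wired into the table. A minimal sketch of an
    // adjusted percent figure, assuming the same outagesvalid/polygonpop fields
    // (hypothetical helper, not used by the table below):
    function adjustedPercentOut(cell, formatterParams) {
        var adjusted = cell.getData().outagesvalid * 2.5 / cell.getData().polygonpop;
        return Math.round(adjusted * 1000) / 10; // percent of polygon population, one decimal
    }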
|
||||
|
||||
function findFeatureById(featureCollection, id) {
|
||||
return featureCollection.features.find(feature => feature.id === id);
|
||||
}
|
||||
|
||||
function reloadData() {
|
||||
table.replaceData("powerapi.php?svrpolys=potato");
|
||||
table.setSort([
|
||||
{column:"vtec", dir: "desc"},
|
||||
]);
|
||||
}
|
||||
|
||||
var table = new Tabulator("#outages", {
|
||||
height: '100%',
|
||||
responsiveLayout: true,
|
||||
tooltipsHeader: true,
|
||||
columns: [
|
||||
{title:"VTEC", field:"vtec", formatter:"link", formatterParams:{url: vtecget, target:"_blank"}},
|
||||
{title:"Outages During Warning + 2hrs", field:"outagesvalid"},
|
||||
{title:"Polygon Population", field:"polygonpop"},
|
||||
{title:"5 Mile Buffer on Polygon Outages", field:"outagesbuffer"},
|
||||
{title:"Potentially Ver Outages", field:"lsrids", formatter:function(cell) {
|
||||
var lsrids = cell.getData().lsrids;
|
||||
if (lsrids != null) {
|
||||
let array = lsrids.replace(/[{}]/g, '').split(',').filter(Boolean).map(Number);
|
||||
lsrstring = lsrids.replace(/[{}]/g, '');
|
||||
var vtec = cell.getData().vtec;
|
||||
return "<a href='lsrtool.html?vtec=" + vtec + "&id=" + lsrstring + "' target='_blank'>"+ array.length +"</a>"
|
||||
} else {
|
||||
return "None"
|
||||
}
|
||||
}},
|
||||
{title:"Ver LSRs", field:"vtec", formatter:function(cell) {
|
||||
var vtec = cell.getData().vtec;
|
||||
potato = vtec.split('.');
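            // Rebuild the warning id used to look up the event in the COW response:
            // 4-digit year + 'RLX' + ETN + phenomena + 'W1'
            // (e.g. a hypothetical /O.NEW.KRLX.SV.W.0014.25.../ becomes "2025RLX14SVW1").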
|
||||
warnid = "20" + potato[6].substring(0,2) + "RLX" + Number(potato[5]) + potato[3] + "W1";
|
||||
console.log("VTEC:", vtec, "WarnID:", warnid);
|
||||
|
||||
foundFeature = findFeatureById(lsrdata.events, warnid);
|
||||
console.log("Found Event:", foundFeature ? foundFeature.properties : "Not found");
|
||||
|
||||
if (foundFeature) {
|
||||
let stormreports = foundFeature.properties.stormreports;
|
||||
if (typeof stormreports === "string") {
|
||||
stormreports = stormreports.split(',').map(id => id.trim()).filter(id => id !== "");
|
||||
}
|
||||
stormreports = Array.isArray(stormreports) ? stormreports : [];
|
||||
console.log("Storm Reports IDs:", stormreports);
|
||||
|
||||
let lsrs = [];
|
||||
for (let i = 0; i < stormreports.length; i++) {
|
||||
let remark = getlsrtext(stormreports[i], lsrdata.stormreports);
|
||||
lsrs.push(remark ? remark : "No remark available for ID " + stormreports[i]);
|
||||
console.log("ID:", stormreports[i], "Remark:", remark);
|
||||
}
|
||||
let value = lsrs.length;
|
||||
|
||||
let clickable = document.createElement("span");
|
||||
clickable.innerHTML = value;
|
||||
clickable.style.cursor = "pointer";
|
||||
clickable.style.textDecoration = "underline";
|
||||
|
||||
// Bind the current lsrs and warnid to the click event
|
||||
clickable.addEventListener("click", (function(localLsrs, localWarnid) {
|
||||
return function() {
|
||||
let reportText = localLsrs.length > 0
|
||||
? localLsrs.join("\n\n")
|
||||
: "No storm reports available.";
|
||||
alert("Storm Reports for " + localWarnid + ":\n\n" + reportText);
|
||||
};
|
||||
})(lsrs.slice(), warnid)); // Pass copies to avoid reference issues
|
||||
|
||||
if (value == 0) {
|
||||
color = 'red';
|
||||
} else if (value == 1) {
|
||||
color = 'yellow';
|
||||
} else {
|
||||
color = 'green';
|
||||
}
|
||||
cell.getElement().style.backgroundColor = color;
|
||||
return clickable;
|
||||
} else {
|
||||
cell.getElement().style.backgroundColor = 'red';
|
||||
return 'None';
|
||||
}
|
||||
}}
|
||||
],
|
||||
});
|
||||
|
||||
function getlsrtext(id, lsrs) {
|
||||
foundFeature = findFeatureById(lsrs, id);
|
||||
return foundFeature ? foundFeature.properties.remark : null;
|
||||
}
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
106
svr.py
Normal file
106
svr.py
Normal file
@@ -0,0 +1,106 @@
|
||||
import requests
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
import re
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
S = requests.Session()
|
||||
#select power.realgeom from power,svr where st_contains(svr.nwspoly,power.realgeom)
|
||||
|
||||
headers = {
|
||||
'user-agent': 'wx.stoat.org, john.peck@noaa.gov','Cache-Control': 'no-cache',
|
||||
}
|
||||
|
||||
states = ['WV','OH','VA','KY']
|
||||
|
||||
basestate = 'https://api.weather.gov/alerts/active/area/'
|
||||
|
||||
allalerts = "https://api.weather.gov/alerts"
|
||||
|
||||
|
||||
|
||||
def decode_vtec(pvtec_string):
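    """Split a P-VTEC string such as /O.NEW.KRLX.SV.W.0014.250627T0112Z-250627T0215Z/
    (illustrative example) into its leading dot-separated fields, returned as a list with
    the issuing office moved to the end; callers index the result positionally.
    Returns None if the string does not match the expected format."""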
|
||||
pattern = r"/([A-Z]+)\.([A-Z]+)\.([A-Z]+)\.([A-Z]+)\.([A-Z])\.([0-9]+)\.([0-9A-Z]+)T([0-9A-Z]+)-([0-9A-Z]+)T([0-9A-Z]+)/"
|
||||
match = re.match(pattern, pvtec_string)
|
||||
if match:
|
||||
action_code = match.group(1)
|
||||
phenomena = match.group(2)
|
||||
office = match.group(3)
|
||||
significance = match.group(4)
|
||||
event_type = match.group(5)
|
||||
event_tracking_number = match.group(6)
|
||||
result = [action_code,phenomena,significance,event_type,event_tracking_number,office]
|
||||
    else:
        print("P-VTEC string format not recognized.")
        result = None  # avoid an UnboundLocalError when the pattern does not match
    return result
|
||||
|
||||
|
||||
def new_check_for_products():
|
||||
products = json.loads(S.get(allalerts, headers=headers).text)
|
||||
for i in products['features']:
|
||||
for j in i['properties']['parameters']['AWIPSidentifier']:
|
||||
#print(j,' first loop')
|
||||
if j == 'SVRRLX' or j == 'TORRLX':
|
||||
warntype = j
|
||||
geometry = i['geometry']
|
||||
gisgeom = json.dumps(geometry)
|
||||
issuetime = i['properties']['sent']
|
||||
issuetime = datetime.strptime(issuetime,"%Y-%m-%dT%H:%M:%S%z")  # 'sent' carries a UTC offset but no fractional seconds; %f here would fail to match
|
||||
endtime = i['properties']['ends']
|
||||
endtime = datetime.strptime(endtime,"%Y-%m-%dT%H:%M:%S%z")
|
||||
vtec = i['properties']['parameters']['VTEC'][0]
|
||||
sql = "insert into svr (nwspoly, issue, endtime, warntype, vtec) values (ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326),%s,%s,%s,%s) on conflict (vtec) do nothing"
|
||||
parms = (gisgeom,issuetime,endtime,warntype,vtec)
|
||||
cursor.execute(sql,parms)
|
||||
conn.commit()
|
||||
|
||||
|
||||
for i in products['features']:
|
||||
for j in i['properties']['parameters']['AWIPSidentifier']:
|
||||
# print(j,'second loop')
|
||||
try:
|
||||
if 'SVS' in j or 'SVR' in j or 'TOR' in j or 'FFW' in j or 'FFS' in j or 'FLW' in j or 'FLS' in j:
|
||||
warntype = j
|
||||
geometry = i['geometry']
|
||||
gisgeom = json.dumps(geometry)
|
||||
issuetime = i['properties']['sent']
|
||||
issuetime = datetime.strptime(issuetime,"%Y-%m-%dT%H:%M:%S%z")
|
||||
endtime = i['properties']['ends']
|
||||
print(endtime)
|
||||
endtime = datetime.strptime(endtime,"%Y-%m-%dT%H:%M:%S%z")
|
||||
year = issuetime.year
|
||||
vtec = i['properties']['parameters']['VTEC'][0]
|
||||
pil = i['properties']['parameters']['AWIPSidentifier'][0]
|
||||
ugc = i['properties']['geocode']['UGC']
|
||||
vtecelements = decode_vtec(vtec)
|
||||
etin = vtecelements[4]
|
||||
operational = vtecelements[0]
|
||||
svstype = vtecelements[1]
|
||||
warntype = vtecelements[2]
|
||||
eventtype = vtecelements[3]
|
||||
office = vtecelements[5]
|
||||
print(ugc)
|
||||
sql = "insert into warntracker (nwspoly, issue, endtime, warntype, vtectext,etin,svstype,pil,ugc,year,office,sig) values (ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326),%s,%s,%s,%s,%s,%s,%s,%s, %s,%s,%s) on conflict do nothing"
|
||||
|
||||
parms = (gisgeom,issuetime,endtime,warntype,vtec,etin,svstype,pil,ugc,year,office,eventtype)
|
||||
print(parms)
|
||||
cursor.execute(sql,parms)
|
||||
conn.commit()
|
||||
|
||||
except Exception as e:
|
||||
print(i,e)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
new_check_for_products()
|
||||
|
||||
|
||||
|
||||
conn.commit()
|
||||
276
svr2.py
Normal file
276
svr2.py
Normal file
@@ -0,0 +1,276 @@
|
||||
import requests
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone, timedelta
|
||||
import re
|
||||
import time
|
||||
import sys
|
||||
|
||||
# --- Configuration ---
|
||||
DRY_RUN = False # Set to False for actual DB writes
|
||||
|
||||
# --- Database Connection ---
|
||||
DB_CONN_INFO = { 'host': 'localhost', 'database': 'nws', 'user': 'nws', 'password': 'nws' }
|
||||
conn = None
|
||||
cursor = None
|
||||
try:
|
||||
conn = psycopg2.connect(**DB_CONN_INFO)
|
||||
cursor = conn.cursor()
|
||||
print("Database connection successful.")
|
||||
except psycopg2.OperationalError as e:
|
||||
print(f"WARNING: Database connection failed: {e}. Backfill checks for existing VTEC will not work.")
|
||||
# exit()
|
||||
|
||||
# --- Requests Session ---
|
||||
S = requests.Session()
|
||||
headers = {
|
||||
'user-agent': 'wx.stoat.org Backfill Script v3, john.peck@noaa.gov',
|
||||
'Accept': 'application/json',
|
||||
'Cache-Control': 'no-cache',
|
||||
}
|
||||
|
||||
# --- Configuration ---
|
||||
BACKFILL_DAYS = 2
|
||||
WFOS_FOR_BACKFILL = ['RLX']
|
||||
COW_API_BASE_URL = "https://mesonet.agron.iastate.edu/api/1/cow.json"
|
||||
PHENOMENA_SIG_PAIRS = [ ('SV', 'W'), ('TO', 'W'), ('FF', 'W'), ('MA', 'W'), ('SQ', 'W'), ('DS', 'W') ]
|
||||
|
||||
|
||||
# --- IEM COW API Backfill (Refactored to construct full VTEC) ---
|
||||
def backfill_from_cow(session, db_cursor, db_conn, days_back, wfo_list):
|
||||
"""Queries IEM COW API, constructs the full VTEC string, and inserts if missing."""
|
||||
print(f"\nStarting IEM COW API backfill simulation for the last {days_back} days...")
|
||||
end_time_utc = datetime.now(timezone.utc)
|
||||
start_time_utc = end_time_utc - timedelta(days=days_back)
|
||||
start_ts_str = start_time_utc.strftime('%Y-%m-%dT%H:%MZ')
|
||||
end_ts_str = end_time_utc.strftime('%Y-%m-%dT%H:%MZ')
|
||||
|
||||
cow_processed = 0
|
||||
cow_checked_exists = 0
|
||||
cow_would_insert_svr = 0
|
||||
cow_would_insert_tracker = 0
|
||||
cow_skipped_no_link = 0
|
||||
cow_skipped_bad_vtec_parse = 0
|
||||
cow_skipped_missing_times = 0 # Counter for missing times needed for VTEC key
|
||||
|
||||
for wfo in wfo_list:
|
||||
print(f" Querying COW for WFO: {wfo}")
|
||||
for phenom, sig in PHENOMENA_SIG_PAIRS:
|
||||
params = { 'wfo': wfo, 'phenomena': phenom, 'significance': sig,
|
||||
'begints': start_ts_str, 'endts': end_ts_str, 'limit': 1000 }
|
||||
try:
|
||||
# ... (API request and initial JSON parsing) ...
|
||||
response = session.get(COW_API_BASE_URL, params=params, headers=headers, timeout=90)
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(f" ERROR: Failed fetch for {wfo}/{phenom}.{sig}: {e}")
|
||||
continue
|
||||
except json.JSONDecodeError as e:
|
||||
print(f" ERROR: Failed JSON parse for {wfo}/{phenom}.{sig}: {e}")
|
||||
continue
|
||||
|
||||
if 'events' not in data or 'features' not in data['events']: continue
|
||||
event_features = data['events']['features']
|
||||
if not event_features: continue
|
||||
|
||||
print(f" Found {len(event_features)} {phenom}.{sig} event features for WFO {wfo}.")
|
||||
|
||||
for feature in event_features:
|
||||
try:
|
||||
cow_processed += 1
|
||||
properties = feature.get('properties')
|
||||
geometry = feature.get('geometry')
|
||||
feature_id = feature.get('id') # For logging
|
||||
|
||||
if not properties or not geometry:
|
||||
print(f" DEBUG: Skipping COW feature - missing properties or geometry. ID: {feature_id}")
|
||||
continue
|
||||
|
||||
# --- Extract VTEC components from Link ---
|
||||
link = properties.get('link')
|
||||
if not link or not isinstance(link, str):
|
||||
print(f" WARNING: Skipping COW feature - Missing or invalid 'link'. ID: {feature_id}, ProdID: {properties.get('product_id')}")
|
||||
cow_skipped_no_link += 1
|
||||
continue
|
||||
|
||||
try:
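                        # The event 'link' is assumed to end in a short VTEC key of the form
                        # YYYY-O-ACTION-KXXX-PH-S-ETN (seven dash-separated parts); the split
                        # below relies on that layout, and anything else is treated as a parse failure.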
|
||||
vtec_short_form = link.strip('/').split('/')[-1]
|
||||
vtec_parts = vtec_short_form.split('-')
|
||||
if len(vtec_parts) != 7: raise ValueError("Incorrect number of VTEC parts")
|
||||
# Assign parsed components
|
||||
v_action = vtec_parts[2].upper()
|
||||
v_office_raw = vtec_parts[3].upper()
|
||||
v_phenomena = vtec_parts[4].upper()
|
||||
v_significance = vtec_parts[5].upper()
|
||||
v_etn = vtec_parts[6]
|
||||
v_office_k = v_office_raw # Keep KXXX format for VTEC string
|
||||
v_office_3letter = v_office_raw[1:] if v_office_raw.startswith('K') and len(v_office_raw) == 4 else v_office_raw # For office column
|
||||
except Exception as e:
|
||||
print(f" WARNING: Skipping COW feature - Failed VTEC parse from link '{link}': {e}. ID: {feature_id}")
|
||||
cow_skipped_bad_vtec_parse += 1
|
||||
continue
|
||||
|
||||
# --- Extract and Parse Timestamps ---
|
||||
issue_str = properties.get('issue')
|
||||
expire_str = properties.get('expire')
|
||||
if not issue_str or not expire_str:
|
||||
print(f" WARNING: Skipping COW feature - Missing issue or expire time needed for full VTEC key. VTEC(short): {vtec_short_form}, ID: {feature_id}")
|
||||
cow_skipped_missing_times += 1
|
||||
continue # Cannot construct full VTEC without both times
|
||||
|
||||
try:
|
||||
issuetime = datetime.fromisoformat(issue_str.replace('Z', '+00:00'))
|
||||
endtime = datetime.fromisoformat(expire_str.replace('Z', '+00:00'))
|
||||
except ValueError as e:
|
||||
print(f" WARNING: Could not parse timestamp for COW VTEC(short) {vtec_short_form}. Issue: '{issue_str}', Expire: '{expire_str}'. Error: {e}. ID: {feature_id}")
|
||||
cow_skipped_missing_times += 1
|
||||
continue
|
||||
|
||||
# --- Construct Full VTEC String for Database ---
|
||||
# Format: /O.ACTION.KXXX.PHENOM.SIG.ETN.YYMMDDTHHMMZ-YYMMDDTHHMMZ/
|
||||
# Note: NWS VTEC uses 2-digit year (%y)
|
||||
try:
|
||||
issue_vtec_time = issuetime.strftime('%y%m%dT%H%MZ')
|
||||
expire_vtec_time = endtime.strftime('%y%m%dT%H%MZ')
|
||||
# Ensure K is present for office ID in VTEC string
|
||||
if not v_office_k.startswith('K') and len(v_office_k) == 3:
|
||||
v_office_k_formatted = 'K' + v_office_k
|
||||
else:
|
||||
v_office_k_formatted = v_office_k # Assume it's already KXXX or something else
|
||||
|
||||
# Assemble the string
|
||||
# Using 'O' for the operational mode, matching the DB example
|
||||
full_vtec_key = f"/O.{v_action}.{v_office_k_formatted}.{v_phenomena}.{v_significance}.{v_etn}.{issue_vtec_time}-{expire_vtec_time}/"
|
||||
|
||||
except Exception as e:
|
||||
print(f" ERROR: Failed to format VTEC time strings for {vtec_short_form}. Error: {e}. ID: {feature_id}")
|
||||
cow_skipped_missing_times += 1 # Count as time-related issue
|
||||
continue
|
||||
|
||||
# --- Extract other needed properties ---
|
||||
product_id = properties.get('product_id')
|
||||
prop_wfo = properties.get('wfo')
|
||||
office_col_val = prop_wfo if prop_wfo else v_office_3letter # Office for the dedicated 'office' column
|
||||
ugc_list = properties.get('ar_ugc')
|
||||
prop_year = properties.get('year') # Use property year if available, else derive
|
||||
|
||||
# --- Further Validation ---
|
||||
if not product_id or not office_col_val:
|
||||
print(f" DEBUG: Skipping COW feature {full_vtec_key} - missing product_id or office. ID: {feature_id}")
|
||||
continue
|
||||
if geometry['type'] not in ('Polygon', 'MultiPolygon'):
|
||||
print(f" DEBUG: Skipping COW feature {full_vtec_key} - non-polygon geometry. ID: {feature_id}")
|
||||
continue
|
||||
|
||||
# --- Check if Full VTEC exists in DB ---
|
||||
exists_in_svr = False
|
||||
exists_in_tracker = False
|
||||
if db_cursor:
|
||||
try:
|
||||
db_cursor.execute("SELECT 1 FROM svr WHERE vtec = %s", (full_vtec_key,))
|
||||
exists_in_svr = db_cursor.fetchone() is not None
|
||||
db_cursor.execute("SELECT 1 FROM warntracker WHERE vtectext = %s", (full_vtec_key,))
|
||||
exists_in_tracker = db_cursor.fetchone() is not None
|
||||
cow_checked_exists += 1
|
||||
except Exception as e:
|
||||
print(f" ERROR: DB check failed for VTEC {full_vtec_key}: {e}")
|
||||
continue
|
||||
# else: Assume missing if no DB conn/cursor
|
||||
|
||||
# --- Skip if already exists ---
|
||||
is_svr_tor_type = v_phenomena in ('SV', 'TO') and v_significance == 'W'
|
||||
if exists_in_tracker and (not is_svr_tor_type or exists_in_svr):
|
||||
continue
|
||||
|
||||
# --- Prepare final data for insertion ---
|
||||
year = prop_year if prop_year else issuetime.year # Prefer property year, fallback to issue time
|
||||
gisgeom_str = json.dumps(geometry)
|
||||
pil_parts = product_id.split('-')
|
||||
pil = pil_parts[-1] if len(pil_parts) > 1 else product_id
|
||||
ugc_param = list(ugc_list) if isinstance(ugc_list, (list, tuple)) else []
|
||||
|
||||
# --- Simulate insert into 'svr' table if missing ---
|
||||
if is_svr_tor_type and not exists_in_svr:
|
||||
svr_warntype = f"{v_phenomena}{office_col_val}" # e.g., TORRLX
|
||||
sql_svr = """
|
||||
INSERT INTO svr (nwspoly, issue, endtime, warntype, vtec)
|
||||
VALUES (ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326), %s, %s, %s, %s)
|
||||
ON CONFLICT (issue,warntype) DO NOTHING
|
||||
"""
|
||||
# Use the constructed full_vtec_key for the 'vtec' column
|
||||
params_svr = (gisgeom_str, issuetime, endtime, svr_warntype, full_vtec_key)
|
||||
if DRY_RUN:
|
||||
print(f" [DRY RUN] Would execute COW svr backfill:")
|
||||
print(f" SQL: INSERT INTO svr ... VTEC='{full_vtec_key}'")
|
||||
cow_would_insert_svr += 1
|
||||
else: # Actual Insert
|
||||
try: db_cursor.execute(sql_svr, params_svr)
|
||||
except Exception as e:
|
||||
print(f" ERROR: Insert failed for svr table, VTEC {full_vtec_key}: {e}")
|
||||
if db_conn: db_conn.rollback()
|
||||
|
||||
# --- Simulate insert into 'warntracker' table if missing ---
|
||||
if not exists_in_tracker: # This check might now be slightly redundant if ON CONFLICT works, but keep it for logic flow
|
||||
sql_tracker = """
|
||||
INSERT INTO warntracker (
|
||||
nwspoly, issue, endtime, warntype, vtectext, etin,
|
||||
svstype, pil, ugc, year, office, sig
|
||||
) VALUES (
|
||||
ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326), %s, %s, %s, %s, %s,
|
||||
%s, %s, %s, %s, %s, %s
|
||||
)
|
||||
ON CONFLICT (issue, vtectext) DO NOTHING -- **** MODIFIED HERE ****
|
||||
"""
|
||||
params_tracker = (
|
||||
gisgeom_str, issuetime, endtime, v_significance, full_vtec_key, v_etn,
|
||||
v_phenomena, pil, ugc_param, year, office_col_val, v_action
|
||||
)
|
||||
if DRY_RUN:
|
||||
print(f" [DRY RUN] Would execute COW warntracker backfill:")
|
||||
print(f" SQL: INSERT INTO warntracker ... VTEC='{full_vtec_key}'")
|
||||
cow_would_insert_tracker += 1
|
||||
else: # Actual Insert
|
||||
try: db_cursor.execute(sql_tracker, params_tracker)
|
||||
except Exception as e:
|
||||
print(f" ERROR: Insert failed for warntracker table, VTEC {full_vtec_key}: {e}")
|
||||
# print(f" DEBUG PARAMS: {params_tracker}") # Uncomment for deep debug
|
||||
if db_conn: db_conn.rollback()
|
||||
|
||||
except Exception as e:
|
||||
# Catch unexpected errors during feature processing
|
||||
link_err = feature.get('properties', {}).get('link', 'N/A')
|
||||
print(f" ERROR: Unexpected error processing COW feature (Link: {link_err}): {e}", exc_info=True)
|
||||
|
||||
# --- Commit after each WFO/Phenom/Sig batch (if not DRY_RUN) ---
|
||||
if not DRY_RUN and db_conn:
|
||||
try: db_conn.commit()
|
||||
except Exception as e: print(f"ERROR: Failed commit after {wfo}/{phenom}.{sig}: {e}")
|
||||
time.sleep(0.2)
|
||||
|
||||
# --- Final Summary ---
|
||||
if DRY_RUN:
|
||||
print(f"\nCOW API backfill simulation complete. Processed Features: {cow_processed}, Checked DB: {cow_checked_exists}")
|
||||
print(f" Skipped (No/Bad Link): {cow_skipped_no_link}, Skipped (Bad VTEC Parse): {cow_skipped_bad_vtec_parse}, Skipped (Missing Times): {cow_skipped_missing_times}")
|
||||
print(f" Would Insert SVR: {cow_would_insert_svr}, Would Insert Tracker: {cow_would_insert_tracker}")
|
||||
else:
|
||||
print(f"\nCOW API backfill complete (live run). Processed Features: {cow_processed}.")
|
||||
print(f" Skipped (No/Bad Link): {cow_skipped_no_link}, Skipped (Bad VTEC Parse): {cow_skipped_bad_vtec_parse}, Skipped (Missing Times): {cow_skipped_missing_times}")
|
||||
|
||||
# --- Main Execution ---
|
||||
if __name__ == "__main__":
|
||||
print(f"Script started at {datetime.now(timezone.utc)}")
|
||||
if DRY_RUN:
|
||||
print("--- RUNNING IN DRY RUN MODE - NO DATABASE CHANGES WILL BE MADE ---")
|
||||
|
||||
# Optional: Run NWS check first if needed
|
||||
# check_nws_api_products(S, cursor, conn)
|
||||
|
||||
# Run COW backfill
|
||||
backfill_from_cow(S, cursor, conn, days_back=BACKFILL_DAYS, wfo_list=WFOS_FOR_BACKFILL)
|
||||
|
||||
# Close connection
|
||||
if cursor: cursor.close()
|
||||
if conn: conn.close(); print("Database connection closed.")
|
||||
print(f"Script finished at {datetime.now(timezone.utc)}")
|
||||
115
svrcheck2.py
Normal file
115
svrcheck2.py
Normal file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import requests
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
import re
|
||||
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
|
||||
def polygon_pop():
|
||||
cursor.execute("select warnindex from svr where polygonpop is null")
|
||||
polys_to_update = cursor.fetchall()
|
||||
sql = "update svr set polygonpop = (select(St_SummaryStats(st_clip(rlxpop.rast, svr.nwspoly, true))).sum::int from rlxpop,svr where warnindex = %s) where warnindex = %s"
|
||||
for p in polys_to_update:
|
||||
vals = (p[0], p[0])  # each fetched row is a one-element tuple; pass the bare warnindex for both placeholders
|
||||
cursor.execute(sql,vals)
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
def verify_severe():
|
||||
cursor.execute('select vtec from svr where EXTRACT(EPOCH FROM (current_timestamp - endtime ))/60/60/24 < 3')
|
||||
svr = cursor.fetchall()
|
||||
#print(svr)
|
||||
for i in svr:
|
||||
sql = "select sum (power.outagen) as total from power,svr where svr.vtec = %s and ST_Contains(svr.nwspoly,power.realgeom) and power.startguess > svr.issue and power.startguess < svr.endtime + (120 ||'minutes')::interval group by svr.vtec"
|
||||
val = (i,)
|
||||
cursor.execute(sql,val)
|
||||
svrout = cursor.fetchall()
|
||||
try:
|
||||
if svrout[0][0] is None:
|
||||
svroutages = 0
|
||||
else:
|
||||
svroutages = str(svrout[0][0])
|
||||
sql = 'update svr set outagesvalid = %s where vtec = %s'
|
||||
val = (svroutages,i)
|
||||
cursor.execute(sql,val)
|
||||
# info = {'vtec': str(i[0]), 'outagesvalid': svroutages}
|
||||
# svrinfo.append(info)
|
||||
|
||||
except Exception:
|
||||
svroutages = 0
|
||||
sql = 'update svr set outagesvalid = %s where vtec = %s'
|
||||
val = (svroutages,i)
|
||||
cursor.execute(sql,val)
|
||||
conn.commit()
|
||||
# info = {'vtec': str(i[0]), 'outagesvalid': svroutages}
|
||||
# svrinfo.append(info)
|
||||
conn.commit()
|
||||
|
||||
#select sum (power.outagen) as total from power,svr where st_contains(st_buffer(svr.nwspoly, .1,'side=left join=mitre quad_segs=32'),power.realgeom) and power.startguess > svr.issue and power.startguess < svr.endtime + (60 ||'minutes')::interval group by svr.vtec
|
||||
#select nwspoly, st_buffer(svr.nwspoly, .072,'side=left join=mitre quad_segs=32') from svr limit 1
|
||||
def verify_severe_buffer():
|
||||
cursor.execute('select vtec from svr where EXTRACT(EPOCH FROM (current_timestamp - endtime ))/60/60/24 < 3')
|
||||
svr = cursor.fetchall()
|
||||
#print(svr)
|
||||
for i in svr:
|
||||
sql = "select sum (power.outagen) as total from power,svr where svr.vtec = %s and ST_Contains(st_buffer(svr.nwspoly, .072,'side=left join=mitre quad_segs=32'),power.realgeom) and power.startguess > svr.issue and power.startguess < svr.endtime + (120 ||'minutes')::interval group by svr.vtec"
|
||||
val = (i,)
|
||||
cursor.execute(sql,val)
|
||||
svrout = cursor.fetchall()
|
||||
try:
|
||||
if svrout[0][0] is None:
|
||||
svroutages = 0
|
||||
else:
|
||||
svroutages = str(svrout[0][0])
|
||||
sql = 'update svr set outagesbuffer = %s where vtec = %s'
|
||||
val = (svroutages,i)
|
||||
cursor.execute(sql,val)
|
||||
# info = {'vtec': str(i[0]), 'outagesvalid': svroutages}
|
||||
# svrinfo.append(info)
|
||||
|
||||
except Exception:
|
||||
svroutages = 0
|
||||
sql = 'update svr set outagesbuffer = %s where vtec = %s'
|
||||
val = (svroutages,i)
|
||||
cursor.execute(sql,val)
|
||||
conn.commit()
|
||||
# info = {'vtec': str(i[0]), 'outagesbuffer': svroutages}
|
||||
# svrinfo.append(info)
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
svrinfo = []
|
||||
verify_severe()
|
||||
verify_severe_buffer()
|
||||
|
||||
|
||||
|
||||
cursor.execute('select vtec,outagesvalid,polygonpop,outagesbuffer from svr where EXTRACT(EPOCH FROM (current_timestamp - endtime ))/60/60/24 < 60')
|
||||
svrout = cursor.fetchall()
|
||||
|
||||
polygon_pop()
|
||||
|
||||
for i in svrout:
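    # percout applies the same 2.5 persons-per-outage household factor noted in svr.html,
    # expressed as a percent of polygon population rounded to one decimal place.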
|
||||
info = {'vtec': str(i[0]), 'outagesvalid': i[1], 'polygonpop': int(i[2]), 'percout': round(i[1]*2.5/int(i[2])*1000)/10, 'outagesbuffer': i[3]}
|
||||
svrinfo.append(info)
|
||||
|
||||
|
||||
|
||||
with open("/var/www/html/work/svr.json", "w") as outfile:
|
||||
outfile.write(json.dumps(svrinfo))
|
||||
|
||||
cursor.close()
|
||||
conn.close()
|
||||
118
svrver.html
Normal file
118
svrver.html
Normal file
@@ -0,0 +1,118 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>RLX CAD Feed</title>
|
||||
|
||||
<script type="text/javascript" src="https://unpkg.com/tabulator-tables@6.3.0/dist/js/tabulator.min.js"></script>
|
||||
<link href="/tabulator/dist/css/tabulator_midnight.css" rel="stylesheet">
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.7.1/jquery.min.js"></script>
|
||||
<style>
|
||||
.tabulator, .tabulator-header, .tabulator-tableHolder{
|
||||
overflow:visible !important;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
a {
|
||||
color: pink;
|
||||
}
|
||||
.switch {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
width: 40px;
|
||||
height: 24px;
|
||||
}
|
||||
|
||||
/* Hide default HTML checkbox */
|
||||
.switch input {
|
||||
opacity: 0;
|
||||
width: 0;
|
||||
height: 0;
|
||||
}
|
||||
|
||||
/* The slider */
|
||||
.slider {
|
||||
position: absolute;
|
||||
cursor: pointer;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background-color: #ccc;
|
||||
-webkit-transition: .4s;
|
||||
transition: .4s;
|
||||
}
|
||||
|
||||
.slider:before {
|
||||
position: absolute;
|
||||
content: "";
|
||||
height: 16px;
|
||||
width: 16px;
|
||||
left: 4px;
|
||||
bottom: 4px;
|
||||
background-color: white;
|
||||
-webkit-transition: .4s;
|
||||
transition: .4s;
|
||||
}
|
||||
|
||||
input:checked + .slider {
|
||||
background-color: #2196F3;
|
||||
}
|
||||
|
||||
input:focus + .slider {
|
||||
box-shadow: 0 0 1px #2196F3;
|
||||
}
|
||||
|
||||
input:checked + .slider:before {
|
||||
-webkit-transform: translateX(16px);
|
||||
-ms-transform: translateX(16px);
|
||||
transform: translateX(16px);
|
||||
}
|
||||
|
||||
/* Rounded sliders */
|
||||
.slider.round {
|
||||
border-radius: 24px;
|
||||
}
|
||||
|
||||
.slider.round:before {
|
||||
border-radius: 50%;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
|
||||
|
||||
|
||||
<body>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
<script>
|
||||
|
||||
// Pull radar from duration of polygon
|
||||
//Pull power outages
|
||||
//Pull existing LSRs
|
||||
//Pull 911 reports
|
||||
|
||||
|
||||
function saddisplay() {
|
||||
const queryString = window.location.search;
|
||||
const urlParams = new URLSearchParams(queryString);
|
||||
const staticdisplay = urlParams.has('vtec')
|
||||
const reportids = urlParams.has('reportids')
|
||||
console.log(urlParams.getAll('vtec'));
|
||||
console.log(urlParams.getAll('id'));
|
||||
|
||||
|
||||
}
|
||||
|
||||
saddisplay();
|
||||
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
Some files were not shown because too many files have changed in this diff